text stringlengths 8 4.13M |
|---|
use shorthand::ShortHand;
// Compile-test fixture for the `ShortHand` derive: exercises accepted and
// rejected spellings of the `#[shorthand(...)]` attribute. The `// valid` /
// `// invalid` markers state the expected outcome for each field.
#[derive(ShortHand)]
struct Example {
#[shorthand(enable(forward))] // valid
value_0: String,
#[shorthand(enable(forward(doc)))] // valid
value_1: String,
#[shorthand(disable(forward(doc)))] // valid
value_2: String,
#[shorthand(enable(forward("")))] // invalid
value_3: String,
#[shorthand(enable(forward = ""))] // invalid
value_4: String,
#[shorthand(enable(forward(x = "")))] // invalid
value_5: String,
#[shorthand(enable(""))] // invalid
value_6: String,
#[shorthand(enable(forward(x(y))))] // invalid
value_7: String,
}
// Intentionally empty: this file exists only to be (un)compilable.
fn main() {}
|
extern crate spatialos_sdk_sys;
pub(crate) mod ptr;
pub mod worker;
|
use std::collections::HashSet;
/// Decomposes `n` into its decimal digits, least significant digit first.
///
/// Returns an empty vector for `n <= 0` (callers here only pass positive
/// values, so zero never reaches the digit loop).
fn divide(mut n: i64) -> Vec<i64> {
    let mut digits = vec![];
    while n > 0 {
        digits.push(n % 10);
        n /= 10;
    }
    // Idiomatic tail expression instead of an explicit `return`.
    digits
}
/// Finds and prints the smallest positive `n` such that `n`, `2n`, ..., `6n`
/// are all permutations of the same digits (Project Euler #52 style search).
fn main() {
    let mut n = 1;
    loop {
        // Normalize each multiple to its sorted digit list; if all six
        // normalizations are equal the set collapses to one element.
        // `divide` already returns an owned Vec, so sort it in place
        // instead of cloning it first.
        let x: HashSet<Vec<i64>> = (1..=6)
            .map(|p| divide(n * p))
            .map(|mut digits| {
                digits.sort();
                digits
            })
            .collect();
        if x.len() == 1 {
            println!("{}", n);
            return;
        }
        n += 1;
    }
}
|
use std::ops::Bound;
use chrono::{DateTime, Utc};
use sqlx::postgres::{types::PgRange, PgConnection};
use svc_agent::AgentId;
use uuid::Uuid;
use serde_derive::{Deserialize, Serialize};
////////////////////////////////////////////////////////////////////////////////
/// One row of the `recording` table: a recorded RTC stream of a class.
#[allow(dead_code)]
#[derive(Clone, Debug)]
pub struct Object {
id: Uuid,
class_id: Uuid,
rtc_id: Uuid,
// URI of the uploaded stream, if the upload has happened.
stream_uri: Option<String>,
// Raw segments as written by the stream-upload query.
segments: Option<Segments>,
// Segments after time adjustment, if adjustment ran.
modified_segments: Option<Segments>,
started_at: Option<DateTime<Utc>>,
created_at: DateTime<Utc>,
// Set to NOW() by the adjust queries below.
adjusted_at: Option<DateTime<Utc>>,
// Set to NOW() by `TranscodingUpdateQuery`.
transcoded_at: Option<DateTime<Utc>>,
created_by: AgentId,
// Soft-delete marker; NULL means the row is live.
deleted_at: Option<DateTime<Utc>>,
}
impl Object {
    /// Row id (exposed for tests only).
    #[cfg(test)]
    pub fn id(&self) -> Uuid {
        self.id
    }
    /// URI of the uploaded stream, if any.
    pub fn stream_uri(&self) -> Option<&String> {
        self.stream_uri.as_ref()
    }
    /// Id of the RTC this recording belongs to.
    pub fn rtc_id(&self) -> Uuid {
        self.rtc_id
    }
    /// When the stream started, if known.
    pub fn started_at(&self) -> Option<DateTime<Utc>> {
        self.started_at
    }
    /// Raw recorded segments, if present.
    pub fn segments(&self) -> Option<&Segments> {
        self.segments.as_ref()
    }
    /// Adjusted segments, if adjustment ran.
    pub fn modified_segments(&self) -> Option<&Segments> {
        self.modified_segments.as_ref()
    }
    /// Adjusted segments when available, otherwise the raw segments.
    pub fn modified_or_segments(&self) -> Option<&Segments> {
        // Both operands are cheap field borrows, so the eager `or` is
        // clearer than `or_else` with a closure.
        self.modified_segments.as_ref().or(self.segments.as_ref())
    }
    /// When segment adjustment completed, if it did.
    pub fn adjusted_at(&self) -> Option<DateTime<Utc>> {
        self.adjusted_at
    }
    /// When transcoding completed, if it did.
    pub fn transcoded_at(&self) -> Option<DateTime<Utc>> {
        self.transcoded_at
    }
    /// Agent that produced the recording.
    pub fn created_by(&self) -> &AgentId {
        &self.created_by
    }
}
////////////////////////////////////////////////////////////////////////////////
// Serde-facing representation of `Segments`: a list of (start, end) bounds.
pub type BoundedOffsetTuples = Vec<(Bound<i64>, Bound<i64>)>;
/// A list of recorded time ranges, stored as Postgres int8 ranges and
/// (de)serialized via `BoundedOffsetTuples`.
#[derive(Clone, Debug, PartialEq, Eq, Deserialize, Serialize, sqlx::Type, Default)]
#[sqlx(transparent)]
#[serde(from = "BoundedOffsetTuples")]
#[serde(into = "BoundedOffsetTuples")]
pub struct Segments(Vec<PgRange<i64>>);
impl Segments {
pub fn last(&self) -> Option<&PgRange<i64>> {
self.0.last()
}
pub fn empty() -> Segments {
Segments(vec![])
}
}
impl From<BoundedOffsetTuples> for Segments {
fn from(segments: BoundedOffsetTuples) -> Self {
Self(segments.into_iter().map(PgRange::from).collect())
}
}
impl From<Segments> for BoundedOffsetTuples {
fn from(segments: Segments) -> Self {
segments.0.into_iter().map(|s| (s.start, s.end)).collect()
}
}
impl From<Segments> for Vec<PgRange<i64>> {
fn from(segments: Segments) -> Self {
segments.0
}
}
////////////////////////////////////////////////////////////////////////////////
/// Lists all live (non-soft-deleted) recordings of a class.
pub struct RecordingListQuery {
class_id: Uuid,
}
impl RecordingListQuery {
pub fn new(class_id: Uuid) -> Self {
Self { class_id }
}
/// Fetches every recording row for the class, skipping soft-deleted ones.
pub async fn execute(self, conn: &mut PgConnection) -> sqlx::Result<Vec<Object>> {
sqlx::query_as!(
Object,
r#"
SELECT
id,
class_id,
rtc_id,
stream_uri,
segments AS "segments!: Option<Segments>",
modified_segments AS "modified_segments!: Option<Segments>",
started_at,
created_at,
adjusted_at,
transcoded_at,
created_by AS "created_by: AgentId",
deleted_at
FROM recording
WHERE class_id = $1 AND deleted_at IS NULL
"#,
self.class_id
)
.fetch_all(conn)
.await
}
}
////////////////////////////////////////////////////////////////////////////////
/// Hard-deletes the recording of `rtc_id` within `class_id`.
/// Note: unlike `DeleteQuery` below, this physically removes the row.
pub async fn remove_recording(
class_id: Uuid,
rtc_id: Uuid,
conn: &mut PgConnection,
) -> sqlx::Result<()> {
sqlx::query!(
"DELETE FROM recording WHERE class_id = $1 AND rtc_id = $2",
class_id,
rtc_id
)
.execute(conn)
.await?;
Ok(())
}
////////////////////////////////////////////////////////////////////////////////
/// Inserts a recording row, or refreshes an existing live row with the same
/// (class_id, created_by) pair (upsert).
pub struct RecordingInsertQuery {
class_id: Uuid,
rtc_id: Uuid,
segments: Option<Segments>,
started_at: Option<DateTime<Utc>>,
stream_uri: Option<String>,
modified_segments: Option<Segments>,
adjusted_at: Option<DateTime<Utc>>,
transcoded_at: Option<DateTime<Utc>>,
created_by: AgentId,
}
impl RecordingInsertQuery {
// All optional columns default to NULL.
pub fn new(class_id: Uuid, rtc_id: Uuid, created_by: AgentId) -> Self {
Self {
class_id,
rtc_id,
segments: None,
started_at: None,
stream_uri: None,
modified_segments: None,
adjusted_at: None,
transcoded_at: None,
created_by,
}
}
/// Runs the upsert and returns the inserted/updated row.
/// On conflict the whole row (including `created_at`) is overwritten.
pub async fn execute(self, conn: &mut PgConnection) -> sqlx::Result<Object> {
sqlx::query_as!(
Object,
r#"
INSERT INTO recording (
class_id, rtc_id, stream_uri, segments, modified_segments, started_at, adjusted_at,
transcoded_at, created_by
)
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9)
ON CONFLICT (class_id, created_by)
WHERE deleted_at IS NULL
DO UPDATE
SET (rtc_id, stream_uri, segments, modified_segments,
started_at, adjusted_at, transcoded_at, created_by, created_at) =
(EXCLUDED.rtc_id, EXCLUDED.stream_uri, EXCLUDED.segments, EXCLUDED.modified_segments, EXCLUDED.started_at, EXCLUDED.adjusted_at,
EXCLUDED.transcoded_at, EXCLUDED.created_by, NOW())
RETURNING
id,
class_id,
rtc_id,
stream_uri,
segments AS "segments!: Option<Segments>",
started_at,
modified_segments AS "modified_segments!: Option<Segments>",
created_at,
adjusted_at,
transcoded_at,
created_by AS "created_by: AgentId",
deleted_at
"#,
self.class_id,
self.rtc_id,
self.stream_uri,
self.segments as Option<Segments>,
self.modified_segments as Option<Segments>,
self.started_at,
self.adjusted_at,
self.transcoded_at,
self.created_by as AgentId,
)
.fetch_one(conn)
.await
}
}
////////////////////////////////////////////////////////////////////////////////
/// Stores adjusted segments for a webinar's recording and stamps `adjusted_at`.
pub struct AdjustWebinarUpdateQuery {
webinar_id: Uuid,
modified_segments: Segments,
}
impl AdjustWebinarUpdateQuery {
pub fn new(webinar_id: Uuid, modified_segments: Segments) -> Self {
Self {
webinar_id,
modified_segments,
}
}
/// Updates the (single) live recording of the webinar and returns it.
pub async fn execute(self, conn: &mut PgConnection) -> sqlx::Result<Object> {
sqlx::query_as!(
Object,
r#"
UPDATE recording
SET modified_segments = $2,
adjusted_at = NOW()
WHERE class_id = $1 AND deleted_at IS NULL
RETURNING
id,
class_id,
rtc_id,
stream_uri,
segments AS "segments!: Option<Segments>",
started_at,
modified_segments AS "modified_segments!: Option<Segments>",
created_at,
adjusted_at,
transcoded_at,
created_by AS "created_by: AgentId",
deleted_at
"#,
self.webinar_id,
self.modified_segments as Segments,
)
.fetch_one(conn)
.await
}
}
////////////////////////////////////////////////////////////////////////////////
/// Stores adjusted segments for a minigroup: the host's recording receives
/// the new segments, every other participant keeps its raw segments.
pub struct AdjustMinigroupUpdateQuery {
minigroup_id: Uuid,
modified_segments: Segments,
host: AgentId,
}
impl AdjustMinigroupUpdateQuery {
pub fn new(minigroup_id: Uuid, modified_segments: Segments, host: AgentId) -> Self {
Self {
minigroup_id,
modified_segments,
host,
}
}
/// Updates all live recordings of the minigroup and returns them.
pub async fn execute(self, conn: &mut PgConnection) -> sqlx::Result<Vec<Object>> {
sqlx::query_as!(
Object,
r#"
UPDATE recording
SET modified_segments =
CASE
WHEN created_by = $3 THEN $2
ELSE segments
END,
adjusted_at = NOW()
WHERE class_id = $1 AND deleted_at IS NULL
RETURNING
id,
class_id,
rtc_id,
stream_uri,
segments AS "segments!: Option<Segments>",
started_at,
modified_segments AS "modified_segments!: Option<Segments>",
created_at,
adjusted_at,
transcoded_at,
created_by AS "created_by: AgentId",
deleted_at
"#,
self.minigroup_id,
self.modified_segments as Segments,
self.host as AgentId
)
.fetch_all(conn)
.await
}
}
////////////////////////////////////////////////////////////////////////////////
/// Marks the class's recording as transcoded (sets `transcoded_at = NOW()`).
pub struct TranscodingUpdateQuery {
class_id: Uuid,
}
impl TranscodingUpdateQuery {
pub fn new(class_id: Uuid) -> Self {
Self { class_id }
}
/// Updates the (single) live recording of the class and returns it.
pub async fn execute(self, conn: &mut PgConnection) -> sqlx::Result<Object> {
sqlx::query_as!(
Object,
r#"
UPDATE recording
SET transcoded_at = NOW()
WHERE class_id = $1 AND deleted_at IS NULL
RETURNING
id,
class_id,
rtc_id,
stream_uri,
segments AS "segments!: Option<Segments>",
started_at,
modified_segments AS "modified_segments!: Option<Segments>",
created_at,
adjusted_at,
transcoded_at,
created_by AS "created_by: AgentId",
deleted_at
"#,
self.class_id,
)
.fetch_one(conn)
.await
}
}
////////////////////////////////////////////////////////////////////////////////
/// Inserts a fully-processed recording in one shot (used when converting
/// existing data): started/adjusted/transcoded timestamps are all set to NOW().
pub struct RecordingConvertInsertQuery {
class_id: Uuid,
rtc_id: Uuid,
segments: Segments,
modified_segments: Segments,
stream_uri: String,
created_by: AgentId,
}
impl RecordingConvertInsertQuery {
pub fn new(
class_id: Uuid,
rtc_id: Uuid,
segments: Segments,
modified_segments: Segments,
stream_uri: String,
created_by: AgentId,
) -> Self {
Self {
class_id,
rtc_id,
segments,
modified_segments,
stream_uri,
created_by,
}
}
/// Runs the insert and returns the created row.
pub async fn execute(self, conn: &mut PgConnection) -> sqlx::Result<Object> {
sqlx::query_as!(
Object,
r#"
INSERT INTO recording (class_id, rtc_id, segments, modified_segments, stream_uri, started_at, adjusted_at, transcoded_at, created_by)
VALUES ($1, $2, $3, $4, $5, NOW(), NOW(), NOW(), $6)
RETURNING
id,
class_id,
rtc_id,
stream_uri,
segments AS "segments!: Option<Segments>",
started_at,
modified_segments AS "modified_segments!: Option<Segments>",
created_at,
adjusted_at,
transcoded_at,
created_by AS "created_by: AgentId",
deleted_at
"#,
self.class_id,
self.rtc_id,
self.segments as Segments,
self.modified_segments as Segments,
self.stream_uri,
self.created_by as AgentId
)
.fetch_one(conn)
.await
}
}
////////////////////////////////////////////////////////////////////////////////
/// Records the result of a stream upload: raw segments, stream URI and the
/// stream's start time for a specific (class, rtc) pair.
pub struct StreamUploadUpdateQuery {
class_id: Uuid,
rtc_id: Uuid,
segments: Segments,
stream_uri: String,
started_at: DateTime<Utc>,
}
impl StreamUploadUpdateQuery {
pub fn new(
class_id: Uuid,
rtc_id: Uuid,
segments: Segments,
stream_uri: String,
started_at: DateTime<Utc>,
) -> Self {
Self {
class_id,
rtc_id,
segments,
stream_uri,
started_at,
}
}
/// Updates the matching live recording and returns it.
pub async fn execute(self, conn: &mut PgConnection) -> sqlx::Result<Object> {
sqlx::query_as!(
Object,
r#"
UPDATE recording
SET segments = $3,
stream_uri = $4,
started_at = $5
WHERE class_id = $1 AND rtc_id = $2 AND deleted_at IS NULL
RETURNING
id,
class_id,
rtc_id,
stream_uri,
segments AS "segments!: Option<Segments>",
started_at,
modified_segments AS "modified_segments!: Option<Segments>",
created_at,
adjusted_at,
transcoded_at,
created_by AS "created_by: AgentId",
deleted_at
"#,
self.class_id,
self.rtc_id,
self.segments as Segments,
self.stream_uri,
self.started_at,
)
.fetch_one(conn)
.await
}
}
////////////////////////////////////////////////////////////////////////////////
/// Soft-deletes all recordings of a class by stamping `deleted_at`.
pub struct DeleteQuery {
    class_id: Uuid,
}
impl DeleteQuery {
    pub fn new(class_id: Uuid) -> Self {
        Self { class_id }
    }
    /// Marks every live recording of the class as deleted and returns the
    /// number of rows affected.
    pub(crate) async fn execute(self, conn: &mut PgConnection) -> sqlx::Result<usize> {
        // Use `query!` rather than `query_as!(Object, ...)`: the statement has
        // no RETURNING clause and no rows are decoded, matching the style of
        // `remove_recording` above.
        sqlx::query!(
            r#"
UPDATE recording
SET deleted_at = NOW()
WHERE class_id = $1 AND deleted_at IS NULL
"#,
            self.class_id,
        )
        .execute(conn)
        .await
        .map(|r| r.rows_affected() as usize)
    }
}
////////////////////////////////////////////////////////////////////////////////
pub(crate) mod serde {
    //! (De)serialization helpers for [`Segments`] as millisecond bound tuples.
    pub(crate) mod segments {
        use super::super::{BoundedOffsetTuples, Segments};
        use crate::serde::milliseconds_bound_tuples;
        use serde::{de, ser};
        /// Serializes `Segments` as a list of millisecond `(start, end)` bounds.
        pub(crate) fn serialize<S>(value: &Segments, serializer: S) -> Result<S::Ok, S::Error>
        where
            S: ser::Serializer,
        {
            let tuples = BoundedOffsetTuples::from(value.to_owned());
            milliseconds_bound_tuples::serialize(&tuples, serializer)
        }
        /// Deserializes millisecond bound tuples back into `Segments`.
        pub(crate) fn deserialize<'de, D>(d: D) -> Result<Segments, D::Error>
        where
            D: de::Deserializer<'de>,
        {
            let tuples = milliseconds_bound_tuples::deserialize(d)?;
            Ok(Segments::from(tuples))
        }
    }
    /// Serializes an optional `Segments`, emitting `null` for `None`.
    pub(crate) fn segments_option<S>(
        opt: &Option<super::Segments>,
        serializer: S,
    ) -> Result<S::Ok, S::Error>
    where
        S: serde::ser::Serializer,
    {
        if let Some(value) = opt {
            segments::serialize(value, serializer)
        } else {
            serializer.serialize_none()
        }
    }
}
#[cfg(test)]
pub mod tests {
use super::*;
use crate::app::AppContext;
use crate::test_helpers::prelude::*;
// `AdjustMinigroupUpdateQuery` must ignore soft-deleted rows: with one deleted
// and one live recording in the class, only the live one is updated/returned.
#[tokio::test]
async fn test_minigroup_adjust_not_using_deleted_recordings() {
let agent = TestAgent::new("web", "user1", USR_AUDIENCE);
let state = TestState::new(TestAuthz::new()).await;
let mut conn = state.get_conn().await.expect("Failed to fetch connection");
let minigroup = factory::Minigroup::new(
random_string(),
USR_AUDIENCE.to_string(),
(Bound::Unbounded, Bound::Unbounded).into(),
Uuid::new_v4(),
Uuid::new_v4(),
)
.insert(&mut conn)
.await;
// Deleted recording
factory::Recording::new(minigroup.id(), Uuid::new_v4(), agent.agent_id().to_owned())
.deleted_at(Utc::now())
.insert(&mut conn)
.await;
// Actual recording
let recording =
factory::Recording::new(minigroup.id(), Uuid::new_v4(), agent.agent_id().to_owned())
.insert(&mut conn)
.await;
let recordings = AdjustMinigroupUpdateQuery::new(
minigroup.id(),
vec![(Bound::Included(0), Bound::Excluded(1000))].into(),
agent.agent_id().to_owned(),
)
.execute(&mut conn)
.await
.expect("Query failed");
// Only the live recording should have been touched.
assert_eq!(recordings.len(), 1);
assert_eq!(recordings[0].rtc_id(), recording.rtc_id());
}
/// Test-only insert query: unlike the production `RecordingInsertQuery`, it
/// also allows setting `deleted_at` so fixtures can create soft-deleted rows.
pub struct RecordingInsertQuery {
class_id: Uuid,
rtc_id: Uuid,
segments: Option<Segments>,
started_at: Option<DateTime<Utc>>,
stream_uri: Option<String>,
modified_segments: Option<Segments>,
adjusted_at: Option<DateTime<Utc>>,
transcoded_at: Option<DateTime<Utc>>,
created_by: AgentId,
// Soft-delete marker, settable only in tests.
deleted_at: Option<DateTime<Utc>>,
}
impl RecordingInsertQuery {
    /// Builds a query with only the required columns; everything optional
    /// defaults to NULL.
    pub fn new(class_id: Uuid, rtc_id: Uuid, created_by: AgentId) -> Self {
        Self {
            class_id,
            rtc_id,
            segments: None,
            started_at: None,
            stream_uri: None,
            modified_segments: None,
            adjusted_at: None,
            transcoded_at: None,
            created_by,
            deleted_at: None,
        }
    }
    /// Sets the adjusted segments column.
    pub fn modified_segments(mut self, modified_segments: Segments) -> Self {
        self.modified_segments = Some(modified_segments);
        self
    }
    /// Sets the adjustment timestamp.
    pub fn adjusted_at(mut self, adjusted_at: DateTime<Utc>) -> Self {
        self.adjusted_at = Some(adjusted_at);
        self
    }
    /// Sets the transcoding timestamp.
    pub fn transcoded_at(mut self, transcoded_at: DateTime<Utc>) -> Self {
        self.transcoded_at = Some(transcoded_at);
        self
    }
    /// Sets the stream URI.
    pub fn stream_uri(mut self, uri: String) -> Self {
        self.stream_uri = Some(uri);
        self
    }
    /// Sets the raw segments column.
    pub fn segments(mut self, segments: Segments) -> Self {
        self.segments = Some(segments);
        self
    }
    /// Sets the stream start time.
    pub fn started_at(mut self, started_at: DateTime<Utc>) -> Self {
        self.started_at = Some(started_at);
        self
    }
    /// Marks the row as soft-deleted at the given time.
    pub fn deleted_at(mut self, deleted_at: DateTime<Utc>) -> Self {
        self.deleted_at = Some(deleted_at);
        self
    }
    /// Inserts the row and returns it.
    pub async fn execute(self, conn: &mut PgConnection) -> sqlx::Result<Object> {
        sqlx::query_as!(
            Object,
            r#"
INSERT INTO recording (
class_id, rtc_id, stream_uri, segments, modified_segments, started_at, adjusted_at,
transcoded_at, created_by, deleted_at
)
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)
RETURNING
id,
class_id,
rtc_id,
stream_uri,
segments AS "segments!: Option<Segments>",
started_at,
modified_segments AS "modified_segments!: Option<Segments>",
created_at,
adjusted_at,
transcoded_at,
created_by AS "created_by: AgentId",
deleted_at
"#,
            self.class_id,
            self.rtc_id,
            self.stream_uri,
            self.segments as Option<Segments>,
            self.modified_segments as Option<Segments>,
            self.started_at,
            self.adjusted_at,
            self.transcoded_at,
            self.created_by as AgentId,
            self.deleted_at
        )
        .fetch_one(conn)
        .await
    }
}
}
|
//! This example demonstrates using the [`Merge`] [`TableOption`] to clarify
//! redundancies in a [`Table`] display.
//!
//! * Note how a custom theme is applied to give the [`Merged`](Merge) cells
//! a unique look.
//!
//! * Merge supports both [`Merge::vertical()`] and [`Merge::horizontal()`].
use tabled::{
settings::{
object::{Cell, Columns, Object, Rows},
style::{Border, BorderSpanCorrection, Style},
Merge, Modify,
},
Table, Tabled,
};
/// Builds the example table, transposes it, and merges equal horizontal
/// neighbours so redundant cells collapse into one.
fn main() {
    let rows = [
        DatabaseTable::new("database_1", "database_1", "table_1", 10712),
        DatabaseTable::new("database_1", "database_1", "table_2", 57),
        DatabaseTable::new("database_1", "database_1", "table_3", 57),
        DatabaseTable::new("database_2", "", "table_1", 72),
        DatabaseTable::new("database_2", "", "table_2", 75),
        DatabaseTable::new("database_3", "database_3", "table_1", 20),
        DatabaseTable::new("database_3", "", "table_2", 21339),
        DatabaseTable::new("database_3", "", "table_3", 141723),
    ];
    // Index + transpose so field names become the first column.
    let mut table = Table::builder(rows).index().transpose().build();
    config_theme(&mut table);
    // Merge equal cells, then fix up the borders broken by the spans.
    table.with(Merge::horizontal()).with(BorderSpanCorrection);
    println!("{table}");
}
/// One row of the example data set.
#[derive(Tabled)]
struct DatabaseTable {
#[tabled(rename = "db")]
db_name: &'static str,
// Left blank in the data to demonstrate cell merging.
origin_db: &'static str,
#[tabled(rename = "table")]
table_name: &'static str,
total: usize,
}
impl DatabaseTable {
    /// Convenience constructor for one data row.
    fn new(
        db: &'static str,
        origin: &'static str,
        table: &'static str,
        total: usize,
    ) -> Self {
        Self {
            db_name: db,
            origin_db: origin,
            table_name: table,
            total,
        }
    }
}
/// Applies the example theme: rounded frame without inner verticals, plus a
/// single vertical divider after the first column.
fn config_theme(table: &mut Table) {
    table.with(Style::rounded().remove_vertical());
    table.with(Modify::new(Columns::first()).with(Border::default().right('│')));
    // Make the divider meet the frame with proper tee corners in the header...
    table.with(
        Modify::new(Cell::new(0, 0)).with(
            Border::default()
                .corner_top_right('┬')
                .corner_bottom_right('┼'),
        ),
    );
    // ...and close it with an upside-down tee at the bottom edge.
    table.with(
        Modify::new(Columns::first().intersect(Rows::last()))
            .with(Border::default().corner_bottom_right('┴')),
    );
}
|
use super::model::*;
/// SendTransferOptions
///
/// * `threads` - Optionally specify the number of threads to use for PoW. This is ignored if `local_pow` is false.
/// * `inputs` - Optionally specify which inputs to use when trying to find funds for transfers
/// * `reference` - Optionally specify where to start searching for transactions to approve
/// * `remainder_address` - Optionally specify where to send remaining funds after spending from addresses, automatically generated if not specified
/// * `security` - Optionally specify the security to use for address generation (1-3). Default is 2
/// * `hmac_key` - Optionally specify an HMAC key to use for this transaction
#[derive(Clone, Debug, Default, PartialEq)]
pub struct SendTransferOptions {
pub threads: Option<usize>,
pub inputs: Option<Inputs>,
pub reference: Option<String>,
pub remainder_address: Option<String>,
pub security: Option<usize>,
pub hmac_key: Option<String>,
}
/// GetNewAddressOptions
///
/// * `security` - Security factor 1-3 with 3 being most secure
/// * `index` - How many iterations of generating to skip
/// * `total` - Number of addresses to generate. If total isn't provided, we generate until we find an unused address
#[derive(Clone, Debug, Default, PartialEq)]
pub struct GetNewAddressOptions {
pub security: Option<usize>,
pub index: Option<usize>,
pub total: Option<usize>,
}
/// SendTrytesOptions
///
/// * `threads` - Optionally specify how many threads to use, defaults to max available
/// * `reference` - Optionally used as the reference to start searching for transactions to approve
#[derive(Clone, Debug, Default, PartialEq)]
pub struct SendTrytesOptions {
pub threads: Option<usize>,
pub reference: Option<String>,
}
/// GetInputsOptions
///
/// * `start` - The start index for addresses to search
/// * `end` - The end index for addresses to search
/// * `threshold` - The amount of Iota you're trying to find in the wallet
/// * `security` - The security to use for address generation
#[derive(Clone, Debug, Default, PartialEq)]
pub struct GetInputsOptions {
pub start: Option<usize>,
pub end: Option<usize>,
pub threshold: Option<i64>,
pub security: Option<usize>,
}
/// PrepareTransfersOptions
///
/// * `inputs` - Optional inputs to use if you're sending iota
/// * `remainder_address` - Optional remainder address to use, if not provided, one will be generated
/// * `security` - Security to use when generating addresses (1-3)
/// * `hmac_key` - Optional key to use if you want to hmac the transfers
#[derive(Clone, Debug, Default, PartialEq)]
pub struct PrepareTransfersOptions {
pub inputs: Option<Inputs>,
pub remainder_address: Option<String>,
pub security: Option<usize>,
pub hmac_key: Option<String>,
}
|
// This file is part of Bit.Country
// Copyright (C) 2020-2021 Bit.Country.
// SPDX-License-Identifier: GPL-3.0-or-later WITH Classpath-exception-2.0
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
//! Benchmarks for the nft module.
#![cfg(feature = "runtime-benchmarks")]
use sp_std::prelude::*;
use sp_std::vec;
use frame_benchmarking::{account, benchmarks, impl_benchmark_test_suite, whitelisted_caller};
use frame_support::traits::Get;
use frame_system::RawOrigin;
use sp_runtime::traits::{AccountIdConversion, StaticLookup, UniqueSaturatedInto};
pub use crate::Pallet as NFTModule;
pub use crate::*;
use orml_traits::BasicCurrencyExtended;
use primitives::Balance;
// Benchmarking shim wrapping the real NFT pallet.
pub struct Pallet<T: Config>(crate::Pallet<T>);
// Aggregate config: benchmarks need the NFT, orml-nft and social-currencies pallets.
pub trait Config: crate::Config + orml_nft::Config + social_currencies::Config {}
// Seed used when deriving benchmark accounts.
const SEED: u32 = 0;
fn dollar(d: u32) -> Balance {
let d: Balance = d.into();
d.saturating_mul(1_000_000_000_000_000_000)
}
benchmarks! {
// create NFT group
create_group{
}: _(RawOrigin::Root ,vec![1], vec![1] )
// create NFT class: fund the caller and create a group first, then benchmark
// the class creation itself.
create_class{
let caller = whitelisted_caller();
let initial_balance = dollar(1000);
<T as social_currencies::Config>::NativeCurrency::update_balance(&caller, initial_balance.unique_saturated_into())?;
// NOTE(review): the Result of create_group is ignored here — presumably it
// cannot fail in this setup, but consider propagating with `?`; confirm.
crate::Pallet::<T>::create_group(RawOrigin::Root.into(), vec![1],vec![1]);
}: _(RawOrigin::Signed(caller),vec![1], 0u32.into(), TokenType::Transferable, CollectionType::Collectable)
}
|
/// A triangle with three positive side lengths.
pub struct Triangle {
    x: u64,
    y: u64,
    z: u64,
}
impl Triangle {
    /// Validates the sides and builds a triangle.
    ///
    /// Rejects any zero-length side and any triple violating the (inclusive)
    /// triangle inequality; degenerate triangles (`x + y == z`) are accepted.
    pub fn build(sides: [u64; 3]) -> Option<Triangle> {
        let [x, y, z] = sides;
        if x == 0 || y == 0 || z == 0 {
            return None;
        }
        if x + y >= z && y + z >= x && x + z >= y {
            Some(Triangle { x, y, z })
        } else {
            None
        }
    }
    /// All three sides equal.
    pub fn is_equilateral(&self) -> bool {
        self.x == self.y && self.y == self.z
    }
    /// No two sides equal.
    pub fn is_scalene(&self) -> bool {
        !self.is_isosceles()
    }
    /// At least two sides equal.
    pub fn is_isosceles(&self) -> bool {
        self.x == self.y || self.x == self.z || self.y == self.z
    }
}
|
use actix_web::{
HttpResponse,
web,
};
use super::super::{
service,
response,
request,
};
/// Handler: lists jobs for the requested page and page size.
pub fn index(payload: web::Query<request::job::Index>) -> HttpResponse {
    let jobs = service::job::index(payload.page, payload.page_size);
    response::job_index::response(&jobs)
}
pub fn show(payload: web::Query<request::job::Show>) -> HttpResponse {
let domain_job = &service::job::show(payload.job_id);
response::job_show::response(domain_job)
} |
use serde::{Deserialize, Serialize};
use serde_yaml::from_str;
use std::fmt::{self, Display, Formatter};
/// A deployable template definition, parsed from YAML.
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct Template {
pub image: String,
pub name: String,
pub description: String,
pub public: bool,
// Optional runtime settings (env vars, ports).
pub runtime: Option<RuntimeConfiguration>,
}
impl Template {
    /// Parses a YAML template definition, mapping any YAML error to its
    /// message string.
    pub fn parse(s: &str) -> Result<Self, String> {
        // `to_string()` is the idiomatic form of `format!("{}", err)`.
        from_str(s).map_err(|err| err.to_string())
    }
}
impl Display for Template {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
write!(
f,
"{}",
serde_yaml::to_string(self).map_err(|_| fmt::Error {})?
)
}
}
/// Runtime settings of a template: environment variables and exposed ports.
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct RuntimeConfiguration {
pub env: Option<Vec<NameValuePair>>,
pub ports: Option<Vec<Port>>,
}
/// A single environment variable.
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct NameValuePair {
pub name: String,
pub value: String,
}
/// An exposed port mapping.
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct Port {
pub name: String,
pub protocol: Option<String>,
pub path: String,
pub port: i32,
// Target port; defaults to `port` when absent — TODO confirm with consumers.
pub target: Option<i32>,
}
/// A named URL.
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct NameURLPair {
pub name: String,
pub url: String,
}
/// A named command with its working directory.
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct Command {
pub name: String,
pub run: String,
pub working_directory: String,
}
|
use std::path::Path;
use std::fs::File;
use std::io::Read;
use yaml_rust::{Yaml, YamlLoader};
use cocaine::service::tvm::Grant;
// Default metrics gathering interval, in seconds.
pub const GATHER_INTERVAL_SECS: u64 = 60;
/// Service configuration, assembled from YAML files via `Builder`.
#[derive(Debug)]
pub struct Config {
pub gather_interval: u64,
pub ticket_expire_sec: Option<i64>,
// Optional TVM credentials; absent when no `secure` section is configured.
pub secure: Option<Secure>,
}
/// TVM auth settings read from the `secure` YAML section.
#[derive(Debug, Clone)]
pub struct Secure {
// Auth module name ("mod" key in YAML).
pub md: String,
pub client_id: i64,
pub client_secret: String,
pub grant: Option<Grant>,
}
/// Mutable wrapper used while folding config files into a `Config`.
#[derive(Debug)]
struct Builder {
config: Config
}
/// Loads and parses a YAML file, returning `None` (after logging for a
/// missing file) on any failure.
fn yaml_from_file(path: &str) -> Option<Vec<Yaml>> {
    if !Path::new(path).is_file() {
        println!("file not exist {}", path);
        return None
    }
    // `read_to_string` replaces the manual open + read_to_string dance and
    // avoids juggling an intermediate File handle.
    let content = std::fs::read_to_string(path).ok()?;
    YamlLoader::load_from_str(&content).ok()
}
impl Config {
    /// A config populated with compile-time defaults.
    fn new_with_defaults() -> Config {
        Config {
            gather_interval: GATHER_INTERVAL_SECS,
            ticket_expire_sec: Some(600),
            secure: None,
        }
    }
    pub fn _new_from_default_files(files: &[&str]) -> Config {
        Self::new_from_files(files)
    }
    /// Builds a config by folding every recognized file into the defaults.
    /// Unknown extensions are logged; extensionless paths are silently skipped.
    pub fn new_from_files(paths: &[&str]) -> Config {
        let mut builder = Builder::new();
        for file in paths {
            println!("checking for config: {}", file);
            let ext = Path::new(file).extension().and_then(|e| e.to_str());
            match ext {
                Some("yaml") | Some("yml") => {
                    if let Some(yaml) = yaml_from_file(file) {
                        builder.update_from_yaml(yaml);
                    }
                }
                Some("toml") | Some("tml") => println!("toml format not implemented: {}", file),
                Some(_) => println!("unsupported config format: {}", file),
                None => {}
            }
        }
        builder.build()
    }
}
impl Secure {
    /// The auth module name, as an owned string.
    pub fn get_mod(&self) -> String {
        self.md.to_owned()
    }
}
impl Builder {
    fn new() -> Builder {
        Builder { config: Config::new_with_defaults() }
    }
    /// Installs the `secure` section into the config being built.
    fn add_secure(&mut self, md: String, client_id: i64, client_secret: String, grant: Option<Grant>) -> &mut Self {
        self.config.secure = Some(Secure { md, client_id, client_secret, grant });
        self
    }
    fn build(self) -> Config {
        self.config
    }
    /// Folds YAML documents into the config: reads the `secure` section
    /// (keys `mod`, `client_id`, `client_secret`) when all of it is present.
    fn update_from_yaml(&mut self, yaml: Vec<Yaml>) -> &Self {
        fn str_to_yaml(s: &str) -> Yaml {
            Yaml::from_str(s)
        }
        for doc in yaml {
            // Plain `if let` instead of the previous side-effectful
            // `and_then(..) -> None` chain, which abused combinators purely
            // for control flow.
            let secure = doc
                .as_hash()
                .and_then(|tb| tb.get(&str_to_yaml("secure")))
                .and_then(|tb| tb.as_hash());
            if let Some(tb) = secure {
                let md = tb.get(&str_to_yaml("mod")).and_then(|v| v.as_str());
                let client_id = tb.get(&str_to_yaml("client_id")).and_then(|v| v.as_i64());
                let client_secret = tb.get(&str_to_yaml("client_secret")).and_then(|v| v.as_str());
                if let (Some(md), Some(client_id), Some(client_secret)) = (md, client_id, client_secret) {
                    self.add_secure(md.to_owned(), client_id, client_secret.to_owned(), None);
                }
            }
        }
        self
    }
}
|
use crate::headers::from_headers::*;
use crate::prelude::*;
use crate::resources::Database;
use crate::ResourceQuota;
use azure_core::headers::{continuation_token_from_headers_optional, session_token_from_headers};
use azure_core::{collect_pinned_stream, prelude::*, Request, Response};
use chrono::{DateTime, Utc};
/// Options for a "list databases" request.
#[derive(Debug, Clone)]
pub struct ListDatabasesOptions {
consistency_level: Option<ConsistencyLevel>,
// -1 means "no limit" per page (the default below).
max_item_count: MaxItemCount,
}
impl ListDatabasesOptions {
/// Default options: no consistency level, unlimited page size.
pub fn new() -> Self {
Self {
consistency_level: None,
max_item_count: MaxItemCount::new(-1),
}
}
// Generated builder-style setters.
setters! {
consistency_level: ConsistencyLevel => Some(consistency_level),
max_item_count: i32 => MaxItemCount::new(max_item_count),
}
/// Writes these options onto the request as headers.
pub async fn decorate_request(&self, request: &mut Request) -> crate::Result<()> {
azure_core::headers::add_optional_header2(&self.consistency_level, request)?;
azure_core::headers::add_mandatory_header2(&self.max_item_count, request)?;
Ok(())
}
}
/// One page of a "list databases" response: the body fields plus metadata
/// lifted from the response headers.
#[derive(Clone, PartialEq, PartialOrd, Debug)]
pub struct ListDatabasesResponse {
pub rid: String,
pub databases: Vec<Database>,
pub count: u32,
pub activity_id: uuid::Uuid,
pub charge: f64,
pub session_token: String,
pub last_state_change: DateTime<Utc>,
pub resource_quota: Vec<ResourceQuota>,
pub resource_usage: Vec<ResourceQuota>,
pub schema_version: String,
pub service_version: String,
// Present when more pages are available.
pub continuation_token: Option<String>,
pub gateway_version: String,
}
impl ListDatabasesResponse {
// TODO: To remove pragma when list_databases has been re-enabled
#[allow(dead_code)]
/// Builds the typed response from a raw HTTP response: body fields come
/// from JSON, everything else from response headers.
pub(crate) async fn try_from(response: Response) -> crate::Result<Self> {
let (_status_code, headers, pinned_stream) = response.deconstruct();
let body = collect_pinned_stream(pinned_stream).await?;
// Shape of the JSON body only; header-derived fields are filled below.
#[derive(Deserialize, Debug)]
pub struct Response {
#[serde(rename = "_rid")]
rid: String,
#[serde(rename = "Databases")]
pub databases: Vec<Database>,
#[serde(rename = "_count")]
pub count: u32,
}
let response: Response = serde_json::from_slice(&body)?;
Ok(Self {
rid: response.rid,
databases: response.databases,
count: response.count,
charge: request_charge_from_headers(&headers)?,
activity_id: activity_id_from_headers(&headers)?,
session_token: session_token_from_headers(&headers)?,
last_state_change: last_state_change_from_headers(&headers)?,
resource_quota: resource_quota_from_headers(&headers)?,
resource_usage: resource_usage_from_headers(&headers)?,
schema_version: schema_version_from_headers(&headers)?.to_owned(),
service_version: service_version_from_headers(&headers)?.to_owned(),
continuation_token: continuation_token_from_headers_optional(&headers)?,
gateway_version: gateway_version_from_headers(&headers)?.to_owned(),
})
}
}
impl IntoIterator for ListDatabasesResponse {
type Item = Database;
type IntoIter = std::vec::IntoIter<Self::Item>;
fn into_iter(self) -> Self::IntoIter {
self.databases.into_iter()
}
}
|
use std::ffi::CStr;
use ash::version::DeviceV1_0;
use ash::vk;
use crate::vulkan::descriptor::DescriptorSetLayout;
use crate::vulkan::pipeline_layout::PipelineLayout;
use crate::vulkan::shader_module::ShaderModule;
use crate::vulkan::{Device, VkError};
/// A Vulkan compute pipeline together with the layouts it owns; all three
/// members are destroyed by `destroy`.
pub struct ComputePipeline {
set_layouts: Vec<DescriptorSetLayout>,
layout: PipelineLayout,
pipeline: vk::Pipeline,
}
impl ComputePipeline {
/// Descriptor set layouts owned by this pipeline.
#[inline]
pub fn descriptor_set_layouts(&self) -> &[DescriptorSetLayout] {
&self.set_layouts
}
/// The pipeline layout.
#[inline]
pub fn layout(&self) -> &PipelineLayout {
&self.layout
}
/// Raw Vulkan pipeline handle.
#[inline]
pub fn handle(&self) -> vk::Pipeline {
self.pipeline
}
/// Builds a compute pipeline with entry point "main".
///
/// Takes ownership of `set_layouts`; on any failure every already-created
/// resource (set layouts, then the pipeline layout) is destroyed before
/// the error is returned, so nothing leaks.
pub unsafe fn new(
device: &Device,
shader_module: &ShaderModule,
set_layouts: Vec<DescriptorSetLayout>,
push_constants: &[vk::PushConstantRange],
) -> Result<Self, VkError> {
let mut layout = match PipelineLayout::new(device, &set_layouts, &push_constants) {
Ok(l) => l,
Err(e) => {
// Layout creation failed: free the set layouts we own, then bail.
set_layouts.into_iter().for_each(|mut l| l.destroy(device));
return Err(e);
}
};
let entry_name = CStr::from_bytes_with_nul(b"main\0").unwrap();
let stage = vk::PipelineShaderStageCreateInfo::builder()
.stage(vk::ShaderStageFlags::COMPUTE)
.module(shader_module.handle())
.name(entry_name)
.build();
let info = vk::ComputePipelineCreateInfo::builder()
.layout(layout.handle())
.stage(stage)
.build();
let infos = [info];
let pipelines_result =
device.create_compute_pipelines(vk::PipelineCache::null(), &infos, None);
let pipeline = match pipelines_result {
Ok(p) => p[0],
Err((_, e)) => {
// Pipeline creation failed: tear down in reverse creation order.
layout.destroy(device);
set_layouts.into_iter().for_each(|mut l| l.destroy(device));
return Err(VkError {
code: e,
vk_fn:
"device.create_compute_pipelines(vk::PipelineCache::null(), &infos, None)",
module: module_path!(),
file: file!(),
line_and_column: (line!(), column!()),
});
}
};
Ok(Self {
set_layouts,
layout,
pipeline,
})
}
/// Destroys all owned Vulkan objects. Must only be called once.
pub unsafe fn destroy(&mut self, device: &Device) {
self.set_layouts.iter_mut().for_each(|l| l.destroy(device));
self.layout.destroy(device);
device.destroy_pipeline(self.pipeline, None);
}
}
|
use std::fmt;
/// A top-level node of the parsed source tree.
#[derive(Debug, PartialEq, Clone)]
pub enum AST {
    /// An entire source file: its top-level nodes in order.
    File(Vec<AST>),
    /// Placeholder node — presumably emitted on a parse failure so parsing
    /// can continue; confirm against the parser.
    Error,
    /// A bare expression at top level.
    Expr(Expression),
    /// Import of a module by name.
    Import(String),
    /// Record declaration: name, optional generic type names, field types.
    Record(String, Option<Vec<String>>, Vec<PrimitiveType>),
    /// Type definition, optionally generic, with optional variants.
    Typedef {
        name: String,
        type_names: Option<Vec<String>>,
        variants: Option<Vec<TypeKind>>,
    },
    /// Constant binding; `export` controls external visibility.
    Constant {
        ty: PrimitiveType,
        export: bool,
        value: Box<Expression>,
    },
    /// Global binding; `export` controls external visibility.
    Global {
        ty: PrimitiveType,
        export: bool,
        value: Box<Expression>,
    },
}
/// A variant inside a `Typedef`.
#[derive(Debug, PartialEq, Clone)]
pub enum TypeKind {
    /// Wrapper around a single existing type.
    Newtype(PrimitiveType),
    /// Named variant with an optional payload of field types.
    Variant(String, Option<Vec<PrimitiveType>>),
}
/// An expression node in the AST.
#[derive(Debug, PartialEq, Clone)]
pub enum Expression {
    Atom(AtomKind),
    /// Binary operation; the operator is kept as its source string.
    Binary(Box<Expression>, String, Box<Expression>),
    /// A sequence of expressions evaluated in order.
    Block(Vec<Expression>),
    /// Explicit return of the inner expression.
    Ret(Box<Expression>),
    /// Variable declaration with optional initializer.
    Decl {
        ty: PrimitiveType,
        value: Option<Box<Expression>>,
        exterior_bind: BindType,
    },
    /// Assignment of `value` to an lvalue.
    Assign {
        lval: LValue,
        value: Box<Expression>,
    },
    /// Function call, with explicit type arguments.
    Call {
        name: String,
        types: Vec<PrimitiveType>,
        args: Vec<Expression>,
    },
    /// Function definition; `body` is `None` for declarations without a body
    /// (presumably externs — confirm against the parser).
    Function {
        name: String,
        ty: FunctionType,
        body: Option<Box<Expression>>,
        attributes: Vec<String>,
        type_names: Vec<String>,
        export: bool,
    },
    /// Conditional; `is_expr` marks whether the `if` is used in value position.
    If {
        cond_expr: Box<Expression>,
        then_expr: Box<Expression>,
        else_expr: Option<Box<Expression>>,
        is_expr: bool,
    },
    /// Record construction: `record` name, type arguments, field values.
    RecordInit {
        record: String,
        types: Vec<PrimitiveType>,
        values: Vec<AtomKind>,
    },
    /// `while`/`until` loop (see [`LoopType`]).
    Loop {
        loop_type: LoopType,
        cond_expr: Box<Expression>,
        body: Box<Expression>,
    },
}
/// The target of an assignment.
///
/// Note: `Debug` is implemented manually below (source-style rendering),
/// so it is deliberately not derived here.
#[derive(PartialEq, Clone)]
pub enum LValue {
    /// A plain identifier.
    Ident(String),
    /// Dotted field access, stored as its path segments.
    Accessor(Vec<String>),
    /// Assignment through a dereference (`*lval`).
    Deref(Box<LValue>),
    /// Indexed assignment (`lval[expr]`).
    Index(Box<LValue>, Box<Expression>),
}
/// Mutability of a declared binding.
#[derive(Debug, PartialEq, Clone)]
pub enum BindType {
    Mutable,
    Immutable,
    Constant,
}
/// Loop flavor: `While` runs while the condition holds, `Until` presumably
/// runs until it does (i.e. a negated condition) — confirm in codegen.
#[derive(Debug, PartialEq, Clone)]
pub enum LoopType {
    While,
    Until,
}
/// The type of a function: externness, return type and parameter types.
#[derive(Debug, PartialEq, Eq, Clone, Hash)]
pub struct FunctionType {
    /// True for external (FFI) functions.
    pub ext: bool,
    pub ret_ty: Box<PrimitiveType>,
    pub params: Vec<PrimitiveType>,
}
impl From<FunctionType> for PrimitiveType {
fn from(func: FunctionType) -> Self {
PrimitiveType::Function(func)
}
}
/// A type in the language's type system.
#[derive(Debug, PartialEq, Eq, Clone, Hash)]
pub enum PrimitiveType {
    Unit,
    Str,
    /// C-style variadic marker for extern signatures.
    Varargs,
    Bool,
    Char,
    /// Signed integer of the given width.
    Int(IntSize),
    /// Unsigned integer of the given width.
    UInt(IntSize),
    Float(FloatSize),
    /// Reference to a named type, unresolved at this stage.
    TypeRef(String),
    /// Reference to a generic type with its type arguments.
    GenericTypeRef(String, Vec<PrimitiveType>),
    Newtype(String),
    VariantType(String),
    /// A concrete variant of a named variant type.
    Variant {
        parent: String,
        variant: String,
    },
    Function(FunctionType),
    /// A named slot (e.g. parameter) with an optional resolved type.
    NamedType {
        name: String,
        ty: Option<Box<PrimitiveType>>,
    },
    Ref(Box<PrimitiveType>),
    Pointer(Box<PrimitiveType>),
    /// Fixed-length array.
    Array {
        ty: Box<PrimitiveType>,
        len: u32,
    },
    /// Dynamically-sized view over an element type.
    Slice {
        ty: Box<PrimitiveType>,
    },
    Symbol(String),
    /// Heap-allocated value.
    Box(Box<PrimitiveType>),
}
impl PrimitiveType {
    /// Peels a `NamedType` wrapper down to the type it names; any other
    /// (non-function) type is returned as-is.
    ///
    /// # Panics
    /// Panics with an ICE message when called on a bare `Function` type.
    pub fn inner(&self) -> &PrimitiveType {
        match self {
            // Stable rest-pattern + deref coercion instead of the nightly-only
            // `box` pattern, so this compiles without `#![feature(box_patterns)]`.
            PrimitiveType::NamedType { ty: Some(ty), .. } => ty,
            PrimitiveType::Function(..) => panic!("ICE primty inner {:?}", self),
            other => other,
        }
    }
    /// Whether this function type is external (FFI).
    ///
    /// # Panics
    /// Panics with an ICE message when called on a non-function type.
    pub fn ext(&self) -> bool {
        match self {
            // `*ext` copies the bool; no need for `.clone()` on a Copy type.
            PrimitiveType::Function(FunctionType { ext, .. }) => *ext,
            _ => panic!("ICE PrimitiveType ext {:?}", self),
        }
    }
}
/// An atomic (non-operator) expression element.
#[derive(Debug, PartialEq, Clone)]
pub enum AtomKind {
    Unit,
    SymbolLiteral(String),
    Bool(Boolean),
    /// Integer literal with its inferred/annotated type.
    Integer(u64, PrimitiveType),
    /// Float literal with its inferred/annotated type.
    Floating(f64, PrimitiveType),
    StringLiteral(String),
    CharLiteral(char),
    /// A full expression appearing in atom position.
    Expr(Box<Expression>),
    /// A parenthesized expression, kept distinct for the formatter/parser.
    Parenthesized(Box<Expression>),
    /// Unary operation; the operator is kept as its source string.
    Unary(String, Box<AtomKind>),
    Ident(String),
    /// Dotted access path, stored as segments.
    Access(Vec<String>),
    Deref(Box<AtomKind>),
    Ref(Box<AtomKind>),
    Index(Box<AtomKind>, Box<Expression>),
    /// Cast of an atom to a target type.
    Cast(Box<AtomKind>, PrimitiveType),
    /// Array literal.
    Array(Vec<Expression>),
    /// Variant test (`atom is VariantName`).
    Is(Box<AtomKind>, String),
    /// Named argument (`name: value`).
    NamedValue { name: String, val: Box<Expression> },
}
/// Boolean literal, kept as its own enum rather than `bool`.
#[derive(Debug, PartialEq, Clone, Eq)]
pub enum Boolean {
    True,
    False,
}
/// Bit width of an integer type.
#[derive(Debug, PartialEq, Clone, PartialOrd, Eq, Hash)]
pub enum IntSize {
    Bits8,
    Bits16,
    Bits32,
    Bits64,
}
impl From<usize> for IntSize {
    /// Maps a byte width (1, 2, 4 or 8) to the matching bit-size variant.
    ///
    /// Any other width is considered a caller bug and hits `unreachable!`.
    fn from(size: usize) -> IntSize {
        match size {
            8 => IntSize::Bits64,
            4 => IntSize::Bits32,
            2 => IntSize::Bits16,
            1 => IntSize::Bits8,
            _ => unreachable!(),
        }
    }
}
/// Bit width of a floating-point type.
#[derive(Debug, PartialEq, Clone, Eq, Hash)]
pub enum FloatSize {
    Bits32,
    Bits64,
}
impl fmt::Debug for LValue {
    /// Renders the lvalue the way it would appear in source code
    /// (identifier, dotted path, `*target`, or `target[]`).
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let rendered = match self {
            LValue::Ident(name) => name.clone(),
            LValue::Accessor(parts) => parts.join("."),
            LValue::Deref(inner) => format!("*{:?}", inner),
            LValue::Index(target, _) => format!("{:?}[]", target),
        };
        f.write_str(&rendered)
    }
}
impl From<&LValue> for String {
    /// Produces the same source-style rendering as the manual `Debug` impl.
    fn from(lval: &LValue) -> String {
        match lval {
            LValue::Ident(name) => name.to_owned(),
            LValue::Accessor(parts) => parts.join("."),
            LValue::Deref(inner) => format!("*{:?}", inner),
            LValue::Index(target, _) => format!("{:?}[]", target),
        }
    }
}
|
use super::{ConnectionError, Request, Subscribe, Subscription};
use crate::rpc::{Rpc, RpcResponse, SubscriptionRequest};
use serde::de::DeserializeOwned;
/// An RPC connection over an arbitrary request transport.
pub struct Connection<T: Request> {
    pub(super) transport: T, // subscription uses this field
    // Pool of reusable request ids (filled with 0..1000 in `new`); ids are
    // popped for each call and immediately recycled to the back.
    id_pool: std::collections::VecDeque<usize>,
}
impl<T> Connection<T>
where
    T: Request,
{
    /// Creates a connection over `transport` with a pool of 1000 request ids.
    pub fn new(transport: T) -> Self {
        Self {
            transport,
            id_pool: (0..1000).collect(),
        }
    }
    /// Performs one RPC round-trip: tags the request with a pooled id,
    /// serializes it, sends it over the transport and deserializes the result.
    ///
    /// Fails with `NoTicketId` when the id pool is exhausted, or with
    /// `Serde` when (de)serialization fails.
    pub fn call<U>(&mut self, mut rpc: Rpc<U>) -> Result<U, ConnectionError>
    where
        U: DeserializeOwned + std::fmt::Debug,
    {
        // Guard clause: bail out early when no request id is available.
        let id = self.id_pool.pop_front().ok_or(ConnectionError::NoTicketId)?;
        rpc.id = id;
        // Recycle the id right away so the pool never shrinks.
        self.id_pool.push_back(id);
        let payload =
            serde_json::to_string(&rpc).map_err(|e| ConnectionError::Serde(e.to_string()))?;
        let raw_response = self.transport.request(payload)?;
        let parsed: RpcResponse<U> = serde_json::from_str(&raw_response)
            .map_err(|e| ConnectionError::Serde(e.to_string()))?;
        Ok(parsed.result)
    }
}
impl<T> Connection<T>
where
    T: Request + Subscribe,
{
    /// Starts a new subscription.
    /// Use one of these rpc generating [functions](crate::rpc::sub) to provide the subscription request.
    /// Returns a [subscription](Subscription) which you can poll for new items.
    pub fn subscribe<U: DeserializeOwned + std::fmt::Debug>(
        &mut self,
        sub_request: SubscriptionRequest<U>,
    ) -> Result<Subscription<T, U>, ConnectionError> {
        // Fork the transport so the subscription has its own channel.
        // NOTE(review): the id pool is cloned, so the forked connection reuses
        // the same id values as `self` — confirm ids only need to be unique
        // per transport.
        let mut connection = Connection {
            transport: self.transport.fork()?,
            id_pool: self.id_pool.clone(),
        };
        // The subscribe RPC returns the server-assigned subscription id.
        let subscription_id = connection.call(sub_request.rpc)?;
        Ok(Subscription {
            id: subscription_id,
            connection,
            result_type: std::marker::PhantomData,
        })
    }
}
|
use SafeWrapper;
use ir::{User, Instruction, Value, TerminatorInst, Block};
use sys;
/// An indirect branch instruction: jumps to a computed address chosen from a
/// set of possible destination blocks.
pub struct IndirectBrInst<'ctx>(TerminatorInst<'ctx>);
impl<'ctx> IndirectBrInst<'ctx>
{
    /// Creates a new indirect branch.
    ///
    /// `address` is the computed jump target; `destinations` enumerates every
    /// block the branch may land in (required by LLVM's `indirectbr`).
    pub fn new(address: &Value,
               destinations: &[&Block]) -> Self {
        let mut br = unsafe {
            let inner = sys::LLVMRustCreateIndirectBrInst(address.inner());
            // Wrap the raw pointer up through the value hierarchy.
            wrap_value!(inner => User => Instruction => TerminatorInst => IndirectBrInst)
        };
        // Destinations are registered one by one on the freshly built instruction.
        for dest in destinations { br.add_destination(dest) }
        br
    }
    /// Adds a possible destination block.
    pub fn add_destination(&mut self, block: &Block) {
        unsafe { sys::LLVMRustIndirectBrInstAddDestination(self.inner(), block.inner()) }
    }
}
// Wires IndirectBrInst into the TerminatorInst subtype hierarchy
// (up/downcast plumbing — see the macro definition for details).
impl_subtype!(IndirectBrInst => TerminatorInst);
|
// General
//pub mod bigint;
pub mod aggregate;
pub mod carbon;
pub mod config;
pub mod consul;
pub mod errors;
pub mod management;
pub mod peer;
pub mod raft;
pub mod server;
pub mod stats;
pub mod task;
pub mod udp;
pub mod util;
use std::collections::HashMap;
use std::io;
use std::str::FromStr;
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::{Arc, Mutex};
use std::time::{Duration, Instant};
use std::{panic, process, thread};
use slog::{info, o};
use futures::future::empty;
use futures::{Future as Future1, IntoFuture};
use futures3::channel::mpsc;
use futures3::stream::StreamExt;
use futures3::future::{pending, TryFutureExt};
use serde_derive::{Deserialize, Serialize};
use slog::{error, warn};
use tokio2::runtime::Builder;
use tokio2::time::interval_at;
use tokio::runtime::current_thread::Runtime;
use tokio::timer::Delay;
use bioyino_metric::metric::Metric;
use bioyino_metric::name::MetricName;
use once_cell::sync::Lazy;
use crate::carbon::carbon_timer;
use crate::config::{Command, Consul, Network, System};
use crate::consul::ConsulConsensus;
use crate::errors::GeneralError;
use crate::management::MgmtClient;
use crate::peer::{NativeProtocolServer, NativeProtocolSnapshot};
use crate::raft::start_internal_raft;
pub use crate::stats::OwnStats;
use crate::task::TaskRunner;
use crate::udp::{start_async_udp, start_sync_udp};
use crate::util::{retry_with_backoff, setup_logging, try_resolve, Backoff};
// floating type used all over the code, can be changed to f32, to use less memory at the price of
// precision
// TODO: make it into a compilation feature
pub type Float = f64;
// a type to store pre-aggregated data, keyed by metric name
pub type Cache = HashMap<MetricName, Metric<Float>>;
/// Runtime state of the leader-election consensus (see `CONSENSUS_STATE`).
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
#[serde(rename_all = "kebab-case", deny_unknown_fields)]
pub enum ConsensusState {
    Enabled,
    Paused,
    Disabled,
}
impl FromStr for ConsensusState {
    type Err = GeneralError;

    /// Parses a state name, accepting both the bare verb and the
    /// past-participle form (e.g. "enable" and "enabled").
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        match s {
            "enable" | "enabled" => Ok(Self::Enabled),
            "disable" | "disabled" => Ok(Self::Disabled),
            "paused" | "pause" => Ok(Self::Paused),
            _ => Err(GeneralError::UnknownState),
        }
    }
}
/// Which consensus backend to run (configured in `System`).
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
#[serde(rename_all = "kebab-case", deny_unknown_fields)]
pub enum ConsensusKind {
    None,
    Consul,
    Internal,
}
// Global consensus state; starts disabled and is switched by the chosen
// consensus backend at startup.
pub static CONSENSUS_STATE: Lazy<Mutex<ConsensusState>> = Lazy::new(|| Mutex::new(ConsensusState::Disabled));
// Whether this node currently considers itself the leader; seeded from the
// `start_as_leader` config option and updated at runtime.
pub static IS_LEADER: AtomicBool = AtomicBool::new(false);
/// Entry point: loads config, optionally daemonizes, then wires up all the
/// long-running pieces (counting workers, consensus, snapshot peer server and
/// sender, management HTTP server, carbon backend, UDP listeners) and parks
/// on the main runtime forever.
fn main() {
    let (system, command) = System::load().expect("loading config");
    // Keep an untouched copy of the config; the original is destructured below.
    let config = system.clone();
    #[allow(clippy::unneeded_field_pattern)]
    let System {
        verbosity_console,
        verbosity_syslog,
        daemon,
        network:
            Network {
                listen,
                peer_listen,
                peer_client_bind,
                mgmt_listen,
                bufsize,
                multimessage,
                mm_packets,
                mm_async,
                mm_timeout,
                buffer_flush_time,
                buffer_flush_length: _,
                greens,
                async_sockets,
                nodes,
                snapshot_interval,
                max_snapshots,
            },
        raft,
        consul:
            Consul {
                start_as: consul_start_as,
                agent,
                session_ttl: consul_session_ttl,
                renew_time: consul_renew_time,
                key_name: consul_key,
            },
        metrics: _,
        aggregation,
        naming,
        carbon,
        n_threads,
        w_threads,
        c_threads,
        stats_interval: s_interval,
        task_queue_size,
        start_as_leader,
        stats_prefix,
        consensus,
    } = system;
    if daemon && verbosity_syslog.is_off() {
        eprintln!("syslog is disabled, while daemon mode is on, no logging will be performed");
    }
    // Since daemonizing closes all opened resources, including syslog, we must do it before everything else
    if daemon {
        // SAFETY: plain libc call with no Rust invariants involved.
        let result = unsafe { libc::daemon(0, 0) };
        if result < 0 {
            println!("daemonize failed: {}", io::Error::last_os_error());
            std::process::exit(1);
        }
    }
    let rlog = setup_logging(daemon, verbosity_console, verbosity_syslog);
    // this lets root logger live as long as it needs
    let _guard = slog_scope::set_global_logger(rlog.clone());
    slog_stdlog::init().unwrap();
    // Main multi-threaded tokio 0.2 runtime; older components run on separate
    // legacy runtimes in their own threads (see below).
    let mut runtime = Builder::new()
        .thread_name("bioyino_main")
        .threaded_scheduler()
        .core_threads(c_threads)
        .enable_all()
        .build()
        .expect("creating runtime for main thread");
    // Management query mode: send a single command to a running instance and exit.
    if let Command::Query(command, dest) = command {
        let dest = try_resolve(&dest);
        let command = MgmtClient::new(rlog.clone(), dest, command);
        runtime.block_on(command.run()).unwrap_or_else(|e| {
            warn!(rlog,
                "error sending command";
                "dest"=>format!("{}", &dest),
                "error"=> format!("{}", e),
            )
        });
        return;
    }
    let config = Arc::new(config);
    let log = rlog.new(o!("thread" => "main"));
    // To avoid strange side effects, like blocking the whole process in unknown state
    // we prefer to exit after any panic in any thread
    // So we set the panic hook to exit
    let orig_hook = panic::take_hook();
    panic::set_hook(Box::new(move |panic_info| {
        // invoke the default handler and exit the process
        orig_hook(panic_info);
        process::exit(42);
    }));
    // Start counting threads: each worker owns a TaskRunner fed via a channel.
    info!(log, "starting counting threads");
    let mut chans = Vec::with_capacity(w_threads);
    for i in 0..w_threads {
        let (tx, mut rx) = mpsc::channel(task_queue_size);
        chans.push(tx);
        let tlog = log.clone();
        let cf = config.clone();
        thread::Builder::new()
            .name(format!("bioyino_cnt{}", i))
            .spawn(move || {
                let mut runner = TaskRunner::new(tlog, cf, 8192);
                // Single-threaded runtime per worker: tasks are processed strictly in order.
                let mut runtime = Builder::new().basic_scheduler().enable_all().build().expect("creating runtime for test");
                let tasks = async {
                    while let Some(task) = rx.next().await {
                        runner.run(task);
                    }
                };
                runtime.block_on(tasks);
            })
            .map_err(|e| error!(log, "worker thread dead: {:?}", e))
            .expect("starting counting worker thread");
    }
    let own_stat_log = log.clone();
    let stats_prefix = stats_prefix.trim_end_matches('.').to_string();
    // Spawn the future gathering bioyino's own stats
    info!(own_stat_log, "starting own stats counter");
    let own_stats = OwnStats::new(s_interval, stats_prefix, chans.clone(), own_stat_log);
    runtime.spawn(async { own_stats.run().await });
    let compat_log = rlog.new(o!("thread" => "compat"));
    let snap_err_log = compat_log.clone();
    // TODO: unfortunately, the old entities are using timers and/or older tokio spawn function
    // therefore they are incompatible between old and new tokio runtimes, even using `compat`
    // adaptor trait
    //
    // this thread can be eliminated after migrating sender and receiver to latest capnp and std
    // futures
    thread::Builder::new()
        .name("bioyino_compat".into())
        .spawn(move || {
            let mut runtime = Runtime::new().expect("creating runtime for counting worker");
            // Init leader state before starting backend
            IS_LEADER.store(start_as_leader, Ordering::SeqCst);
            let consensus_log = compat_log.clone();
            match consensus {
                // Built-in raft consensus: runs on its own thread after a
                // configurable start delay.
                ConsensusKind::Internal => {
                    let log = compat_log.clone();
                    let flog = compat_log.clone();
                    thread::Builder::new()
                        .name("bioyino_raft".into())
                        .spawn(move || {
                            let mut runtime = Runtime::new().expect("creating runtime for raft thread");
                            if start_as_leader {
                                warn!(
                                    log,
                                    "Starting as leader with enabled consensus. More that one leader is possible before consensus settle up."
                                );
                            }
                            // Delay raft startup so peers have a chance to come up first.
                            let d = Delay::new(Instant::now() + Duration::from_millis(raft.start_delay));
                            let log = log.clone();
                            let delayed = d.map_err(|_| ()).and_then(move |_| {
                                let mut con_state = CONSENSUS_STATE.lock().unwrap();
                                *con_state = ConsensusState::Enabled;
                                info!(log, "starting internal consensus"; "initial_state"=>format!("{:?}", *con_state));
                                start_internal_raft(raft, consensus_log);
                                Ok(())
                            });
                            runtime.spawn(delayed);
                            runtime.block_on(empty::<(), ()>()).expect("raft thread failed");
                            info!(flog, "consensus thread stopped");
                        })
                        .expect("starting counting worker thread");
                }
                ConsensusKind::Consul => {
                    warn!(
                        compat_log,
                        "CONSUL CONSENSUS IS DEPRECATED AND WILL BE REMOVED IN VERSION 0.8, CONSIDER USING BUILT IN RAFT"
                    );
                    if start_as_leader {
                        warn!(
                            compat_log,
                            "Starting as leader with enabled consensus. More that one leader is possible before consensus settle up."
                        );
                    }
                    // Scope the lock so it is released before the consensus future runs.
                    {
                        let mut con_state = CONSENSUS_STATE.lock().unwrap();
                        info!(compat_log, "starting consul consensus"; "initial_state"=>format!("{:?}", con_state));
                        *con_state = consul_start_as;
                    }
                    let mut consensus = ConsulConsensus::new(&consensus_log, agent, consul_key);
                    consensus.set_session_ttl(Duration::from_millis(consul_session_ttl as u64));
                    consensus.set_renew_time(Duration::from_millis(consul_renew_time as u64));
                    runtime.spawn(consensus.into_future().map_err(|_| ()));
                }
                ConsensusKind::None => {
                    if !start_as_leader {
                        // starting as non-leader in this mode can be useful for agent mode
                        // so we don't disorient user with warnings
                        info!(
                            compat_log,
                            "Starting as non-leader with disabled consensus. No metrics will be sent until leader is switched on by command"
                        );
                    }
                }
            }
            // Park the compat runtime forever; spawned futures keep running on it.
            runtime.block_on(empty::<(), ()>()).expect("compat thread failed");
        })
        .expect("starting compat thread");
    // settings safe for asap restart: aggressive retry so a restarted peer
    // can rebind quickly
    info!(log, "starting snapshot receiver");
    let peer_server_bo = Backoff {
        delay: 1,
        delay_mul: 1f32,
        delay_max: 1,
        retries: ::std::usize::MAX,
    };
    let server_chans = chans.clone();
    let server_log = log.clone();
    let peer_server = retry_with_backoff(peer_server_bo, move || {
        let server_log = server_log.clone();
        let peer_server = NativeProtocolServer::new(server_log.clone(), peer_listen, server_chans.clone());
        peer_server.run().inspect_err(move |e| {
            info!(server_log, "error running snapshot server"; "error"=>format!("{}", e));
        })
    });
    runtime.spawn(peer_server);
    info!(log, "starting snapshot sender");
    let snapshot = NativeProtocolSnapshot::new(
        &log,
        nodes,
        peer_client_bind,
        Duration::from_millis(snapshot_interval as u64),
        &chans,
        max_snapshots,
    );
    runtime.spawn(snapshot.run().map_err(move |e| {
        s!(peer_errors);
        info!(snap_err_log, "error running snapshot sender"; "error"=>format!("{}", e));
    }));
    info!(log, "starting management server");
    let m_serv_log = rlog.clone();
    let m_server = async move { hyper13::Server::bind(&mgmt_listen).serve(management::MgmtServer(m_serv_log, mgmt_listen)).await };
    runtime.spawn(m_server);
    info!(log, "starting carbon backend");
    let carbon_log = log.clone();
    let carbon_t = carbon_timer(log.clone(), carbon, aggregation, naming, chans.clone())
        .map_err(move |e| error!(carbon_log, "running carbon thread"; "error" => format!("{}", e)));
    runtime.spawn(carbon_t);
    // For each out sync thread we create the buffer flush timer, that sets the atomic value to 1
    // every interval
    let mut flush_flags = Arc::new(Vec::new());
    if let Some(flags) = Arc::get_mut(&mut flush_flags) {
        for _ in 0..n_threads {
            flags.push(AtomicBool::new(false));
        }
    }
    if buffer_flush_time > 0 {
        let flags = flush_flags.clone();
        let flush_timer = async move {
            let dur = Duration::from_millis(buffer_flush_time);
            let mut timer = interval_at(tokio2::time::Instant::now() + dur, dur);
            loop {
                timer.tick().await;
                info!(rlog, "buffer flush requested");
                // `.last()` just drives the lazy iterator; `swap` is the side effect.
                flags.iter().map(|flag| flag.swap(true, Ordering::SeqCst)).last();
            }
        };
        runtime.spawn(flush_timer);
    }
    // Choose the UDP ingestion strategy based on the multimessage config flag.
    if multimessage {
        start_sync_udp(
            log,
            listen,
            &chans,
            config.clone(),
            n_threads,
            bufsize,
            mm_packets,
            mm_async,
            mm_timeout,
            flush_flags.clone(),
        );
    } else {
        start_async_udp(
            log,
            listen,
            &chans,
            config.clone(),
            n_threads,
            greens,
            async_sockets,
            bufsize,
            flush_flags.clone(),
        );
    }
    // Block the main thread forever; all work happens on spawned tasks/threads.
    runtime.block_on(pending::<()>());
}
|
// svd2rust-style generated register accessor types — regenerate from the SVD
// rather than editing by hand.
#[doc = "Register `PCROP2ASR` reader"]
pub type R = crate::R<PCROP2ASR_SPEC>;
#[doc = "Register `PCROP2ASR` writer"]
pub type W = crate::W<PCROP2ASR_SPEC>;
#[doc = "Field `PCROP2A_STRT` reader - PCROP2A area start offset, bank2"]
pub type PCROP2A_STRT_R = crate::FieldReader<u16>;
#[doc = "Field `PCROP2A_STRT` writer - PCROP2A area start offset, bank2"]
pub type PCROP2A_STRT_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 9, O, u16>;
impl R {
    #[doc = "Bits 0:8 - PCROP2A area start offset, bank2"]
    #[inline(always)]
    pub fn pcrop2a_strt(&self) -> PCROP2A_STRT_R {
        // Masks the low 9 bits (0x01ff) holding the start offset field.
        PCROP2A_STRT_R::new((self.bits & 0x01ff) as u16)
    }
}
impl W {
    #[doc = "Bits 0:8 - PCROP2A area start offset, bank2"]
    #[inline(always)]
    #[must_use]
    pub fn pcrop2a_strt(&mut self) -> PCROP2A_STRT_W<PCROP2ASR_SPEC, 0> {
        PCROP2A_STRT_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    // unsafe: raw bit writes bypass field-level validation and may set
    // reserved bits.
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "Flash PCROP2 area A start address register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`pcrop2asr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`pcrop2asr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct PCROP2ASR_SPEC;
impl crate::RegisterSpec for PCROP2ASR_SPEC {
    // The register is 32 bits wide.
    type Ux = u32;
}
#[doc = "`read()` method returns [`pcrop2asr::R`](R) reader structure"]
impl crate::Readable for PCROP2ASR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`pcrop2asr::W`](W) writer structure"]
impl crate::Writable for PCROP2ASR_SPEC {
    // No write-1-to-clear / write-0-to-clear fields in this register.
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets PCROP2ASR to value 0xffff_ffff"]
impl crate::Resettable for PCROP2ASR_SPEC {
    const RESET_VALUE: Self::Ux = 0xffff_ffff;
}
|
// This file is part of Substrate.
// Copyright (C) 2017-2020 Parity Technologies (UK) Ltd.
// SPDX-License-Identifier: GPL-3.0-or-later WITH Classpath-exception-2.0
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
//! Rust executor possible errors.
use sp_serializer;
use wasmi;
/// Result type alias.
pub type Result<T> = std::result::Result<T, Error>;
/// Error type covering every failure mode of wasm runtime execution.
#[derive(Debug, derive_more::Display, derive_more::From)]
pub enum Error {
    /// Unserializable Data
    InvalidData(sp_serializer::Error),
    /// Trap occurred during execution
    Trap(wasmi::Trap),
    /// Wasmi loading/instantiating error
    Wasmi(wasmi::Error),
    /// Error in the API. Parameter is an error message.
    #[from(ignore)]
    ApiError(String),
    /// Method is not found
    #[display(fmt = "Method not found: '{}'", _0)]
    #[from(ignore)]
    MethodNotFound(String),
    /// Code is invalid (expected single byte)
    #[display(fmt = "Invalid Code: {}", _0)]
    #[from(ignore)]
    InvalidCode(String),
    /// Could not get runtime version.
    #[display(fmt = "On-chain runtime does not specify version")]
    VersionInvalid,
    /// Externalities have failed.
    #[display(fmt = "Externalities error")]
    Externalities,
    /// Invalid index.
    #[display(fmt = "Invalid index provided")]
    InvalidIndex,
    /// Invalid return type.
    #[display(fmt = "Invalid type returned (should be u64)")]
    InvalidReturn,
    /// Runtime failed.
    #[display(fmt = "Runtime error")]
    Runtime,
    /// Runtime panicked.
    #[display(fmt = "Runtime panicked: {}", _0)]
    #[from(ignore)]
    RuntimePanicked(String),
    /// Invalid memory reference.
    #[display(fmt = "Invalid memory reference")]
    InvalidMemoryReference,
    /// The runtime must provide a global named `__heap_base` of type i32 for specifying where the
    /// allocator is allowed to place its data.
    #[display(fmt = "The runtime doesn't provide a global named `__heap_base`")]
    HeapBaseNotFoundOrInvalid,
    /// The runtime WebAssembly module is not allowed to have the `start` function.
    #[display(fmt = "The runtime has the `start` function")]
    RuntimeHasStartFn,
    /// Some other error occurred
    Other(String),
    /// Some error occurred in the allocator
    #[display(fmt = "Error in allocator: {}", _0)]
    Allocator(sp_allocator::Error),
    /// Execution of a host function failed.
    #[display(fmt = "Host function {} execution failed with: {}", _0, _1)]
    FunctionExecution(String, String),
}
impl std::error::Error for Error {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match self {
Error::InvalidData(ref err) => Some(err),
Error::Trap(ref err) => Some(err),
Error::Wasmi(ref err) => Some(err),
_ => None,
}
}
}
// Marker impl so this error can be returned from wasmi host functions.
impl wasmi::HostError for Error {}
impl From<&'static str> for Error {
fn from(err: &'static str) -> Error {
Error::Other(err.into())
}
}
impl From<WasmError> for Error {
fn from(err: WasmError) -> Error {
Error::Other(err.to_string())
}
}
/// Type for errors occurring during Wasm runtime construction.
#[derive(Debug, derive_more::Display)]
pub enum WasmError {
    /// Code could not be read from the state.
    CodeNotFound,
    /// Failure to reinitialize runtime instance from snapshot.
    ApplySnapshotFailed,
    /// Failure to erase the wasm memory.
    ///
    /// Depending on the implementation might mean failure of allocating memory.
    ErasingFailed(String),
    /// Wasm code failed validation.
    InvalidModule,
    /// Wasm code could not be deserialized.
    CantDeserializeWasm,
    /// The module does not export a linear memory named `memory`.
    InvalidMemory,
    /// The number of heap pages requested is disallowed by the module.
    InvalidHeapPages,
    /// Instantiation error.
    Instantiation(String),
    /// Other error happened.
    Other(String),
}
|
use super::regex::Regex;
use super::*;
/// Splits `line` on single spaces and pairs each non-empty word with its
/// starting byte offset in the line. Runs of spaces produce empty fragments,
/// which are skipped while still advancing the offset.
fn annotate_words(line: &str) -> Vec<(usize, &str)> {
    let mut annotated = Vec::new();
    let mut offset = 0;
    for word in line.split(' ') {
        if !word.is_empty() {
            annotated.push((offset, word));
        }
        // +1 accounts for the separator consumed by the split.
        offset += word.len() + 1;
    }
    annotated
}
/// Checker that flags immediately repeated words on a line.
pub struct Dups {}

impl Dups {
    /// Creates the checker; it carries no configuration or state.
    pub fn new() -> Dups {
        Self {}
    }
}
impl Checker for Dups {
    /// Human-readable name used when reporting matches.
    fn name(&self) -> String {
        "DUPLICATES".to_owned()
    }
    /// Scans every line for a word that immediately repeats the previous one.
    ///
    /// Words consisting purely of non-word characters (punctuation runs) are
    /// excluded via the `(\W+)` regex. The reported span covers both
    /// occurrences: from the start of the first word to the end of the second.
    fn matches<'a>(&self, file_content: &'a Vec<String>) -> Vec<LineMatches<'a>> {
        let re = Regex::new(r"(\W+)").unwrap();
        let mut matches: Vec<LineMatches> = vec![];
        for (line_idx, line) in file_content.iter().enumerate() {
            // (offset, text) of the previously seen word; dummy start value
            // never matches a real word because "" is filtered out upstream.
            let mut last_word = (0, "");
            let words = annotate_words(line);
            for (cursor_pos, word) in words {
                // NOTE(review): a fresh LineMatches is created per word and only
                // pushed on a hit, so each duplicate yields its own entry even
                // on the same line — confirm downstream expects this shape.
                let mut lm = LineMatches::new(line_idx + 1, line);
                if last_word.1 == word && !re.is_match(word) {
                    lm.add_match((last_word.0, cursor_pos + word.len()));
                    matches.push(lm);
                }
                last_word = (cursor_pos, word);
            }
        }
        matches
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // Offsets are byte positions of each word within the line.
    #[test]
    fn simple_word_annotation() {
        let line = "large fox jumping over";
        let words = annotate_words(line);
        assert_eq!(words, vec![(0, "large"), (6, "fox"), (10, "jumping"), (18, "over")]);
    }
    // Runs of spaces yield empty fragments that must be skipped while the
    // offset keeps advancing past them.
    #[test]
    fn word_annotation_empty_words() {
        let line = "large    fox";
        let words = annotate_words(line);
        assert_eq!(words, vec![(0, "large"), (9, "fox")]);
    }
}
|
use crate::helpers;
use crate::prelude::*;
use crate::runtime;
use crate::types::TransferFileMeta;
use std::path::PathBuf;
use std::sync::Arc;
use tokio::fs::File;
use tokio::net::TcpStream;
/// Neon binding: kicks off an async file transfer to a peer and returns
/// immediately; progress and completion are reported back to JS through the
/// callbacks carried in the config object.
///
/// Expected config fields: `refId`, `ip`, `port`, `filePath` plus the four
/// `onSendFile*` callbacks.
pub fn send_file(mut cx: FunctionContext) -> JsResult<JsUndefined> {
    let config = cx.argument::<JsObject>(0)?;
    let ref_id = config.string(&mut cx, "refId")?;
    let ip = config.string(&mut cx, "ip")?;
    let port = config.number(&mut cx, "port")?;
    let file_path = config.string(&mut cx, "filePath")?;
    let on_send_file_start = config.callback(&mut cx, "onSendFileStart")?;
    let on_send_file_progress = config.callback(&mut cx, "onSendFileProgress")?;
    let on_send_file_complete = config.callback(&mut cx, "onSendFileComplete")?;
    let on_send_file_error = config.callback(&mut cx, "onSendFileError")?;
    // The transfer itself runs on the shared tokio runtime so the JS thread
    // is never blocked.
    runtime::spawn(async move {
        let ref_id = Arc::new(ref_id);
        // NOTE(review): `port as u16` truncates silently if JS passes a value
        // outside 0..=65535 — confirm the JS side validates it.
        let result = transfer_file(
            ref_id.clone(),
            ip,
            port as u16,
            file_path,
            on_send_file_start,
            on_send_file_progress,
            on_send_file_complete,
        )
        .await;
        // Any failure is funneled to the single error callback as (refId, message).
        if let Err(err) = result {
            on_send_file_error
                .emit(move |cx| vec![cx.string(ref_id.as_str()).upcast(), cx.string(err.to_string()).upcast()])
        }
    });
    Ok(cx.undefined())
}
/// Streams a local file to a recipient over TCP.
///
/// Protocol, as visible here: connect, send a JSON `TransferFileMeta`
/// (file name + size), then pipe the raw file bytes. The start, progress and
/// complete callbacks are invoked with the `ref_id` (progress also gets the
/// current value from `helpers::pipe`).
async fn transfer_file(
    ref_id: Arc<String>,
    ip: String,
    port: u16,
    file_path: String,
    on_send_file_start: EventHandler,
    on_send_file_progress: EventHandler,
    on_send_file_complete: EventHandler,
) -> crate::Result<()> {
    let mut socket = TcpStream::connect((ip.as_str(), port))
        .await
        .context(format!("Failed to connect with the recipient server: {}:{}", ip, port))?;
    // Canonicalize both to resolve symlinks and to fail early on a missing file.
    let file_path = PathBuf::from(file_path)
        .canonicalize()
        .context("Selected source file does not exist")?;
    // Fall back to a generic name when the path has no valid UTF-8 file name.
    let name = file_path.file_name().and_then(|name| name.to_str()).unwrap_or("file");
    let size = file_path
        .metadata()
        .context("Failed to get metadata for the selected source file")?
        .len();
    let cloned_ref_id = ref_id.clone();
    on_send_file_start.emit(move |cx| vec![cx.string(cloned_ref_id.as_str()).upcast()]);
    let transfer_meta = TransferFileMeta {
        name: name.to_owned(),
        size,
    };
    socket
        .write_json(&transfer_meta)
        .await
        .context("Failed to write transfer-meta JSON for the selected source file")?;
    let mut source_file = File::open(file_path.as_path())
        .await
        .context("Failed to open the selected source file")?;
    // Copy file -> socket, reporting progress to JS on every chunk.
    helpers::pipe(&mut source_file, &mut socket, |progress| {
        let cloned_ref_id = ref_id.clone();
        on_send_file_progress.emit(move |cx| {
            vec![
                cx.string(cloned_ref_id.as_str()).upcast(),
                cx.number(progress as f64).upcast(),
            ]
        });
    })
    .await
    .context("Failed to pipe selected source file data to socket")?;
    let cloned_ref_id = ref_id.clone();
    on_send_file_complete.emit(move |cx| vec![cx.string(cloned_ref_id.as_str()).upcast()]);
    Ok(())
}
|
mod alpha2;
pub use alpha2::Alpha2;
mod alpha3;
pub use alpha3::Alpha3;
mod alpha3b;
pub use alpha3b::Alpha3b;
#[cfg(feature = "language-info")]
mod info;
#[cfg(feature = "language-info")]
pub use info::Info;
|
#![allow(clippy::absurd_extreme_comparisons)]
extern crate oxygengine_procedural as procedural;
mod data_aggregator;
use data_aggregator::*;
use minifb::{Key, KeyRepeat, MouseMode, Scale, Window, WindowOptions};
use procedural::prelude::*;
use std::f64::consts::PI;
// Simulation tuning constants. The *_LIMIT values are fed into the climate
// simulation config and into buffer color scaling below; exact physical units
// are defined by the procedural crate — confirm there before changing.
const SIZE: usize = 100;
const ALTITUDE_LIMIT: Scalar = 200.0;
const HUMIDITY_LIMIT: Scalar = 0.25;
const TEMPERATURE_LIMIT: Scalar = 100.0;
const WATER_LIMIT: Scalar = 30.0;
const VELOCITY_LIMIT: Scalar = 1.0;
const DIVERGENCE_LIMIT: Scalar = 1.0;
const PRESSURE_LIMIT: Scalar = 1.0;
const SLOPENESS_LIMIT: Scalar = 0.5;
// 0 means "run forever"; a positive value stops the main loop after that
// many processed steps.
const STEPS_LIMIT: usize = 0;
/// Which simulation field is rendered to the window; switched at runtime
/// with the number keys (see the main loop).
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
enum VisualisationMode {
    Altitude,
    Temperature,
    Humidity,
    SurfaceLevel,
    Biome,
    Velocity,
    Divergence,
    Pressure,
    Slopeness,
}
/// Builds a fresh world with a climate simulation attached.
///
/// `altitude_seed` controls terrain generation reproducibly; temperature and
/// humidity seeds are randomized on every call.
fn build_world(altitude_seed: u32) -> World2d {
    println!("BUILD WORLD");
    let simulation = {
        // Climate parameters; the commented-out alternatives are experiment
        // leftovers kept for reference.
        let config = World2dClimateSimulationConfig {
            full_year_steps: 364 * 5,
            water_capacity: WATER_LIMIT,
            altitude_range: 0.0..ALTITUDE_LIMIT,
            temperature_range: 0.0..TEMPERATURE_LIMIT,
            world_axis_angle: 0.0 * PI as Scalar / 180.0,
            mass_diffuse_factor: 0.00001,
            // mass_diffuse_factor: 1.0,
            // viscosity_factor: 0.00001,
            viscosity_factor: 1.0,
            viscosity_iterations: 10,
            //mass_diffuse_iterations: 10,
            poisson_pressure_iterations: 10,
            world_core_heating: 0.0,
            sun_heating: 0.0,
            thermal_radiation: 0.1,
            sun_heating_adaptive_correction_factor: 1.0,
            ..Default::default()
        };
        World2dClimateSimulation::new(config)
    };
    // Note: the generated terrain uses only half the altitude range, leaving
    // headroom below ALTITUDE_LIMIT.
    let config = World2dConfig {
        size: SIZE,
        zoom: 10.0,
        altitude_range: 0.0..(ALTITUDE_LIMIT * 0.5),
        temperature_range: 0.0..TEMPERATURE_LIMIT,
        altitude_seed,
        temperature_seed: rand::random(),
        humidity_seed: rand::random(),
        ..Default::default()
    };
    World2d::new(&config, Box::new(simulation))
    // World2d::generate(
    //     SIZE,
    //     Box::new(simulation),
    //     |_, _| 0.0,
    //     // |col, row| {
    //     //     let dx = 74.0 - col as f64;
    //     //     let dy = 44.0 - row as f64;
    //     //     let f = ((dx * dx + dy * dy) / (10.0 * 10.0)).max(0.0).min(1.0);
    //     //     ALTITUDE_LIMIT * (0.9 + 0.1 * (1.0 - f * f * f * f * f * f))
    //     // },
    //     // |col, row| {
    //     //     let f = 1.0 - (0.5 + col as f64 / SIZE as f64 - row as f64 / SIZE as f64).max(0.0).min(1.0);
    //     //     ALTITUDE_LIMIT * f
    //     // },
    //     // |col, row| if col - 25 > row { 0.0 } else { ALTITUDE_LIMIT },
    //     // |col, row| if col - 25 > row { ALTITUDE_LIMIT } else { 0.0 },
    //     // |col, row| ALTITUDE_LIMIT * col as f64 / SIZE as f64,
    //     // |col, _| if col < SIZE - 20 { 0.0 } else { ALTITUDE_LIMIT },
    //     |col, row| if col % 2 == 0 { TEMPERATURE_LIMIT * row as f64 / SIZE as f64 } else { 0.0 },
    //     // |_, _| rand::random::<f64>() * TEMPERATURE_LIMIT,
    //     |_, _| 0.0,
    //     |_, _| 0.0,
    // )
}
/// Entry point: builds a procedural world from an (optionally CLI-supplied)
/// altitude seed, opens the minifb window and runs the interactive
/// simulate/visualise loop until Escape is pressed or the window closes.
fn main() {
    let mut steps = 0;
    let mut auto = false;
    let mut mode = VisualisationMode::Temperature;
    // Use the last CLI argument as the altitude seed when it parses;
    // fall back to a random seed otherwise (or when no argument is given).
    let mut altitude_seed = if let Some(seed) = ::std::env::args().skip(1).last() {
        if let Ok(seed) = seed.parse() {
            seed
        } else {
            rand::random()
        }
    } else {
        rand::random()
    };
    // Collects one sun-heating sample per simulation step; auto-flushes
    // to disk every 100 pushes.
    let mut data_aggregator = DataAggregator::new("resources/data.txt");
    data_aggregator.set_auto_flush(Some(100));
    println!("SEED: {}", altitude_seed);
    println!("CREATE WINDOW");
    let options = WindowOptions {
        scale: Scale::X8,
        resize: false,
        ..Default::default()
    };
    let mut window = Window::new(
        &format!("Procedural World Simulation - {:?}", mode),
        SIZE,
        SIZE,
        options,
    )
    .unwrap();
    let mut world = build_world(altitude_seed);
    // Render the initial state once before entering the loop.
    let buffer = world_to_buffer(mode, &world);
    window.update_with_buffer(&buffer, SIZE, SIZE).unwrap();
    println!("LOOP START");
    while window.is_open() && !window.is_key_down(Key::Escape) {
        // `dirty` marks that the framebuffer (and window title) must be redrawn.
        let mut dirty = false;
        // Key bindings: R reseeds and rebuilds the world; 1-9 select a
        // visualisation mode; P toggles auto-stepping. Each forces a redraw.
        if window.is_key_pressed(Key::R, KeyRepeat::No) {
            altitude_seed = rand::random();
            println!("SEED: {}", altitude_seed);
            world = build_world(altitude_seed);
            dirty = true;
        } else if window.is_key_pressed(Key::Key1, KeyRepeat::No) {
            mode = VisualisationMode::Altitude;
            dirty = true;
        } else if window.is_key_pressed(Key::Key2, KeyRepeat::No) {
            mode = VisualisationMode::Temperature;
            dirty = true;
        } else if window.is_key_pressed(Key::Key3, KeyRepeat::No) {
            mode = VisualisationMode::Humidity;
            dirty = true;
        } else if window.is_key_pressed(Key::Key4, KeyRepeat::No) {
            mode = VisualisationMode::SurfaceLevel;
            dirty = true;
        } else if window.is_key_pressed(Key::Key5, KeyRepeat::No) {
            mode = VisualisationMode::Biome;
            dirty = true;
        } else if window.is_key_pressed(Key::Key6, KeyRepeat::No) {
            mode = VisualisationMode::Velocity;
            dirty = true;
        } else if window.is_key_pressed(Key::Key7, KeyRepeat::No) {
            mode = VisualisationMode::Divergence;
            dirty = true;
        } else if window.is_key_pressed(Key::Key8, KeyRepeat::No) {
            mode = VisualisationMode::Pressure;
            dirty = true;
        } else if window.is_key_pressed(Key::Key9, KeyRepeat::No) {
            mode = VisualisationMode::Slopeness;
            dirty = true;
        } else if window.is_key_pressed(Key::P, KeyRepeat::No) {
            auto = !auto;
            dirty = true;
        }
        // Advance one simulation step when auto-play is on, Space was
        // pressed, or Enter is held down.
        if auto
            || window.is_key_pressed(Key::Space, KeyRepeat::No)
            || window.is_key_down(Key::Enter)
        {
            let timer = ::std::time::Instant::now();
            world.process();
            println!("PROCESSED IN: {:?}", timer.elapsed());
            dirty = true;
            // Record the current sun-heating value for offline analysis.
            let sun_heating = &world
                .as_simulation::<World2dClimateSimulation>()
                .unwrap()
                .config()
                .sun_heating;
            data_aggregator.push(*sun_heating);
            steps += 1;
            // Optional hard stop after a fixed number of steps
            // (a non-positive STEPS_LIMIT means "run forever").
            if STEPS_LIMIT > 0 && steps >= STEPS_LIMIT {
                break;
            }
        }
        // I: print detailed info for the cell under the mouse cursor.
        if window.is_key_pressed(Key::I, KeyRepeat::No) {
            if let Some((x, y)) = window.get_mouse_pos(MouseMode::Clamp) {
                show_cell_info(&world, x as usize % SIZE, y as usize % SIZE);
            }
        }
        // S: dump world statistics and the current step to stdout.
        if window.is_key_pressed(Key::S, KeyRepeat::No) {
            println!("WORLD STATS: {:#?}", world.stats());
            let sim = world.as_simulation::<World2dClimateSimulation>().unwrap();
            println!("SUN HEATING: {}", sim.config().sun_heating);
            println!("STEP: {}", steps);
        }
        if dirty {
            // Refresh the title (mode + simulated date) and re-render the buffer.
            let (year, day) = {
                let sim = world.as_simulation::<World2dClimateSimulation>().unwrap();
                (sim.years(), sim.steps())
            };
            window.set_title(&format!(
                "Procedural World Simulation - {:?} | Year: {} | Day: {}",
                mode, year, day
            ));
            window
                .update_with_buffer(&world_to_buffer(mode, &world), SIZE, SIZE)
                .unwrap();
        } else {
            // Still pump window events so keyboard/mouse input keeps working.
            window.update();
        }
    }
    println!("LOOP END");
    println!("SEED: {}", altitude_seed);
}
/// Prints a human-readable dump of the simulation state at cell `(x, y)`:
/// altitude, temperature, humidity, surface water and (if available) wind
/// velocity.
fn show_cell_info(world: &World2d, x: usize, y: usize) {
    let cell = (x, y);
    let altitude = world.altitude()[cell];
    let temperature = world.temperature()[cell];
    let humidity = world.humidity()[cell];
    let surface_water = world.surface_water()[cell];
    // The velocity field only exists once the climate simulation has
    // produced one; report a zero vector otherwise.
    let velocity: (f64, f64) = match world
        .as_simulation::<World2dClimateSimulation>()
        .unwrap()
        .velocity()
    {
        Some(field) => field[cell].into(),
        None => (0.0, 0.0),
    };
    println!(
        "CELL INFO {} x {}\n- altitude: {}\n- temperature: {}\n- humidity: {}\n- surface water: {}\n- velocity: {:?}",
        x, y, altitude, temperature, humidity, surface_water, velocity
    );
}
/// Renders the world into a `SIZE * SIZE` packed-RGB (`0x00RRGGBB`) pixel
/// buffer for the selected visualisation mode.
fn world_to_buffer(mode: VisualisationMode, world: &World2d) -> Vec<u32> {
    match mode {
        // Altitude as a grayscale ramp: 0..ALTITUDE_LIMIT -> black..white.
        VisualisationMode::Altitude => world
            .remap_region((0, 0)..(SIZE, SIZE), |_, _, altitude, _, _, _| {
                let v = (255.0 * altitude / ALTITUDE_LIMIT).max(0.0).min(255.0) as u8;
                let v = u32::from(v);
                v | v << 8 | v << 16
            })
            .into(),
        // Temperature as a blue -> green -> red gradient (cold -> hot),
        // split at the halfway point of the normalized range.
        VisualisationMode::Temperature => world
            .remap_region((0, 0)..(SIZE, SIZE), |_, _, _, temperature, _, _| {
                let f = (temperature / TEMPERATURE_LIMIT).max(0.0).min(1.0);
                if f >= 0.5 {
                    // Upper half: fade green out, red in.
                    let f = (f - 0.5) * 2.0;
                    let rv = (255.0 * f).max(0.0).min(255.0) as u8;
                    let rv = u32::from(rv);
                    let gv = (255.0 * (1.0 - f)).max(0.0).min(255.0) as u8;
                    let gv = u32::from(gv);
                    rv << 16 | gv << 8
                } else {
                    // Lower half: fade blue out, green in.
                    let f = f * 2.0;
                    let gv = (255.0 * f).max(0.0).min(255.0) as u8;
                    let gv = u32::from(gv);
                    let bv = (255.0 * (1.0 - f)).max(0.0).min(255.0) as u8;
                    let bv = u32::from(bv);
                    gv << 8 | bv
                }
            })
            .into(),
        // Humidity as a grayscale ramp: 0..HUMIDITY_LIMIT -> black..white.
        VisualisationMode::Humidity => world
            .remap_region((0, 0)..(SIZE, SIZE), |_, _, _, _, humidity, _| {
                let v = (255.0 * humidity / HUMIDITY_LIMIT).max(0.0).min(255.0) as u8;
                let v = u32::from(v);
                v | v << 8 | v << 16
            })
            .into(),
        // Terrain plus standing water (altitude + surface_water), grayscale.
        VisualisationMode::SurfaceLevel => world
            .remap_region(
                (0, 0)..(SIZE, SIZE),
                |_, _, altitude, _, _, surface_water| {
                    let v = (255.0 * (altitude + surface_water) / ALTITUDE_LIMIT)
                        .max(0.0)
                        .min(255.0) as u8;
                    let v = u32::from(v);
                    v | v << 8 | v << 16
                },
            )
            .into(),
        // Crude biome classification from water coverage and temperature.
        VisualisationMode::Biome => world
            .remap_region(
                (0, 0)..(SIZE, SIZE),
                |_, _, altitude, temperature, _, surface_water| {
                    // t: 0 = cold/frozen, 1 = temperate/wet, 2 = hot.
                    // Dry cells (< 10 units of water) are classed by two
                    // temperature cut-offs; wet cells by a single one.
                    let t = if surface_water < 10.0 {
                        if temperature < 40.0 {
                            0
                        } else if temperature < 90.0 {
                            1
                        } else {
                            2
                        }
                    } else if temperature < 15.0 {
                        0
                    } else {
                        1
                    };
                    if t == 0 {
                        // Cold: bright grayscale shaded by altitude.
                        let g = (128.0 + 127.0 * altitude / ALTITUDE_LIMIT)
                            .max(0.0)
                            .min(255.0) as u8;
                        let g = u32::from(g);
                        g | g << 8 | g << 16
                    } else if t == 1 {
                        // Temperate: green shaded by altitude, with a blue
                        // overlay that deepens with surface water.
                        let g = (55.0 + 200.0 * altitude / ALTITUDE_LIMIT)
                            .max(0.0)
                            .min(255.0) as u8;
                        let g = u32::from(g);
                        let swf = 1.0 - surface_water / WATER_LIMIT;
                        let swf = 1.0 - swf * swf * swf;
                        let w = (192.0 * swf).max(0.0).min(255.0) as u8;
                        let w = u32::from(w);
                        w | g << 8
                    } else {
                        // Hot: yellow-ish (equal red/green, small fixed blue).
                        let g = (92.0 + 127.0 * altitude / ALTITUDE_LIMIT)
                            .max(0.0)
                            .min(255.0) as u8;
                        let g = u32::from(g);
                        0x30 | g << 8 | g << 16
                    }
                },
            )
            .into(),
        // Wind velocity: x component in the red channel, y in the blue
        // channel, both centred at 128; magenta fill when no field exists.
        VisualisationMode::Velocity => {
            if let Some(velocity) = world
                .as_simulation::<World2dClimateSimulation>()
                .unwrap()
                .velocity()
            {
                velocity
                    .cells()
                    .iter()
                    .map(|vel| {
                        let x = (((vel.0 / VELOCITY_LIMIT) + 1.0) * 0.5 * 255.0)
                            .max(0.0)
                            .min(255.0) as u8;
                        let x = u32::from(x);
                        let y = (((vel.1 / VELOCITY_LIMIT) + 1.0) * 0.5 * 255.0)
                            .max(0.0)
                            .min(255.0) as u8;
                        let y = u32::from(y);
                        y | x << 16
                    })
                    .collect::<Vec<_>>()
            } else {
                vec![0x0088_0088; SIZE * SIZE]
            }
        }
        // Divergence: positive values in red, negative in blue; black fill
        // when the field is not available.
        VisualisationMode::Divergence => {
            if let Some(divergence) = world
                .as_simulation::<World2dClimateSimulation>()
                .unwrap()
                .divergence()
            {
                divergence
                    .cells()
                    .iter()
                    .map(|div| {
                        let p = (div / DIVERGENCE_LIMIT).max(0.0);
                        let n = -(div / DIVERGENCE_LIMIT).min(0.0);
                        let vp = (p * 255.0).max(0.0).min(255.0) as u8;
                        let vp = u32::from(vp);
                        let vn = (n * 255.0).max(0.0).min(255.0) as u8;
                        let vn = u32::from(vn);
                        vn | vp << 16
                    })
                    .collect::<Vec<_>>()
            } else {
                vec![0x0000_0000; SIZE * SIZE]
            }
        }
        // Pressure: same red/blue positive/negative encoding as divergence.
        VisualisationMode::Pressure => {
            if let Some(pressure) = world
                .as_simulation::<World2dClimateSimulation>()
                .unwrap()
                .pressure()
            {
                pressure
                    .cells()
                    .iter()
                    .map(|pres| {
                        let p = (pres / PRESSURE_LIMIT).max(0.0);
                        let n = -(pres / PRESSURE_LIMIT).min(0.0);
                        let vp = (p * 255.0).max(0.0).min(255.0) as u8;
                        let vp = u32::from(vp);
                        let vn = (n * 255.0).max(0.0).min(255.0) as u8;
                        let vn = u32::from(vn);
                        vn | vp << 16
                    })
                    .collect::<Vec<_>>()
            } else {
                vec![0x0000_0000; SIZE * SIZE]
            }
        }
        // Terrain slope vectors: same red/blue encoding as velocity;
        // magenta fill when the field is not available.
        VisualisationMode::Slopeness => {
            if let Some(slopeness) = world
                .as_simulation::<World2dClimateSimulation>()
                .unwrap()
                .slopeness()
            {
                slopeness
                    .cells()
                    .iter()
                    .map(|vel| {
                        let x = ((vel.0 / SLOPENESS_LIMIT + 1.0) * 0.5 * 255.0)
                            .max(0.0)
                            .min(255.0) as u8;
                        let x = u32::from(x);
                        let y = ((vel.1 / SLOPENESS_LIMIT + 1.0) * 0.5 * 255.0)
                            .max(0.0)
                            .min(255.0) as u8;
                        let y = u32::from(y);
                        y | x << 16
                    })
                    .collect::<Vec<_>>()
            } else {
                vec![0x0088_0088; SIZE * SIZE]
            }
        }
    }
}
|
mod image;
mod ppm;
pub use self::image::*;
pub use self::ppm::*;
|
pub mod application;
pub mod benchmark_command_listener;
pub mod benchmarker;
pub mod build_container;
pub mod build_image;
pub mod build_network;
pub mod simple;
pub mod verifier;
|
// rust-hwid
// (c) 2020 tilda, under MIT license
//! Get a "Hardware ID" for the host machine. This is a UUID
//! which is intended to uniquely represent this entity.
use thiserror::Error;
/// Possible failure cases for [get_id()].
/// Possible failure cases for [get_id()].
///
/// Every platform back-end in this crate reports failure through this type.
#[derive(Debug, Error)]
pub enum HwIdError {
    /// Could not detect a hardware id. This might be caused
    /// by a misconfigured system or by this feature not
    /// being supported by the system or platform.
    #[error("no HWID was found on system")]
    NotFound,
    /// Found a putative HWID, but something was wrong with
    /// it. The `String` argument contains a path or other
    /// identifier at which the HWID was found. This will
    /// usually indicate something is really wrong with the
    /// system.
    #[error("{0:?}: contains malformed HWID")]
    Malformed(String),
}
#[cfg(target_os = "windows")]
mod hwid {
    use super::*;
    use winreg::enums::{HKEY_LOCAL_MACHINE, KEY_QUERY_VALUE};
    /// Get the hardware ID of this machine. The HWID is
    /// obtained from the Windows registry at location
    /// `\\\\SOFTWARE\\Microsoft\\Cryptography\\MachineGuid`.
    pub fn get_id() -> Result<std::string::String, HwIdError> {
        // Open the Cryptography key read-only; any failure (missing key,
        // access denied, ...) is reported uniformly as "not found".
        let key = winreg::RegKey::predef(HKEY_LOCAL_MACHINE)
            .open_subkey_with_flags("Software\\Microsoft\\Cryptography", KEY_QUERY_VALUE)
            .map_err(|_| HwIdError::NotFound)?;
        key.get_value("MachineGuid").map_err(|_| HwIdError::NotFound)
    }
}
#[cfg(target_os = "macos")]
mod hwid {
    use super::*;
    /// Get the hardware ID of this machine. The HWID is
    /// obtained by running
    ///
    /// ```text
    /// ioreg -rd1 -c IOPlatformExpertDevice
    /// ```
    ///
    /// and extracting the `IOPlatformUUID` value from the result.
    pub fn get_id() -> Result<std::string::String, HwIdError> {
        // Any failure to spawn or run ioreg is reported as "not found".
        let cmd = std::process::Command::new("ioreg")
            .arg("-rd1")
            .arg("-c")
            .arg("IOPlatformExpertDevice")
            .output()
            .or(Err(HwIdError::NotFound))?
            .stdout;
        // Non-UTF-8 output means something is badly wrong with ioreg itself.
        let out = String::from_utf8(cmd).or(Err(HwIdError::Malformed(String::from("ioreg"))))?;
        // Find the `"IOPlatformUUID" = "<uuid>"` line, then take the quoted
        // value on the right-hand side of the `=`.
        match out
            .lines()
            .find(|l| l.contains("IOPlatformUUID"))
            .unwrap_or("")
            .split('=')
            .nth(1)
            .unwrap_or("")
            .split('\"')
            .nth(1)
        {
            None => Err(HwIdError::NotFound),
            Some(id) => {
                if id.is_empty() {
                    Err(HwIdError::Malformed(String::from("ioreg")))
                } else {
                    Ok(id.to_string())
                }
            }
        }
    }
}
#[cfg(target_os = "linux")]
mod hwid {
    use super::*;
    /// Get the hardware ID of this machine. The HWID is
    /// obtained from `/var/lib/dbus/machine-id`, or failing
    /// that from `/etc/machine-id`.
    ///
    /// # Errors
    ///
    /// Returns [`HwIdError::NotFound`] when neither file is readable, and
    /// [`HwIdError::Malformed`] (carrying the file path) when a file exists
    /// but contains no id line.
    pub fn get_id() -> Result<std::string::String, HwIdError> {
        let paths = ["/var/lib/dbus/machine-id", "/etc/machine-id"];
        for p in paths {
            if let Ok(id_contents) = std::fs::read_to_string(p) {
                let id_str = id_contents
                    .lines()
                    .next()
                    // FIX: `HwIdError::Malformed` is documented to carry the
                    // *path or identifier* at which the bad HWID was found;
                    // the previous code passed the (empty) file contents.
                    .ok_or_else(|| HwIdError::Malformed(p.to_string()))?;
                return Ok(id_str.to_string());
            }
        }
        Err(HwIdError::NotFound)
    }
}
// FIX: multiple stacked `#[cfg(target_os = ...)]` attributes AND together,
// so the original module could never be compiled on *any* platform (one
// target cannot be four OSes at once). `any(...)` restores the intended
// "any BSD" meaning. The signature is also aligned with the other platform
// back-ends (they all return `Result`, which `pub use crate::hwid::get_id`
// re-exports as the crate's single public API).
#[cfg(any(
    target_os = "freebsd",
    target_os = "dragonfly",
    target_os = "openbsd",
    target_os = "netbsd"
))]
mod hwid {
    use super::*;
    /// Get the hardware ID of this machine. HWID detection is not
    /// implemented for the BSDs, so this always reports that no HWID
    /// was found.
    pub fn get_id() -> Result<std::string::String, HwIdError> {
        // `NotFound` is documented to cover "feature not supported by the
        // system or platform", which is exactly this case; returning it is
        // kinder to callers than the previous `unimplemented!()` panic.
        Err(HwIdError::NotFound)
    }
}
pub use crate::hwid::get_id;
|
extern crate tungstenite;
extern crate url;
extern crate native_tls;
mod connection_error;
mod connection;
mod connection_wss;
use crate::connection::{ConnectionEvent, Connection};
use crate::connection_wss::ConnectionWss;
/// Demo client: connects to a local wss server, sends a numbered test
/// message roughly once per second, and prints connection events until
/// the server closes the connection.
fn main() {
    let mut con = ConnectionWss::with_std_tcp_stream();
    con.connect("wss://127.0.0.1:2794/").unwrap();
    //con.send(vec![cid.clone()], "test-hello".as_bytes().to_vec()).unwrap();
    // Connection id; known only after the Connect event arrives.
    let mut cid: Option<String> = None;
    let mut last_send = std::time::Instant::now();
    let mut count = 1;
    loop {
        // Once per second, send a numbered test message -- but only while
        // we hold a live connection id.
        if last_send.elapsed() >= std::time::Duration::from_secs(1) {
            last_send = std::time::Instant::now();
            count += 1;
            if let Some(cid) = &cid {
                con.send(
                    vec![cid.clone()],
                    format!("test {}", count).into_bytes()).unwrap();
            }
        }
        // Drain and handle any pending connection events.
        let (did_work, evt_lst) = con.poll().unwrap();
        for evt in evt_lst {
            match evt {
                ConnectionEvent::ConnectionError(_id, e) => {
                    println!("got error: {:?}", e);
                    // Forget the id so we stop sending until reconnected.
                    cid = None;
                }
                ConnectionEvent::Connect(id) => {
                    println!("connected!: {}", &id);
                    cid = Some(id);
                }
                ConnectionEvent::Close(id) => {
                    println!("got close: {}", &id);
                    return;
                }
                ConnectionEvent::Message(_id, msg) => {
                    println!("msg: {}", String::from_utf8_lossy(&msg));
                }
            }
        }
        // Sleep briefly after productive polls; back off harder when idle
        // to avoid busy-spinning.
        if did_work {
            std::thread::sleep(
                std::time::Duration::from_millis(1));
        } else {
            std::thread::sleep(
                std::time::Duration::from_millis(100));
        }
    }
}
|
use ::*;
/// Runs the given JavaScript source inside the Emscripten environment,
/// discarding any result.
///
/// # Panics
///
/// Panics if `script` contains an interior NUL byte (`CString::new`).
pub fn run_script(script: &str) {
    let script = CString::new(script).unwrap();
    // SAFETY: `script` is a valid NUL-terminated C string that stays alive
    // for the duration of the FFI call.
    unsafe {
        emscripten_run_script(script.as_ptr());
    }
}
/// Runs the given JavaScript source and returns its result as an `i32`.
///
/// # Panics
///
/// Panics if `script` contains an interior NUL byte (`CString::new`).
pub fn run_script_i32(script: &str) -> i32 {
    let script = CString::new(script).unwrap();
    // SAFETY: valid NUL-terminated C string alive for the call. The cast
    // normalises the C `int` return type to `i32`.
    unsafe { emscripten_run_script_int(script.as_ptr()) as i32 }
}
/// Runs the given JavaScript source and returns its result as an owned
/// `String`.
///
/// # Panics
///
/// Panics if `script` contains an interior NUL byte, or if the script's
/// result is not valid UTF-8.
pub fn run_script_string(script: &str) -> String {
    let script = CString::new(script).unwrap();
    let string = unsafe {
        // SAFETY: assumes the returned pointer is non-null and points at a
        // NUL-terminated buffer that remains valid until it is copied into
        // an owned `String` below -- TODO(review): confirm against the
        // emscripten_run_script_string contract.
        let string = emscripten_run_script_string(script.as_ptr());
        CStr::from_ptr(string)
    };
    String::from(string.to_str().expect("Script returned invalid UTF-8"))
}
|
#![doc = "generated by AutoRust 0.1.0"]
#![allow(non_camel_case_types)]
#![allow(unused_imports)]
use serde::{Deserialize, Serialize};
// NOTE: generated API models (AutoRust); only doc comments added below --
// field names, serde attributes and ordering are intentionally untouched.
/// Error code/message pair carried inside an [`ErrorResponse`].
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ErrorDetails {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub code: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub message: Option<String>,
}
/// Top-level error envelope returned by the service.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ErrorResponse {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub error: Option<ErrorDetails>,
}
/// A REST operation: its name plus display metadata.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Operation {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub display: Option<operation::Display>,
}
/// Nested types for [`Operation`].
pub mod operation {
    use super::*;
    /// Human-readable provider/resource/operation labels.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub struct Display {
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub provider: Option<String>,
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub resource: Option<String>,
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub operation: Option<String>,
    }
}
/// Paged list of [`Operation`]s, with an optional continuation link.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct OperationListResult {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<Operation>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
/// Common resource envelope: id, name, type and tags.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Resource {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tags: Option<serde_json::Value>,
}
/// A price-sheet resource: [`Resource`] envelope plus its properties.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PriceSheetResult {
    #[serde(flatten)]
    pub resource: Resource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<PriceSheetModel>,
}
/// Paged collection of [`PriceSheetProperties`] rows.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PriceSheetModel {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub pricesheets: Vec<PriceSheetProperties>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
/// One price-sheet row: meter, unit, quantity and pricing fields.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PriceSheetProperties {
    #[serde(rename = "billingPeriodId", default, skip_serializing_if = "Option::is_none")]
    pub billing_period_id: Option<String>,
    #[serde(rename = "meterId", default, skip_serializing_if = "Option::is_none")]
    pub meter_id: Option<String>,
    #[serde(rename = "meterDetails", default, skip_serializing_if = "Option::is_none")]
    pub meter_details: Option<MeterDetails>,
    #[serde(rename = "unitOfMeasure", default, skip_serializing_if = "Option::is_none")]
    pub unit_of_measure: Option<String>,
    #[serde(rename = "includedQuantity", default, skip_serializing_if = "Option::is_none")]
    pub included_quantity: Option<f64>,
    #[serde(rename = "partNumber", default, skip_serializing_if = "Option::is_none")]
    pub part_number: Option<String>,
    #[serde(rename = "unitPrice", default, skip_serializing_if = "Option::is_none")]
    pub unit_price: Option<f64>,
    #[serde(rename = "currencyCode", default, skip_serializing_if = "Option::is_none")]
    pub currency_code: Option<String>,
    #[serde(rename = "offerId", default, skip_serializing_if = "Option::is_none")]
    pub offer_id: Option<String>,
}
/// A forecast resource: [`Resource`] envelope plus its properties.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Forecast {
    #[serde(flatten)]
    pub resource: Resource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<ForecastProperties>,
}
/// Descriptive metadata about a billing meter.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct MeterDetails {
    #[serde(rename = "meterName", default, skip_serializing_if = "Option::is_none")]
    pub meter_name: Option<String>,
    #[serde(rename = "meterCategory", default, skip_serializing_if = "Option::is_none")]
    pub meter_category: Option<String>,
    #[serde(rename = "meterSubCategory", default, skip_serializing_if = "Option::is_none")]
    pub meter_sub_category: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub unit: Option<String>,
    #[serde(rename = "meterLocation", default, skip_serializing_if = "Option::is_none")]
    pub meter_location: Option<String>,
    #[serde(rename = "totalIncludedQuantity", default, skip_serializing_if = "Option::is_none")]
    pub total_included_quantity: Option<f64>,
    #[serde(rename = "pretaxStandardRate", default, skip_serializing_if = "Option::is_none")]
    pub pretax_standard_rate: Option<f64>,
}
/// A usage-detail resource: [`Resource`] envelope plus its properties.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct UsageDetail {
    #[serde(flatten)]
    pub resource: Resource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<UsageDetailProperties>,
}
/// Paged list of [`UsageDetail`]s, with an optional continuation link.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct UsageDetailsListResult {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<UsageDetail>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
/// Detailed usage record: billing period, instance, meter, quantities
/// and cost fields.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct UsageDetailProperties {
    #[serde(rename = "billingPeriodId", default, skip_serializing_if = "Option::is_none")]
    pub billing_period_id: Option<String>,
    #[serde(rename = "invoiceId", default, skip_serializing_if = "Option::is_none")]
    pub invoice_id: Option<String>,
    #[serde(rename = "usageStart", default, skip_serializing_if = "Option::is_none")]
    pub usage_start: Option<String>,
    #[serde(rename = "usageEnd", default, skip_serializing_if = "Option::is_none")]
    pub usage_end: Option<String>,
    #[serde(rename = "instanceName", default, skip_serializing_if = "Option::is_none")]
    pub instance_name: Option<String>,
    #[serde(rename = "instanceId", default, skip_serializing_if = "Option::is_none")]
    pub instance_id: Option<String>,
    #[serde(rename = "instanceLocation", default, skip_serializing_if = "Option::is_none")]
    pub instance_location: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub currency: Option<String>,
    #[serde(rename = "usageQuantity", default, skip_serializing_if = "Option::is_none")]
    pub usage_quantity: Option<f64>,
    #[serde(rename = "billableQuantity", default, skip_serializing_if = "Option::is_none")]
    pub billable_quantity: Option<f64>,
    #[serde(rename = "pretaxCost", default, skip_serializing_if = "Option::is_none")]
    pub pretax_cost: Option<f64>,
    #[serde(rename = "isEstimated", default, skip_serializing_if = "Option::is_none")]
    pub is_estimated: Option<bool>,
    #[serde(rename = "meterId", default, skip_serializing_if = "Option::is_none")]
    pub meter_id: Option<String>,
    #[serde(rename = "meterDetails", default, skip_serializing_if = "Option::is_none")]
    pub meter_details: Option<MeterDetails>,
    #[serde(rename = "subscriptionGuid", default, skip_serializing_if = "Option::is_none")]
    pub subscription_guid: Option<String>,
    #[serde(rename = "subscriptionName", default, skip_serializing_if = "Option::is_none")]
    pub subscription_name: Option<String>,
    #[serde(rename = "accountName", default, skip_serializing_if = "Option::is_none")]
    pub account_name: Option<String>,
    #[serde(rename = "departmentName", default, skip_serializing_if = "Option::is_none")]
    pub department_name: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub product: Option<String>,
    #[serde(rename = "consumedService", default, skip_serializing_if = "Option::is_none")]
    pub consumed_service: Option<String>,
    #[serde(rename = "costCenter", default, skip_serializing_if = "Option::is_none")]
    pub cost_center: Option<String>,
    #[serde(rename = "partNumber", default, skip_serializing_if = "Option::is_none")]
    pub part_number: Option<String>,
    #[serde(rename = "resourceGuid", default, skip_serializing_if = "Option::is_none")]
    pub resource_guid: Option<String>,
    #[serde(rename = "offerId", default, skip_serializing_if = "Option::is_none")]
    pub offer_id: Option<String>,
    #[serde(rename = "chargesBilledSeparately", default, skip_serializing_if = "Option::is_none")]
    pub charges_billed_separately: Option<bool>,
    #[serde(rename = "additionalProperties", default, skip_serializing_if = "Option::is_none")]
    pub additional_properties: Option<String>,
}
/// Unpaged list of [`Forecast`]s.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ForecastsListResult {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<Forecast>,
}
/// Properties of a single cost forecast entry.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ForecastProperties {
    #[serde(rename = "usageDate", default, skip_serializing_if = "Option::is_none")]
    pub usage_date: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub grain: Option<forecast_properties::Grain>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub charge: Option<f64>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub currency: Option<String>,
    #[serde(rename = "chargeType", default, skip_serializing_if = "Option::is_none")]
    pub charge_type: Option<forecast_properties::ChargeType>,
    #[serde(rename = "confidenceLevels", default, skip_serializing_if = "Vec::is_empty")]
    pub confidence_levels: Vec<serde_json::Value>,
}
/// Nested enums for [`ForecastProperties`].
pub mod forecast_properties {
    use super::*;
    /// Time granularity of the forecast.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Grain {
        Daily,
        Monthly,
        Yearly,
    }
    /// Whether a row is an actual charge or a forecasted one.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum ChargeType {
        Actual,
        Forecast,
    }
}
|
extern crate tempdir;
use tempdir::TempDir;
/// Creates a temporary directory with the prefix "control"; panics if
/// creation fails.
fn main() {
    // NOTE(review): the TempDir handle is not bound, so it is dropped --
    // and the directory deleted -- as soon as this statement ends;
    // presumably this program exists only to exercise `TempDir::new`.
    TempDir::new("control").unwrap();
}
|
// svd2rust-style generated register API for MPCBB2_VCTR18: `R`/`W` wrap the
// raw 32-bit register value; each `Bnnn` alias reads/writes a single bit
// (block bits 576..=607 map onto register bits 0..=31). Code is generated --
// only comments added here.
#[doc = "Register `MPCBB2_VCTR18` reader"]
pub type R = crate::R<MPCBB2_VCTR18_SPEC>;
#[doc = "Register `MPCBB2_VCTR18` writer"]
pub type W = crate::W<MPCBB2_VCTR18_SPEC>;
#[doc = "Field `B576` reader - B576"]
pub type B576_R = crate::BitReader;
#[doc = "Field `B576` writer - B576"]
pub type B576_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B577` reader - B577"]
pub type B577_R = crate::BitReader;
#[doc = "Field `B577` writer - B577"]
pub type B577_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B578` reader - B578"]
pub type B578_R = crate::BitReader;
#[doc = "Field `B578` writer - B578"]
pub type B578_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B579` reader - B579"]
pub type B579_R = crate::BitReader;
#[doc = "Field `B579` writer - B579"]
pub type B579_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B580` reader - B580"]
pub type B580_R = crate::BitReader;
#[doc = "Field `B580` writer - B580"]
pub type B580_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B581` reader - B581"]
pub type B581_R = crate::BitReader;
#[doc = "Field `B581` writer - B581"]
pub type B581_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B582` reader - B582"]
pub type B582_R = crate::BitReader;
#[doc = "Field `B582` writer - B582"]
pub type B582_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B583` reader - B583"]
pub type B583_R = crate::BitReader;
#[doc = "Field `B583` writer - B583"]
pub type B583_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B584` reader - B584"]
pub type B584_R = crate::BitReader;
#[doc = "Field `B584` writer - B584"]
pub type B584_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B585` reader - B585"]
pub type B585_R = crate::BitReader;
#[doc = "Field `B585` writer - B585"]
pub type B585_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B586` reader - B586"]
pub type B586_R = crate::BitReader;
#[doc = "Field `B586` writer - B586"]
pub type B586_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B587` reader - B587"]
pub type B587_R = crate::BitReader;
#[doc = "Field `B587` writer - B587"]
pub type B587_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B588` reader - B588"]
pub type B588_R = crate::BitReader;
#[doc = "Field `B588` writer - B588"]
pub type B588_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B589` reader - B589"]
pub type B589_R = crate::BitReader;
#[doc = "Field `B589` writer - B589"]
pub type B589_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B590` reader - B590"]
pub type B590_R = crate::BitReader;
#[doc = "Field `B590` writer - B590"]
pub type B590_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B591` reader - B591"]
pub type B591_R = crate::BitReader;
#[doc = "Field `B591` writer - B591"]
pub type B591_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B592` reader - B592"]
pub type B592_R = crate::BitReader;
#[doc = "Field `B592` writer - B592"]
pub type B592_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B593` reader - B593"]
pub type B593_R = crate::BitReader;
#[doc = "Field `B593` writer - B593"]
pub type B593_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B594` reader - B594"]
pub type B594_R = crate::BitReader;
#[doc = "Field `B594` writer - B594"]
pub type B594_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B595` reader - B595"]
pub type B595_R = crate::BitReader;
#[doc = "Field `B595` writer - B595"]
pub type B595_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B596` reader - B596"]
pub type B596_R = crate::BitReader;
#[doc = "Field `B596` writer - B596"]
pub type B596_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B597` reader - B597"]
pub type B597_R = crate::BitReader;
#[doc = "Field `B597` writer - B597"]
pub type B597_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B598` reader - B598"]
pub type B598_R = crate::BitReader;
#[doc = "Field `B598` writer - B598"]
pub type B598_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B599` reader - B599"]
pub type B599_R = crate::BitReader;
#[doc = "Field `B599` writer - B599"]
pub type B599_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B600` reader - B600"]
pub type B600_R = crate::BitReader;
#[doc = "Field `B600` writer - B600"]
pub type B600_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B601` reader - B601"]
pub type B601_R = crate::BitReader;
#[doc = "Field `B601` writer - B601"]
pub type B601_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B602` reader - B602"]
pub type B602_R = crate::BitReader;
#[doc = "Field `B602` writer - B602"]
pub type B602_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B603` reader - B603"]
pub type B603_R = crate::BitReader;
#[doc = "Field `B603` writer - B603"]
pub type B603_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B604` reader - B604"]
pub type B604_R = crate::BitReader;
#[doc = "Field `B604` writer - B604"]
pub type B604_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B605` reader - B605"]
pub type B605_R = crate::BitReader;
#[doc = "Field `B605` writer - B605"]
pub type B605_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B606` reader - B606"]
pub type B606_R = crate::BitReader;
#[doc = "Field `B606` writer - B606"]
pub type B606_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B607` reader - B607"]
pub type B607_R = crate::BitReader;
#[doc = "Field `B607` writer - B607"]
pub type B607_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
// Read accessors: register bit N corresponds to field B(576 + N); each
// method extracts one bit of the 32-bit register value.
impl R {
    #[doc = "Bit 0 - B576"]
    #[inline(always)]
    pub fn b576(&self) -> B576_R {
        B576_R::new((self.bits & 1) != 0)
    }
    #[doc = "Bit 1 - B577"]
    #[inline(always)]
    pub fn b577(&self) -> B577_R {
        B577_R::new(((self.bits >> 1) & 1) != 0)
    }
    #[doc = "Bit 2 - B578"]
    #[inline(always)]
    pub fn b578(&self) -> B578_R {
        B578_R::new(((self.bits >> 2) & 1) != 0)
    }
    #[doc = "Bit 3 - B579"]
    #[inline(always)]
    pub fn b579(&self) -> B579_R {
        B579_R::new(((self.bits >> 3) & 1) != 0)
    }
    #[doc = "Bit 4 - B580"]
    #[inline(always)]
    pub fn b580(&self) -> B580_R {
        B580_R::new(((self.bits >> 4) & 1) != 0)
    }
    #[doc = "Bit 5 - B581"]
    #[inline(always)]
    pub fn b581(&self) -> B581_R {
        B581_R::new(((self.bits >> 5) & 1) != 0)
    }
    #[doc = "Bit 6 - B582"]
    #[inline(always)]
    pub fn b582(&self) -> B582_R {
        B582_R::new(((self.bits >> 6) & 1) != 0)
    }
    #[doc = "Bit 7 - B583"]
    #[inline(always)]
    pub fn b583(&self) -> B583_R {
        B583_R::new(((self.bits >> 7) & 1) != 0)
    }
    #[doc = "Bit 8 - B584"]
    #[inline(always)]
    pub fn b584(&self) -> B584_R {
        B584_R::new(((self.bits >> 8) & 1) != 0)
    }
    #[doc = "Bit 9 - B585"]
    #[inline(always)]
    pub fn b585(&self) -> B585_R {
        B585_R::new(((self.bits >> 9) & 1) != 0)
    }
    #[doc = "Bit 10 - B586"]
    #[inline(always)]
    pub fn b586(&self) -> B586_R {
        B586_R::new(((self.bits >> 10) & 1) != 0)
    }
    #[doc = "Bit 11 - B587"]
    #[inline(always)]
    pub fn b587(&self) -> B587_R {
        B587_R::new(((self.bits >> 11) & 1) != 0)
    }
    #[doc = "Bit 12 - B588"]
    #[inline(always)]
    pub fn b588(&self) -> B588_R {
        B588_R::new(((self.bits >> 12) & 1) != 0)
    }
    #[doc = "Bit 13 - B589"]
    #[inline(always)]
    pub fn b589(&self) -> B589_R {
        B589_R::new(((self.bits >> 13) & 1) != 0)
    }
    #[doc = "Bit 14 - B590"]
    #[inline(always)]
    pub fn b590(&self) -> B590_R {
        B590_R::new(((self.bits >> 14) & 1) != 0)
    }
    #[doc = "Bit 15 - B591"]
    #[inline(always)]
    pub fn b591(&self) -> B591_R {
        B591_R::new(((self.bits >> 15) & 1) != 0)
    }
    #[doc = "Bit 16 - B592"]
    #[inline(always)]
    pub fn b592(&self) -> B592_R {
        B592_R::new(((self.bits >> 16) & 1) != 0)
    }
    #[doc = "Bit 17 - B593"]
    #[inline(always)]
    pub fn b593(&self) -> B593_R {
        B593_R::new(((self.bits >> 17) & 1) != 0)
    }
    #[doc = "Bit 18 - B594"]
    #[inline(always)]
    pub fn b594(&self) -> B594_R {
        B594_R::new(((self.bits >> 18) & 1) != 0)
    }
    #[doc = "Bit 19 - B595"]
    #[inline(always)]
    pub fn b595(&self) -> B595_R {
        B595_R::new(((self.bits >> 19) & 1) != 0)
    }
    #[doc = "Bit 20 - B596"]
    #[inline(always)]
    pub fn b596(&self) -> B596_R {
        B596_R::new(((self.bits >> 20) & 1) != 0)
    }
    #[doc = "Bit 21 - B597"]
    #[inline(always)]
    pub fn b597(&self) -> B597_R {
        B597_R::new(((self.bits >> 21) & 1) != 0)
    }
    #[doc = "Bit 22 - B598"]
    #[inline(always)]
    pub fn b598(&self) -> B598_R {
        B598_R::new(((self.bits >> 22) & 1) != 0)
    }
    #[doc = "Bit 23 - B599"]
    #[inline(always)]
    pub fn b599(&self) -> B599_R {
        B599_R::new(((self.bits >> 23) & 1) != 0)
    }
    #[doc = "Bit 24 - B600"]
    #[inline(always)]
    pub fn b600(&self) -> B600_R {
        B600_R::new(((self.bits >> 24) & 1) != 0)
    }
    #[doc = "Bit 25 - B601"]
    #[inline(always)]
    pub fn b601(&self) -> B601_R {
        B601_R::new(((self.bits >> 25) & 1) != 0)
    }
    #[doc = "Bit 26 - B602"]
    #[inline(always)]
    pub fn b602(&self) -> B602_R {
        B602_R::new(((self.bits >> 26) & 1) != 0)
    }
    #[doc = "Bit 27 - B603"]
    #[inline(always)]
    pub fn b603(&self) -> B603_R {
        B603_R::new(((self.bits >> 27) & 1) != 0)
    }
    #[doc = "Bit 28 - B604"]
    #[inline(always)]
    pub fn b604(&self) -> B604_R {
        B604_R::new(((self.bits >> 28) & 1) != 0)
    }
    #[doc = "Bit 29 - B605"]
    #[inline(always)]
    pub fn b605(&self) -> B605_R {
        B605_R::new(((self.bits >> 29) & 1) != 0)
    }
    #[doc = "Bit 30 - B606"]
    #[inline(always)]
    pub fn b606(&self) -> B606_R {
        B606_R::new(((self.bits >> 30) & 1) != 0)
    }
    #[doc = "Bit 31 - B607"]
    #[inline(always)]
    pub fn b607(&self) -> B607_R {
        B607_R::new(((self.bits >> 31) & 1) != 0)
    }
}
// Auto-generated (svd2rust) writer proxies for MPCBB2_VCTR18: each
// method returns a single-bit field writer; the second const generic
// argument is the bit offset within the 32-bit register.
impl W {
    #[doc = "Bit 0 - B576"]
    #[inline(always)]
    #[must_use]
    pub fn b576(&mut self) -> B576_W<MPCBB2_VCTR18_SPEC, 0> {
        B576_W::new(self)
    }
    #[doc = "Bit 1 - B577"]
    #[inline(always)]
    #[must_use]
    pub fn b577(&mut self) -> B577_W<MPCBB2_VCTR18_SPEC, 1> {
        B577_W::new(self)
    }
    #[doc = "Bit 2 - B578"]
    #[inline(always)]
    #[must_use]
    pub fn b578(&mut self) -> B578_W<MPCBB2_VCTR18_SPEC, 2> {
        B578_W::new(self)
    }
    #[doc = "Bit 3 - B579"]
    #[inline(always)]
    #[must_use]
    pub fn b579(&mut self) -> B579_W<MPCBB2_VCTR18_SPEC, 3> {
        B579_W::new(self)
    }
    #[doc = "Bit 4 - B580"]
    #[inline(always)]
    #[must_use]
    pub fn b580(&mut self) -> B580_W<MPCBB2_VCTR18_SPEC, 4> {
        B580_W::new(self)
    }
    #[doc = "Bit 5 - B581"]
    #[inline(always)]
    #[must_use]
    pub fn b581(&mut self) -> B581_W<MPCBB2_VCTR18_SPEC, 5> {
        B581_W::new(self)
    }
    #[doc = "Bit 6 - B582"]
    #[inline(always)]
    #[must_use]
    pub fn b582(&mut self) -> B582_W<MPCBB2_VCTR18_SPEC, 6> {
        B582_W::new(self)
    }
    #[doc = "Bit 7 - B583"]
    #[inline(always)]
    #[must_use]
    pub fn b583(&mut self) -> B583_W<MPCBB2_VCTR18_SPEC, 7> {
        B583_W::new(self)
    }
    #[doc = "Bit 8 - B584"]
    #[inline(always)]
    #[must_use]
    pub fn b584(&mut self) -> B584_W<MPCBB2_VCTR18_SPEC, 8> {
        B584_W::new(self)
    }
    #[doc = "Bit 9 - B585"]
    #[inline(always)]
    #[must_use]
    pub fn b585(&mut self) -> B585_W<MPCBB2_VCTR18_SPEC, 9> {
        B585_W::new(self)
    }
    #[doc = "Bit 10 - B586"]
    #[inline(always)]
    #[must_use]
    pub fn b586(&mut self) -> B586_W<MPCBB2_VCTR18_SPEC, 10> {
        B586_W::new(self)
    }
    #[doc = "Bit 11 - B587"]
    #[inline(always)]
    #[must_use]
    pub fn b587(&mut self) -> B587_W<MPCBB2_VCTR18_SPEC, 11> {
        B587_W::new(self)
    }
    #[doc = "Bit 12 - B588"]
    #[inline(always)]
    #[must_use]
    pub fn b588(&mut self) -> B588_W<MPCBB2_VCTR18_SPEC, 12> {
        B588_W::new(self)
    }
    #[doc = "Bit 13 - B589"]
    #[inline(always)]
    #[must_use]
    pub fn b589(&mut self) -> B589_W<MPCBB2_VCTR18_SPEC, 13> {
        B589_W::new(self)
    }
    #[doc = "Bit 14 - B590"]
    #[inline(always)]
    #[must_use]
    pub fn b590(&mut self) -> B590_W<MPCBB2_VCTR18_SPEC, 14> {
        B590_W::new(self)
    }
    #[doc = "Bit 15 - B591"]
    #[inline(always)]
    #[must_use]
    pub fn b591(&mut self) -> B591_W<MPCBB2_VCTR18_SPEC, 15> {
        B591_W::new(self)
    }
    #[doc = "Bit 16 - B592"]
    #[inline(always)]
    #[must_use]
    pub fn b592(&mut self) -> B592_W<MPCBB2_VCTR18_SPEC, 16> {
        B592_W::new(self)
    }
    #[doc = "Bit 17 - B593"]
    #[inline(always)]
    #[must_use]
    pub fn b593(&mut self) -> B593_W<MPCBB2_VCTR18_SPEC, 17> {
        B593_W::new(self)
    }
    #[doc = "Bit 18 - B594"]
    #[inline(always)]
    #[must_use]
    pub fn b594(&mut self) -> B594_W<MPCBB2_VCTR18_SPEC, 18> {
        B594_W::new(self)
    }
    #[doc = "Bit 19 - B595"]
    #[inline(always)]
    #[must_use]
    pub fn b595(&mut self) -> B595_W<MPCBB2_VCTR18_SPEC, 19> {
        B595_W::new(self)
    }
    #[doc = "Bit 20 - B596"]
    #[inline(always)]
    #[must_use]
    pub fn b596(&mut self) -> B596_W<MPCBB2_VCTR18_SPEC, 20> {
        B596_W::new(self)
    }
    #[doc = "Bit 21 - B597"]
    #[inline(always)]
    #[must_use]
    pub fn b597(&mut self) -> B597_W<MPCBB2_VCTR18_SPEC, 21> {
        B597_W::new(self)
    }
    #[doc = "Bit 22 - B598"]
    #[inline(always)]
    #[must_use]
    pub fn b598(&mut self) -> B598_W<MPCBB2_VCTR18_SPEC, 22> {
        B598_W::new(self)
    }
    #[doc = "Bit 23 - B599"]
    #[inline(always)]
    #[must_use]
    pub fn b599(&mut self) -> B599_W<MPCBB2_VCTR18_SPEC, 23> {
        B599_W::new(self)
    }
    #[doc = "Bit 24 - B600"]
    #[inline(always)]
    #[must_use]
    pub fn b600(&mut self) -> B600_W<MPCBB2_VCTR18_SPEC, 24> {
        B600_W::new(self)
    }
    #[doc = "Bit 25 - B601"]
    #[inline(always)]
    #[must_use]
    pub fn b601(&mut self) -> B601_W<MPCBB2_VCTR18_SPEC, 25> {
        B601_W::new(self)
    }
    #[doc = "Bit 26 - B602"]
    #[inline(always)]
    #[must_use]
    pub fn b602(&mut self) -> B602_W<MPCBB2_VCTR18_SPEC, 26> {
        B602_W::new(self)
    }
    #[doc = "Bit 27 - B603"]
    #[inline(always)]
    #[must_use]
    pub fn b603(&mut self) -> B603_W<MPCBB2_VCTR18_SPEC, 27> {
        B603_W::new(self)
    }
    #[doc = "Bit 28 - B604"]
    #[inline(always)]
    #[must_use]
    pub fn b604(&mut self) -> B604_W<MPCBB2_VCTR18_SPEC, 28> {
        B604_W::new(self)
    }
    #[doc = "Bit 29 - B605"]
    #[inline(always)]
    #[must_use]
    pub fn b605(&mut self) -> B605_W<MPCBB2_VCTR18_SPEC, 29> {
        B605_W::new(self)
    }
    #[doc = "Bit 30 - B606"]
    #[inline(always)]
    #[must_use]
    pub fn b606(&mut self) -> B606_W<MPCBB2_VCTR18_SPEC, 30> {
        B606_W::new(self)
    }
    #[doc = "Bit 31 - B607"]
    #[inline(always)]
    #[must_use]
    pub fn b607(&mut self) -> B607_W<MPCBB2_VCTR18_SPEC, 31> {
        B607_W::new(self)
    }
    // Escape hatch: writes the whole register at once. Marked `unsafe`
    // by the generator; the caller is responsible for the raw value
    // being meaningful for this register.
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "MPCBBx vector register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`mpcbb2_vctr18::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`mpcbb2_vctr18::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct MPCBB2_VCTR18_SPEC;
impl crate::RegisterSpec for MPCBB2_VCTR18_SPEC {
type Ux = u32;
}
#[doc = "`read()` method returns [`mpcbb2_vctr18::R`](R) reader structure"]
impl crate::Readable for MPCBB2_VCTR18_SPEC {}
#[doc = "`write(|w| ..)` method takes [`mpcbb2_vctr18::W`](W) writer structure"]
impl crate::Writable for MPCBB2_VCTR18_SPEC {
const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets MPCBB2_VCTR18 to value 0"]
impl crate::Resettable for MPCBB2_VCTR18_SPEC {
const RESET_VALUE: Self::Ux = 0;
}
|
use super::atom::common::Common;
use super::atom::{
btn::{self, Btn},
text::Text,
};
use super::molecule::modal::{self, Modal};
use super::organism::modal_resource::{self, ModalResource};
use crate::arena::{
block,
resource::{self, LoadFrom},
ArenaMut, BlockKind, BlockMut, BlockRef,
};
use crate::libs::random_id::U128Id;
use isaribi::{
style,
styled::{Style, Styled},
};
use kagura::prelude::*;
use nusa::prelude::*;
use std::collections::HashSet;
/// Props for [`ModalCreateTerranTexture`]: the block arena plus the
/// world and terran-texture blocks this modal reads and updates.
pub struct Props {
    pub arena: ArenaMut,
    pub world: BlockMut<block::World>,
    pub terran_texture: BlockMut<block::TerranTexture>,
}
/// Internal messages handled by [`ModalCreateTerranTexture::update`].
pub enum Msg {
    /// No-op placeholder for ignored sub-component events.
    NoOp,
    /// Forward an [`On`] event to the parent component.
    Sub(On),
    /// Close the currently shown sub-modal.
    CloseModal,
    /// Commit the edited texture slots into the terran-texture block.
    /// (NOTE(review): "Texure" is a typo for "Texture", kept as-is
    /// because renaming would touch every usage site.)
    CreateTexure,
    /// Open the image picker for the texture slot at the given index.
    SelectCustomTextureImage(u32),
    /// Store the picked image (or `None` to clear) for the slot index.
    SetCustomTextureImage(u32, Option<BlockRef<resource::BlockTexture>>),
}
/// Events emitted to the parent component.
pub enum On {
    /// The modal requested to be closed.
    Close,
    /// Arena blocks changed; ids are split by inserted vs. updated.
    UpdateBlocks {
        insert: HashSet<U128Id>,
        update: HashSet<U128Id>,
    },
}
/// Modal dialog for building a terran (block) texture out of images
/// chosen from the resource list.
pub struct ModalCreateTerranTexture {
    arena: ArenaMut,
    world: BlockMut<block::World>,
    terran_texture: BlockMut<block::TerranTexture>,
    // One editable image slot per texture index (TEX_NUM slots).
    custom_texture: [BlockRef<resource::BlockTexture>; block::terran_texture::TEX_NUM],
    // Which sub-modal (if any) is currently open.
    showing_modal: ShowingModal,
}
/// Sub-modal state: either nothing, or the resource picker opened for a
/// specific texture slot index.
enum ShowingModal {
    None,
    SelectCustomTextureImage(u32),
}
// Kagura component wiring: declares the associated Props/Msg/Event
// types and marks the component as HTML-renderable.
impl Component for ModalCreateTerranTexture {
    type Props = Props;
    type Msg = Msg;
    type Event = On;
}
impl HtmlComponent for ModalCreateTerranTexture {}
impl Constructor for ModalCreateTerranTexture {
    /// Builds the modal state from props. The editable slots are seeded
    /// from the existing terran texture when its block can be read
    /// (`map` returning `Some`); otherwise every slot starts empty.
    fn constructor(props: Props) -> Self {
        let custom_texture = props
            .terran_texture
            .map(|texture| {
                array_macro::array![i => BlockRef::clone(&texture.textures()[i]); block::terran_texture::TEX_NUM]
            })
            .unwrap_or_else(
                || array_macro::array![_ => BlockRef::none(); block::terran_texture::TEX_NUM],
            );
        Self {
            arena: props.arena,
            world: props.world,
            terran_texture: props.terran_texture,
            custom_texture,
            showing_modal: ShowingModal::None,
        }
    }
}
impl Update for ModalCreateTerranTexture {
    /// Message handler; see [`Msg`] for the individual cases.
    fn update(mut self: Pin<&mut Self>, msg: Msg) -> Cmd<Self> {
        match msg {
            Msg::NoOp => Cmd::none(),
            Msg::Sub(sub) => Cmd::submit(sub),
            Msg::CloseModal => {
                self.showing_modal = ShowingModal::None;
                Cmd::none()
            }
            Msg::CreateTexure => {
                // Snapshot the edited slots as (index, texture) pairs.
                let textures = self
                    .custom_texture
                    .iter()
                    .enumerate()
                    .map(|(tex_idx, tex)| (tex_idx as u32, BlockRef::clone(&tex)))
                    .collect::<Vec<_>>();
                // First try to update the existing terran-texture block
                // in place; `update` reports whether that succeeded.
                let is_update_texture = self.terran_texture.update(|terran_texture| {
                    terran_texture.set_textures(textures.into_iter());
                });
                if is_update_texture {
                    // In-place update worked: report the modified block
                    // and close the modal.
                    Cmd::list(vec![
                        Cmd::submit(On::UpdateBlocks {
                            insert: set! {},
                            update: set! {self.terran_texture.id()},
                        }),
                        Cmd::submit(On::Close),
                    ])
                } else {
                    // Fallback: create a fresh TerranTexture block,
                    // insert it into the arena, and attach it to the
                    // world, then report both changes.
                    let mut terran_texture = block::TerranTexture::new();
                    terran_texture.set_textures(
                        self.custom_texture
                            .iter()
                            .enumerate()
                            .map(|(tex_idx, tex)| (tex_idx as u32, BlockRef::clone(&tex))),
                    );
                    let terran_texture = self.arena.insert(terran_texture);
                    let terran_texture_id = terran_texture.id();
                    self.world.update(|world| {
                        world.push_terran_texture_block(terran_texture);
                    });
                    Cmd::list(vec![
                        Cmd::submit(On::UpdateBlocks {
                            insert: set! {terran_texture_id},
                            update: set! {self.world.id()},
                        }),
                        Cmd::submit(On::Close),
                    ])
                }
            }
            Msg::SelectCustomTextureImage(direction) => {
                // Open the resource picker for this slot.
                self.showing_modal = ShowingModal::SelectCustomTextureImage(direction);
                Cmd::none()
            }
            Msg::SetCustomTextureImage(direction, img) => {
                // Bounds check guards against an out-of-range slot index.
                if direction < block::terran_texture::TEX_NUM as u32 {
                    self.custom_texture[direction as usize] =
                        img.unwrap_or_else(|| BlockRef::none());
                }
                self.showing_modal = ShowingModal::None;
                Cmd::none()
            }
        }
    }
}
impl Render<Html> for ModalCreateTerranTexture {
    type Children = ();
    /// Renders the modal shell with the slot editor, a "create" button,
    /// and any currently open sub-modal.
    fn render(&self, _: Self::Children) -> Html {
        Self::styled(Modal::new(
            self,
            None,
            modal::Props {},
            Sub::map(|sub| match sub {
                modal::On::Close => Msg::Sub(On::Close),
            }),
            (
                // Modal title / subtitle (user-facing Japanese text).
                String::from("新規ブロック用テクスチャを作成"),
                String::from(""),
                vec![
                    Html::div(
                        Attributes::new().class(Self::class("base")),
                        Events::new(),
                        vec![
                            // Left column: creation-mode menu.
                            Html::div(
                                Attributes::new().class(Self::class("kind-list")),
                                Events::new(),
                                vec![Btn::with_variant(
                                    btn::Variant::PrimaryLikeMenu,
                                    Attributes::new(),
                                    Events::new(),
                                    vec![Html::text("画像から作成")],
                                )],
                            ),
                            // Right column: the texture-slot grid.
                            self.render_custom_texture_editer(),
                            // Bottom-right: the commit button.
                            Html::div(
                                Attributes::new().class(Self::class("controller")),
                                Events::new(),
                                vec![Btn::primary(
                                    Attributes::new(),
                                    Events::new().on_click(self, |_| Msg::CreateTexure),
                                    vec![Html::text("作成")],
                                )],
                            ),
                        ],
                    ),
                    // Sub-modal (resource picker) if one is open.
                    self.render_modal(),
                ],
            ),
        ))
    }
}
impl ModalCreateTerranTexture {
    /// Renders the currently open sub-modal, or nothing.
    fn render_modal(&self) -> Html {
        match &self.showing_modal {
            ShowingModal::None => Common::none(),
            // Resource picker restricted to block textures; its result
            // is routed back into the slot that opened it.
            ShowingModal::SelectCustomTextureImage(direction) => ModalResource::empty(
                self,
                None,
                modal_resource::Props {
                    arena: ArenaMut::clone(&self.arena),
                    world: BlockMut::clone(&self.world),
                    filter: set! {BlockKind::BlockTexture},
                    is_selecter: true,
                    title: String::from(modal_resource::title::SELECT_TEXTURE),
                },
                Sub::map({
                    let direction = *direction;
                    move |sub| match sub {
                        modal_resource::On::SelectBlockTexture(img) => {
                            Msg::SetCustomTextureImage(direction, Some(img))
                        }
                        modal_resource::On::SelectNone => {
                            Msg::SetCustomTextureImage(direction, None)
                        }
                        modal_resource::On::Close => Msg::CloseModal,
                        _ => Msg::NoOp,
                    }
                }),
            ),
        }
    }
    /// Renders the grid of editable texture slots.
    fn render_custom_texture_editer(&self) -> Html {
        Html::div(
            Attributes::new()
                .class(Self::class("editer"))
                .class(Self::class("custom-editer")),
            Events::new(),
            self.custom_texture
                .iter()
                .enumerate()
                .map(|(tex_idx, tex)| self.render_custom_texture_cell(tex_idx as u32, tex))
                .collect(),
        )
    }
    /// Renders a single slot cell: the slot's image (when the texture
    /// block can be read) plus a caption; clicking opens the picker.
    fn render_custom_texture_cell(
        &self,
        direction: u32,
        texture: &BlockRef<resource::BlockTexture>,
    ) -> Html {
        Html::div(
            Attributes::new().class(Self::class("texture-cell")),
            Events::new().on_click(self, move |_| Msg::SelectCustomTextureImage(direction)),
            vec![
                if let Some(src) = texture.map(|texture| texture.data().url().to_string()) {
                    Html::img(
                        Attributes::new()
                            .draggable("false")
                            .class(Common::bg_transparent())
                            .src(src),
                        Events::new(),
                        vec![],
                    )
                } else {
                    Html::none()
                },
                Text::span(format!("ブロック-{}", direction)),
            ],
        )
    }
}
impl Styled for ModalCreateTerranTexture {
    /// Component-scoped CSS: two-column modal layout, the slot grid,
    /// and the individual texture cells.
    fn style() -> Style {
        // Side length of one texture cell, in rem.
        let cell_size = 7.5;
        style! {
            ".base" {
                "width": "100%";
                "height": "100%";
                "display": "grid";
                "grid-template-columns": "max-content 1fr";
                "grid-template-rows": "1fr max-content";
                "overflow": "hidden";
            }
            ".kind-list" {
                "grid-column": "1 / 2";
                "grid-row": "1 / 3";
                "display": "flex";
                "flex-direction": "column";
                "overflow-y": "scroll";
            }
            ".editer" {
                "grid-column": "2 / 3";
                "grid-row": "1 / 2";
                "overflow-y": "scroll";
            }
            ".custom-editer" {
                "display": "grid";
                "grid-template-columns": "repeat(4, max-content)";
                "grid-template-rows": "repeat(3, max-content)";
                "column-gap": ".35rem";
                "row-gap": ".35rem";
                "justify-content": "center";
            }
            ".prefab-editer" {
                "display": "flex";
                "justify-content": "center";
                "align-items": "center";
            }
            ".controller" {
                "grid-column": "2 / 3";
                "grid-row": "2 / 3";
                "display": "grid";
                "grid-auto-columns": "max-content";
                "justify-content": "end";
            }
            ".texture-cell" {
                "width": format!("{}rem", cell_size);
                "height": format!("{}rem", cell_size);
                "border-style": "solid";
                "border-width": ".1rem";
                "position": "relative";
            }
            ".texture-cell:hover" {
                "cursor": "pointer";
            }
            ".texture-cell > img" {
                "width": "100%";
                "height": "100%";
                "object-fit": "fill";
            }
            ".texture-cell > span" {
                "position": "absolute";
                "top": "0";
                "left": "0";
                "color": crate::libs::color::Pallet::gray(0);
                "-webkit-text-stroke": format!("1px {}", crate::libs::color::Pallet::gray(9));
            }
        }
    }
}
|
use std::collections::HashSet;
use std::env;
/// Runs 64 rounds of the AoC 2017 "knot hash" over a 256-element list.
///
/// `lengths` is the sequence of pinch lengths applied each round; the
/// current position and skip size carry over between rounds. Returns
/// the final sparse hash (the permuted `0..=255` list).
///
/// Idiom fix: takes `&[u8]` instead of `&Vec<u8>` (existing callers
/// passing `&vec` still compile via deref coercion) and uses
/// `slice::swap` instead of a manual temporary.
fn knot_hash(lengths: &[u8]) -> Vec<u8> {
    let mut list: Vec<u8> = (0..=255).collect();
    let len = list.len();
    let mut pos = 0usize;
    let mut skip_size = 0;
    for _round in 0..64 {
        for &length in lengths {
            let length = length as usize;
            // Reverse the `length`-long segment starting at `pos`,
            // wrapping around the end of the list.
            for i in 0..length / 2 {
                let start = (pos + i) % len;
                let end = (pos + length - i - 1) % len;
                list.swap(start, end);
            }
            pos = (pos + length + skip_size) % len;
            skip_size += 1;
        }
    }
    list
}
/// Computes the dense AoC 2017 knot hash of `s`: the ASCII bytes of `s`
/// plus the standard suffix `[17, 31, 73, 47, 23]` are used as pinch
/// lengths, and the resulting 256-byte sparse hash is XOR-compressed
/// into 16 bytes.
fn knot_hash_str(s: &str) -> Vec<u8> {
    let mut lengths = s.as_bytes().to_vec();
    lengths.extend_from_slice(&[17, 31, 73, 47, 23]);
    let sparse = knot_hash(&lengths);
    // XOR each 16-byte chunk of the sparse hash down to a single byte.
    sparse
        .chunks(16)
        .map(|chunk| chunk.iter().fold(0, |acc, b| acc ^ b))
        .collect()
}
/// Depth-first flood fill: inserts `(row, column)` and every `true`
/// cell 4-connected to it into `set`. Cells already in `set` are not
/// revisited, which also bounds the recursion.
///
/// Fixes a latent bug: the column bound used `grid.len()` (the number
/// of rows) instead of the row's width, which only happened to work for
/// square grids. Also takes `&[Vec<bool>]` instead of `&Vec<Vec<bool>>`
/// (callers passing `&vec` still compile via deref coercion).
fn visit(grid: &[Vec<bool>], row: i32, column: i32, set: &mut HashSet<(i32, i32)>) {
    set.insert((row, column));
    // Left, down, right, up.
    for (dr, dc) in &[(0, -1), (1, 0), (0, 1), (-1, 0)] {
        let nr = row + dr;
        let nc = column + dc;
        if nr >= 0
            && (nr as usize) < grid.len()
            && nc >= 0
            // Bound the column by the row's own width, not the row count.
            && (nc as usize) < grid[nr as usize].len()
            && grid[nr as usize][nc as usize]
            && !set.contains(&(nr, nc))
        {
            visit(grid, nr, nc, set);
        }
    }
}
/// Counts 4-connected groups of `true` cells in `grid`.
///
/// Generalized from the original hard-coded `128 x 128` bounds to the
/// grid's actual dimensions; behavior on the 128x128 puzzle input is
/// unchanged. The `&Vec<Vec<bool>>` signature is kept so the body can
/// call `visit` whether or not it has been migrated to slices.
fn count_groups(grid: &Vec<Vec<bool>>) -> usize {
    let mut seen = HashSet::new();
    let mut groups = 0usize;
    for row in 0..grid.len() as i32 {
        for column in 0..grid[row as usize].len() as i32 {
            if grid[row as usize][column as usize] && !seen.contains(&(row, column)) {
                // Each unvisited `true` cell starts a new group; the
                // flood fill marks the rest of that group as seen.
                visit(grid, row, column, &mut seen);
                groups += 1;
            }
        }
    }
    groups
}
/// AoC 2017 day 14 entry point: builds a 128x128 bit grid from knot
/// hashes of "<input>-<row>" and prints the used-square count (part 1)
/// and the number of 4-connected regions (part 2).
fn main() {
    let args: Vec<String> = env::args().collect();
    // Robustness: fail with a usage message instead of a bare index
    // panic when no argument is supplied.
    let input = args.get(1).expect("usage: <program> <key-string>");
    // Row i is the knot hash of "input-i", expanded to bits MSB-first.
    let grid: Vec<Vec<bool>> = (0..128)
        .map(|row| {
            knot_hash_str(&format!("{}-{}", input, row))
                .iter()
                .copied()
                .flat_map(|byte| (0..8).rev().map(move |shift| (byte >> shift) & 1 == 1))
                .collect()
        })
        .collect();
    // Part 1: number of used (true) squares.
    println!(
        "part 1: {}",
        grid.iter().flatten().filter(|&&used| used).count()
    );
    // Part 2: number of 4-connected regions of used squares.
    println!("part 2: {}", count_groups(&grid));
}
|
#![doc = "generated by AutoRust 0.1.0"]
#![allow(non_camel_case_types)]
#![allow(unused_imports)]
use serde::{Deserialize, Serialize};
// Error envelope: optional machine-readable code, human-readable
// message, and the target of the error. (AutoRust-generated model.)
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ErrorBase {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub code: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub message: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub target: Option<String>,
}
// Same fields as `ErrorBase`, plus a list of nested sub-errors.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ErrorDetails {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub code: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub message: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub target: Option<String>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub details: Vec<ErrorBase>,
}
// Top-level error response wrapper.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ErrorResponse {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub error: Option<ErrorDetails>,
}
// Common resource envelope (id/name/type/tags); `#[serde(flatten)]`ed
// into concrete resources such as `Dimension` and `Query` below.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Resource {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tags: Option<serde_json::Value>,
}
// Report configuration: type, timeframe, optional explicit time period
// and dataset description. (AutoRust-generated model.)
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ReportConfigDefinition {
    #[serde(rename = "type")]
    pub type_: report_config_definition::Type,
    pub timeframe: report_config_definition::Timeframe,
    // Only meaningful when `timeframe` is `Custom` — TODO confirm
    // against the service API.
    #[serde(rename = "timePeriod", default, skip_serializing_if = "Option::is_none")]
    pub time_period: Option<ReportConfigTimePeriod>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub dataset: Option<ReportConfigDataset>,
}
pub mod report_config_definition {
    use super::*;
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Type {
        Usage,
    }
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Timeframe {
        WeekToDate,
        MonthToDate,
        YearToDate,
        Custom,
    }
}
// Explicit from/to range for a custom timeframe.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ReportConfigTimePeriod {
    pub from: String,
    pub to: String,
}
// Dataset description: granularity, column selection, aggregation,
// grouping, sorting and filtering.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ReportConfigDataset {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub granularity: Option<report_config_dataset::Granularity>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub configuration: Option<ReportConfigDatasetConfiguration>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub aggregation: Option<serde_json::Value>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub grouping: Vec<ReportConfigGrouping>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub sorting: Vec<ReportConfigSorting>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub filter: Option<ReportConfigFilter>,
}
pub mod report_config_dataset {
    use super::*;
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Granularity {
        Daily,
        Monthly,
    }
}
// Explicit column list for a dataset.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ReportConfigDatasetConfiguration {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub columns: Vec<String>,
}
// A named aggregation (currently only `Sum`).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ReportConfigAggregation {
    pub name: String,
    pub function: report_config_aggregation::Function,
}
pub mod report_config_aggregation {
    use super::*;
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Function {
        Sum,
    }
}
// Sort directive for a named column.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ReportConfigSorting {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub direction: Option<report_config_sorting::Direction>,
    pub name: String,
}
pub mod report_config_sorting {
    use super::*;
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Direction {
        Ascending,
        Descending,
    }
}
// Group-by directive: a column name plus whether it is a tag or a
// dimension.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ReportConfigGrouping {
    #[serde(rename = "type")]
    pub type_: ReportConfigColumnType,
    pub name: String,
}
// Recursive boolean filter tree over dimension/tag comparisons.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ReportConfigFilter {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub and: Vec<ReportConfigFilter>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub or: Vec<ReportConfigFilter>,
    // Boxed to break the recursive type cycle.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub not: Box<Option<ReportConfigFilter>>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub dimension: Option<ReportConfigComparisonExpression>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tag: Option<ReportConfigComparisonExpression>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum ReportConfigColumnType {
    Tag,
    Dimension,
}
// Leaf comparison: column `name` <operator> `values`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ReportConfigComparisonExpression {
    pub name: String,
    pub operator: report_config_comparison_expression::Operator,
    pub values: Vec<String>,
}
pub mod report_config_comparison_expression {
    use super::*;
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Operator {
        In,
        Contains,
    }
}
// Paged list of `Dimension` resources. (AutoRust-generated model.)
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DimensionsListResult {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<Dimension>,
}
// A dimension resource: the common envelope plus its properties.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Dimension {
    #[serde(flatten)]
    pub resource: Resource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<DimensionProperties>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DimensionProperties {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub description: Option<String>,
    #[serde(rename = "filterEnabled", default, skip_serializing_if = "Option::is_none")]
    pub filter_enabled: Option<bool>,
    #[serde(rename = "groupingEnabled", default, skip_serializing_if = "Option::is_none")]
    pub grouping_enabled: Option<bool>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub data: Vec<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub total: Option<i32>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub category: Option<String>,
    #[serde(rename = "usageStart", default, skip_serializing_if = "Option::is_none")]
    pub usage_start: Option<String>,
    #[serde(rename = "usageEnd", default, skip_serializing_if = "Option::is_none")]
    pub usage_end: Option<String>,
    // Continuation link for paging.
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
// Paged list of `Query` resources.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct QueryResult {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<Query>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Query {
    #[serde(flatten)]
    pub resource: Resource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<QueryProperties>,
}
// Tabular query payload: column headers plus untyped row values.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct QueryProperties {
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub columns: Vec<QueryColumn>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub rows: Vec<Vec<serde_json::Value>>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct QueryColumn {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
}
// List of external-provider connector definitions.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ConnectorDefinitionListResult {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<ConnectorDefinition>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ConnectorDefinition {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub kind: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<ConnectorProperties>,
}
// Connector details: provider billing account, credentials references,
// status/billing model, and collection bookkeeping.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ConnectorProperties {
    #[serde(rename = "displayName", default, skip_serializing_if = "Option::is_none")]
    pub display_name: Option<String>,
    #[serde(rename = "providerBillingAccountId", default, skip_serializing_if = "Option::is_none")]
    pub provider_billing_account_id: Option<String>,
    #[serde(rename = "providerBillingAccountDisplayName", default, skip_serializing_if = "Option::is_none")]
    pub provider_billing_account_display_name: Option<String>,
    #[serde(rename = "credentialsKey", default, skip_serializing_if = "Option::is_none")]
    pub credentials_key: Option<String>,
    #[serde(rename = "credentialsSecret", default, skip_serializing_if = "Option::is_none")]
    pub credentials_secret: Option<String>,
    #[serde(rename = "reportId", default, skip_serializing_if = "Option::is_none")]
    pub report_id: Option<String>,
    #[serde(rename = "createdOn", default, skip_serializing_if = "Option::is_none")]
    pub created_on: Option<String>,
    #[serde(rename = "modifiedOn", default, skip_serializing_if = "Option::is_none")]
    pub modified_on: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub status: Option<connector_properties::Status>,
    #[serde(rename = "externalBillingAccountId", default, skip_serializing_if = "Option::is_none")]
    pub external_billing_account_id: Option<String>,
    #[serde(rename = "defaultManagementGroupId", default, skip_serializing_if = "Option::is_none")]
    pub default_management_group_id: Option<String>,
    #[serde(rename = "subscriptionId", default, skip_serializing_if = "Option::is_none")]
    pub subscription_id: Option<String>,
    #[serde(rename = "billingModel", default, skip_serializing_if = "Option::is_none")]
    pub billing_model: Option<connector_properties::BillingModel>,
    #[serde(rename = "daysTrialRemaining", default, skip_serializing_if = "Option::is_none")]
    pub days_trial_remaining: Option<i64>,
    #[serde(rename = "collectionInfo", default, skip_serializing_if = "Option::is_none")]
    pub collection_info: Option<ConnectorCollectionInfo>,
}
pub mod connector_properties {
    use super::*;
    // Wire values are lowercase; the renames keep Rust naming idiomatic.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Status {
        #[serde(rename = "active")]
        Active,
        #[serde(rename = "error")]
        Error,
        #[serde(rename = "expired")]
        Expired,
        #[serde(rename = "warning")]
        Warning,
    }
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum BillingModel {
        #[serde(rename = "trial")]
        Trial,
        #[serde(rename = "autoUpgrade")]
        AutoUpgrade,
        #[serde(rename = "premium")]
        Premium,
        #[serde(rename = "expired")]
        Expired,
    }
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ConnectorCollectionInfo {
#[serde(rename = "lastChecked", default, skip_serializing_if = "Option::is_none")]
pub last_checked: Option<String>,
#[serde(rename = "sourceLastUpdated", default, skip_serializing_if = "Option::is_none")]
pub source_last_updated: Option<String>,
#[serde(rename = "lastUpdated", default, skip_serializing_if = "Option::is_none")]
pub last_updated: Option<String>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub error: Option<ConnectorCollectionErrorInfo>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ConnectorCollectionErrorInfo {
#[serde(rename = "errorMessage", default, skip_serializing_if = "Option::is_none")]
pub error_message: Option<String>,
#[serde(rename = "errorInnerMessage", default, skip_serializing_if = "Option::is_none")]
pub error_inner_message: Option<String>,
#[serde(rename = "errorCode", default, skip_serializing_if = "Option::is_none")]
pub error_code: Option<String>,
#[serde(rename = "errorStartTime", default, skip_serializing_if = "Option::is_none")]
pub error_start_time: Option<String>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct CheckEligibilityDefinition {
#[serde(default, skip_serializing_if = "Option::is_none")]
pub kind: Option<String>,
#[serde(rename = "credentialsKey", default, skip_serializing_if = "Option::is_none")]
pub credentials_key: Option<String>,
#[serde(rename = "credentialsSecret", default, skip_serializing_if = "Option::is_none")]
pub credentials_secret: Option<String>,
#[serde(rename = "reportId", default, skip_serializing_if = "Option::is_none")]
pub report_id: Option<String>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ExternalBillingAccountDefinitionListResult {
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub value: Vec<ExternalBillingAccountDefinition>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ExternalBillingAccountDefinition {
#[serde(default, skip_serializing_if = "Option::is_none")]
pub id: Option<String>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub name: Option<String>,
#[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
pub type_: Option<String>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub kind: Option<String>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub properties: Option<ExternalBillingAccountProperties>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ExternalBillingAccountProperties {
#[serde(rename = "displayName", default, skip_serializing_if = "Option::is_none")]
pub display_name: Option<String>,
#[serde(rename = "providerBillingAccountId", default, skip_serializing_if = "Option::is_none")]
pub provider_billing_account_id: Option<String>,
#[serde(rename = "connectorId", default, skip_serializing_if = "Option::is_none")]
pub connector_id: Option<String>,
#[serde(rename = "collectionInfo", default, skip_serializing_if = "Option::is_none")]
pub collection_info: Option<ConnectorCollectionInfo>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ExternalSubscriptionDefinitionListResult {
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub value: Vec<ExternalSubscriptionDefinition>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ExternalSubscriptionDefinition {
#[serde(default, skip_serializing_if = "Option::is_none")]
pub id: Option<String>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub name: Option<String>,
#[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
pub type_: Option<String>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub kind: Option<String>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub properties: Option<ExternalSubscriptionProperties>,
}
/// Properties describing an externally-billed subscription (provider and
/// management-group identifiers, plus connector collection state).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ExternalSubscriptionProperties {
    #[serde(rename = "displayName", default, skip_serializing_if = "Option::is_none")]
    pub display_name: Option<String>,
    #[serde(rename = "managementGroupId", default, skip_serializing_if = "Option::is_none")]
    pub management_group_id: Option<String>,
    #[serde(rename = "providerBillingAccountId", default, skip_serializing_if = "Option::is_none")]
    pub provider_billing_account_id: Option<String>,
    #[serde(rename = "providerAccountId", default, skip_serializing_if = "Option::is_none")]
    pub provider_account_id: Option<String>,
    #[serde(rename = "externalBillingAccountId", default, skip_serializing_if = "Option::is_none")]
    pub external_billing_account_id: Option<String>,
    #[serde(rename = "collectionInfo", default, skip_serializing_if = "Option::is_none")]
    pub collection_info: Option<ConnectorCollectionInfo>,
}
/// Request body carrying a plain list of external subscription ids.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ExternalSubscriptionIdListRequest {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<String>,
}
/// One page of REST `Operation`s plus the continuation link.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct OperationListResult {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<Operation>,
    /// URL of the next result page when the listing is paginated.
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
/// A REST API operation exposed by the provider.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Operation {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub display: Option<operation::Display>,
}
pub mod operation {
    use super::*;
    /// Human-readable description of an [`Operation`].
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub struct Display {
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub provider: Option<String>,
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub resource: Option<String>,
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub operation: Option<String>,
    }
}
/// Result wrapper for listing showback rules.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ShowbackRuleListResult {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<ShowbackRule>,
}
/// A showback rule resource: ARM id/name/type plus typed properties.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ShowbackRule {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    /// Resource type; serialized as `type` (renamed to dodge the keyword).
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<ShowbackRuleProperties>,
}
/// Properties common to every showback rule kind.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ShowbackRuleProperties {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub description: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub status: Option<showback_rule_properties::Status>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub version: Option<i64>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub scopes: Vec<Scope>,
    #[serde(rename = "creationTime", default, skip_serializing_if = "Option::is_none")]
    pub creation_time: Option<String>,
    #[serde(rename = "deprecationTime", default, skip_serializing_if = "Option::is_none")]
    pub deprecation_time: Option<String>,
    #[serde(rename = "modificationTime", default, skip_serializing_if = "Option::is_none")]
    pub modification_time: Option<String>,
    /// Discriminator for the concrete rule payload; always serialized
    /// (the only non-optional, non-defaulted field here).
    #[serde(rename = "ruleType")]
    pub rule_type: showback_rule_properties::RuleType,
}
pub mod showback_rule_properties {
    use super::*;
    /// Lifecycle state of a showback rule.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Status {
        NotActive,
        Active,
    }
    /// Selects the concrete rule shape (see `CustomPriceDetailsKind` /
    /// `CostAllocationDetailsKind` below).
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum RuleType {
        CustomPrice,
        CostAllocation,
    }
}
/// A scope a showback rule applies to; scopes nest via `child_scope`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Scope {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
    /// Nested child scope, boxed to break the recursive type.
    // NOTE(review): `Option<Box<Scope>>` would be the conventional shape
    // (the `Option::is_none` check then applies directly instead of through
    // deref coercion); changing it would break existing constructors, so it
    // is documented rather than altered.
    #[serde(rename = "childScope", default, skip_serializing_if = "Option::is_none")]
    pub child_scope: Box<Option<Scope>>,
}
/// A percentage markup applied by a custom-price rule.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Markup {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub percentage: Option<String>,
}
/// Showback rule of kind `CustomPrice`: the shared properties flattened
/// into this struct, plus pricing details.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct CustomPriceDetailsKind {
    #[serde(flatten)]
    pub showback_rule_properties: ShowbackRuleProperties,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub details: Option<CustomPriceDetails>,
}
/// Pricing inputs for a custom-price rule.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct CustomPriceDetails {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub pricesheet: Option<String>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub benefits: Vec<String>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub markups: Vec<Markup>,
}
/// Showback rule of kind `CostAllocation`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct CostAllocationDetailsKind {
    #[serde(flatten)]
    pub showback_rule_properties: ShowbackRuleProperties,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub details: Option<CostAllocationDetails>,
}
/// Allocation policy of a cost-allocation rule.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct CostAllocationDetails {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub policy: Option<cost_allocation_details::Policy>,
}
pub mod cost_allocation_details {
    use super::*;
    /// How allocated costs are split across targets.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Policy {
        Proportional,
        Evenly,
        Fixed,
    }
}
|
use crate::Value;
// FFI declarations for the OCaml runtime's startup/shutdown and
// value-lookup entry points (declared in OCaml's caml/callback.h).
extern "C" {
    // Start the OCaml runtime and run the program's main.
    // `argv` is a NULL-terminated array of NUL-terminated C strings.
    pub fn caml_main(argv: *const *const i8);
    // Initialize the OCaml runtime without running main.
    pub fn caml_startup(argv: *const *const i8);
    // Tear the runtime down (pairs with caml_startup).
    pub fn caml_shutdown();
    // Look up a value registered on the OCaml side via Callback.register.
    // NOTE(review): `*const i8` presumes `c_char == i8`; on targets where
    // `c_char` is `u8` (e.g. aarch64-linux) these signatures will not line
    // up with C headers — consider `std::os::raw::c_char`. Changing the
    // types would change the public API, so flagged only.
    pub fn caml_named_value(name: *const i8) -> *const Value;
}
|
use std::borrow::Cow;
use std::fs;
use std::io;
use std::path::{Path, PathBuf};
use typed_builder::TypedBuilder;
/// Fully-validated input for the release-nuking operation: a path confirmed
/// (by `find_repo`) to sit at or below a repository root.
#[non_exhaustive]
#[derive(Debug, Clone, TypedBuilder)]
pub struct Request<'a> {
    /// Path at or below the repository root.
    pub repo_path: Cow<'a, Path>,
}
/// Optional pre-supplied inputs; anything left `None` is prompted for
/// interactively by `new_from_user_input`.
#[non_exhaustive]
#[derive(Debug, Clone, Default, TypedBuilder)]
pub struct PartialRequest<'a> {
    /// Repository path, if already known to the caller.
    #[builder(default)]
    pub repo_path: Option<&'a Path>,
}
#[derive(Debug, thiserror::Error)]
pub enum RequestError {
#[error("Provided path was invalid")]
PathError(#[source] io::Error),
#[error("Could not find repository at provided path")]
NoRepo(#[from] FindRepoError),
#[error("Could not read payload TOML file")]
Io(#[from] std::io::Error),
#[error("Could not read payload TOML file")]
PayloadToml(#[from] toml::de::Error),
#[error("Invalid input")]
InvalidInput,
}
/// Errors from walking the filesystem in search of a repository index.
#[derive(Debug, thiserror::Error)]
pub enum FindRepoError {
    /// Underlying filesystem error.
    #[error("IO error")]
    Io(#[from] io::Error),
    /// Neither the path nor any ancestor contained a loadable `index.toml`.
    #[error("No repository found for given path")]
    NotFound,
}
/// Attempt to load a repository index (`index.toml`) from directory `path`.
///
/// Returns `None` when the file is missing/unreadable or fails to parse.
fn open_repo(path: &Path) -> Option<pahkat_types::repo::Repository> {
    let index_path = path.join("index.toml");
    fs::read_to_string(index_path)
        .ok()
        .and_then(|contents| toml::from_str(&contents).ok())
}
/// Walk from `path` up through its ancestors and return the first directory
/// containing a loadable repository index.
///
/// A trailing `index.toml` component is stripped first, so callers may pass
/// either the repository directory or the index file itself.
fn find_repo(path: &Path) -> Result<&Path, FindRepoError> {
    let start = if path.ends_with("index.toml") {
        path.parent().unwrap()
    } else {
        path
    };
    // `successors` yields `start`, then each parent in turn — the same
    // traversal order as an explicit check-then-loop.
    std::iter::successors(Some(start), |p| p.parent())
        .find(|candidate| open_repo(candidate).is_some())
        .ok_or(FindRepoError::NotFound)
}
impl<'a> crate::Request for Request<'a> {
    type Error = RequestError;
    type Partial = PartialRequest<'a>;
    /// Build a full `Request`, prompting on the terminal for any field
    /// missing from `partial`, then verifying a repository exists at the
    /// resulting path.
    fn new_from_user_input(partial: Self::Partial) -> Result<Self, Self::Error> {
        use dialoguer::Input;
        let repo_path = match partial.repo_path {
            Some(path) => Cow::Borrowed(path),
            // Not supplied: prompt, defaulting to the current working
            // directory (or "." when it is unavailable / not valid UTF-8).
            None => Input::<String>::new()
                .default(
                    std::env::current_dir()
                        .ok()
                        .and_then(|x| x.to_str().map(str::to_string))
                        .unwrap_or_else(|| ".".into()),
                )
                .with_prompt("Repository Path")
                .interact()
                .map(|p| Cow::Owned(PathBuf::from(p)))
                .map_err(RequestError::PathError)?,
        };
        // Validation only — the discovered repo root itself is not kept.
        let _ = find_repo(&repo_path)?;
        Ok(Request { repo_path })
    }
}
/// Errors produced while rewriting package descriptors (see `nuke_releases`).
/// Each variant carries the path that was being processed.
#[derive(Debug, thiserror::Error)]
pub enum Error {
    #[error("Failed to read descriptor index: `{0}`")]
    ReadFailed(PathBuf, #[source] io::Error),
    #[error("Failed to read TOML file `{0}`")]
    ReadToml(PathBuf, #[source] toml::de::Error),
    #[error("Failed to create directory `{0}`")]
    DirCreateFailed(PathBuf, #[source] io::Error),
    #[error("Failed to write TOML file `{0}`")]
    WriteToml(PathBuf, #[source] io::Error),
    #[error("Failed to serialize TOML for `{0}`")]
    SerializeToml(PathBuf, #[source] toml::ser::Error),
    #[error("Could not find repository at provided path")]
    NoRepo(#[from] FindRepoError),
}
pub fn nuke_releases<'a>(request: Request<'a>) -> Result<(), Error> {
log::debug!("{:?}", request);
let pkgs_dir = find_repo(&request.repo_path)?.join("packages");
let pkgs_paths = std::fs::read_dir(pkgs_dir).unwrap();
for pkg_path in pkgs_paths {
let path = pkg_path.unwrap().path();
if !path.is_dir() {
continue;
}
let pkg_path = path.join("index.toml");
let pkg_file = std::fs::read_to_string(&pkg_path)
.map_err(|e| Error::ReadFailed(pkg_path.clone(), e))?;
let mut descriptor: pahkat_types::package::Descriptor =
toml::from_str(&pkg_file).map_err(|e| Error::ReadToml(pkg_path.clone(), e))?;
descriptor.release = vec![];
// Write the toml
let data = toml::to_string_pretty(&descriptor)
.map_err(|e| Error::SerializeToml(pkg_path.clone(), e))?;
fs::write(&pkg_path, data).map_err(|e| Error::WriteToml(pkg_path.to_path_buf(), e))?;
log::info!("Wrote descriptor to {}", pkg_path.display());
}
Ok(())
}
|
// Raw FFI bindings crate: lints are relaxed because the declarations below
// are machine-generated and follow C naming, not Rust naming.
#![allow(non_upper_case_globals)]
#![allow(non_camel_case_types)]
#![allow(non_snake_case)]
#![no_std]
#![cfg_attr(feature = "stdbuild", feature(libc))]
extern crate libc;
// Splice the generated declarations (presumably bindgen output — confirm)
// into the crate root.
include!("bindings.rs");
|
//! Memory map for STM32F30X microcontrollers
#![deny(missing_docs)]
#![deny(warnings)]
#![no_std]
extern crate volatile_register;
#[allow(missing_docs)]
pub mod btim;
#[allow(missing_docs)]
pub mod dbgmcu;
#[allow(missing_docs)]
pub mod gpio;
#[allow(missing_docs)]
pub mod gptim;
#[allow(missing_docs)]
pub mod i2c;
#[allow(missing_docs)]
pub mod rcc;
#[allow(missing_docs)]
pub mod spi;
#[allow(missing_docs)]
pub mod usart;
use btim::BTim;
use dbgmcu::Dbgmcu;
use gpio::Gpio;
use gptim::GpTim;
use i2c::I2c;
use rcc::Rcc;
use spi::Spi;
use usart::Usart;
// Peripheral base addresses from the STM32F30x memory map (reference
// manual). Commented-out entries are peripherals for which no register
// block binding exists yet in this crate.
const GPIOA: usize = 0x48000000;
const GPIOB: usize = 0x48000400;
const GPIOC: usize = 0x48000800;
const GPIOD: usize = 0x48000c00;
const GPIOE: usize = 0x48001000;
const GPIOF: usize = 0x48001400;
const GPIOG: usize = 0x48001800;
const GPIOH: usize = 0x48001c00;
// const TSC: usize = 0x40024000;
// const CRC: usize = 0x40023000;
// const Flash: usize = 0x40022000;
const RCC: usize = 0x40021000;
// const DMA1: usize = 0x40020000;
// const DMA2: usize = 0x40020400;
const TIM2: usize = 0x40000000;
const TIM3: usize = 0x40000400;
const TIM4: usize = 0x40000800;
// const TIM15: usize = 0x40014000;
// const TIM16: usize = 0x40014400;
// const TIM17: usize = 0x40014800;
const USART1: usize = 0x40013800;
const USART2: usize = 0x40004400;
const USART3: usize = 0x40004800;
// const UART4: usize = 0x40004c00;
// const UART5: usize = 0x40005000;
const SPI1: usize = 0x40013000;
// const SPI2: usize = 0x40003800;
// const SPI3: usize = 0x40003c00;
// const I2S2ext: usize = 0x40003400;
// const I2S3ext: usize = 0x40004000;
// const EXTI: usize = 0x40010400;
// const COMP: usize = 0x4001001c;
// const PWR: usize = 0x40007000;
// const CAN: usize = 0x40006400;
// const USB_FS: usize = 0x40005c00;
const I2C1: usize = 0x40005400;
// const I2C2: usize = 0x40005800;
// const IWDG: usize = 0x40003000;
// const WWDG: usize = 0x40002c00;
// const RTC: usize = 0x40002800;
const TIM6: usize = 0x40001000;
const TIM7: usize = 0x40001400;
// const DAC: usize = 0x40007400;
// const NVIC: usize = 0xe000e000;
// const FPU: usize = 0xe000ed88;
const DBGMCU: usize = 0xe0042000;
// const TIM1: usize = 0x40012c00;
// const TIM8: usize = 0x40013400;
// const ADC1: usize = 0x50000000;
// const ADC2: usize = 0x50000100;
// const ADC3: usize = 0x50000400;
// const ADC4: usize = 0x50000500;
// const ADC1_2: usize = 0x50000300;
// const ADC3_4: usize = 0x50000700;
// const SYSCFG: usize = 0x40010000;
// const OPAMP: usize = 0x40010038;
// NOTE(review): these accessors hand out `&'static` references to MMIO
// register blocks, and a safe accessor can alias a `&'static mut` obtained
// from the corresponding `_mut` variant — unsound by today's aliasing
// rules. Fixing this would change the crate's public API (token-based
// ownership, as in modern svd2rust), so it is flagged rather than changed.
/// DBGMCU register block (&'static)
pub fn dbgmcu() -> &'static Dbgmcu {
    unsafe { deref(DBGMCU) }
}
/// DBGMCU register block (&'static mut)
/// Safety: caller must guarantee no aliasing reference to this block exists.
pub unsafe fn dbgmcu_mut() -> &'static mut Dbgmcu {
    deref_mut(DBGMCU)
}
/// GPIOA register block (&'static)
pub fn gpioa() -> &'static Gpio {
    unsafe { deref(GPIOA) }
}
/// GPIOA register block (&'static mut)
/// Safety: caller must guarantee no aliasing reference to this block exists.
pub unsafe fn gpioa_mut() -> &'static mut Gpio {
    deref_mut(GPIOA)
}
/// GPIOB register block (&'static)
pub fn gpiob() -> &'static Gpio {
    unsafe { deref(GPIOB) }
}
/// GPIOB register block (&'static mut)
/// Safety: caller must guarantee no aliasing reference to this block exists.
pub unsafe fn gpiob_mut() -> &'static mut Gpio {
    deref_mut(GPIOB)
}
/// GPIOC register block (&'static)
pub fn gpioc() -> &'static Gpio {
    unsafe { deref(GPIOC) }
}
/// GPIOC register block (&'static mut)
/// Safety: caller must guarantee no aliasing reference to this block exists.
pub unsafe fn gpioc_mut() -> &'static mut Gpio {
    deref_mut(GPIOC)
}
/// GPIOD register block (&'static)
pub fn gpiod() -> &'static Gpio {
    unsafe { deref(GPIOD) }
}
/// GPIOD register block (&'static mut)
/// Safety: caller must guarantee no aliasing reference to this block exists.
pub unsafe fn gpiod_mut() -> &'static mut Gpio {
    deref_mut(GPIOD)
}
/// GPIOE register block (&'static)
pub fn gpioe() -> &'static Gpio {
    unsafe { deref(GPIOE) }
}
/// GPIOE register block (&'static mut)
/// Safety: caller must guarantee no aliasing reference to this block exists.
pub unsafe fn gpioe_mut() -> &'static mut Gpio {
    deref_mut(GPIOE)
}
/// GPIOF register block (&'static)
pub fn gpiof() -> &'static Gpio {
    unsafe { deref(GPIOF) }
}
/// GPIOF register block (&'static mut)
/// Safety: caller must guarantee no aliasing reference to this block exists.
pub unsafe fn gpiof_mut() -> &'static mut Gpio {
    deref_mut(GPIOF)
}
/// GPIOG register block (&'static)
pub fn gpiog() -> &'static Gpio {
    unsafe { deref(GPIOG) }
}
/// GPIOG register block (&'static mut)
/// Safety: caller must guarantee no aliasing reference to this block exists.
pub unsafe fn gpiog_mut() -> &'static mut Gpio {
    deref_mut(GPIOG)
}
/// GPIOH register block (&'static)
pub fn gpioh() -> &'static Gpio {
    unsafe { deref(GPIOH) }
}
/// GPIOH register block (&'static mut)
/// Safety: caller must guarantee no aliasing reference to this block exists.
pub unsafe fn gpioh_mut() -> &'static mut Gpio {
    deref_mut(GPIOH)
}
/// I2C1 register block (&'static)
pub fn i2c1() -> &'static I2c {
    unsafe { deref(I2C1) }
}
/// I2C1 register block (&'static mut)
/// Safety: caller must guarantee no aliasing reference to this block exists.
pub unsafe fn i2c1_mut() -> &'static mut I2c {
    deref_mut(I2C1)
}
/// RCC register block (&'static)
pub fn rcc() -> &'static Rcc {
    unsafe { deref(RCC) }
}
/// RCC register block (&'static mut)
/// Safety: caller must guarantee no aliasing reference to this block exists.
pub unsafe fn rcc_mut() -> &'static mut Rcc {
    deref_mut(RCC)
}
/// SPI1 register block (&'static)
pub fn spi1() -> &'static Spi {
    unsafe { deref(SPI1) }
}
/// SPI1 register block (&'static mut)
/// Safety: caller must guarantee no aliasing reference to this block exists.
pub unsafe fn spi1_mut() -> &'static mut Spi {
    deref_mut(SPI1)
}
/// TIM2 register block (&'static)
pub fn tim2() -> &'static GpTim {
    unsafe { deref(TIM2) }
}
/// TIM2 register block (&'static mut)
/// Safety: caller must guarantee no aliasing reference to this block exists.
pub unsafe fn tim2_mut() -> &'static mut GpTim {
    deref_mut(TIM2)
}
/// TIM3 register block (&'static)
pub fn tim3() -> &'static GpTim {
    unsafe { deref(TIM3) }
}
/// TIM3 register block (&'static mut)
/// Safety: caller must guarantee no aliasing reference to this block exists.
pub unsafe fn tim3_mut() -> &'static mut GpTim {
    deref_mut(TIM3)
}
/// TIM4 register block (&'static)
pub fn tim4() -> &'static GpTim {
    unsafe { deref(TIM4) }
}
/// TIM4 register block (&'static mut)
/// Safety: caller must guarantee no aliasing reference to this block exists.
pub unsafe fn tim4_mut() -> &'static mut GpTim {
    deref_mut(TIM4)
}
/// TIM6 register block (&'static)
pub fn tim6() -> &'static BTim {
    unsafe { deref(TIM6) }
}
/// TIM6 register block (&'static mut)
/// Safety: caller must guarantee no aliasing reference to this block exists.
pub unsafe fn tim6_mut() -> &'static mut BTim {
    deref_mut(TIM6)
}
/// TIM7 register block (&'static)
pub fn tim7() -> &'static BTim {
    unsafe { deref(TIM7) }
}
/// TIM7 register block (&'static mut)
/// Safety: caller must guarantee no aliasing reference to this block exists.
pub unsafe fn tim7_mut() -> &'static mut BTim {
    deref_mut(TIM7)
}
/// USART1 register block (&'static)
pub fn usart1() -> &'static Usart {
    unsafe { deref(USART1) }
}
/// USART1 register block (&'static mut)
/// Safety: caller must guarantee no aliasing reference to this block exists.
pub unsafe fn usart1_mut() -> &'static mut Usart {
    deref_mut(USART1)
}
/// USART2 register block (&'static)
pub fn usart2() -> &'static Usart {
    unsafe { deref(USART2) }
}
/// USART2 register block (&'static mut)
/// Safety: caller must guarantee no aliasing reference to this block exists.
pub unsafe fn usart2_mut() -> &'static mut Usart {
    deref_mut(USART2)
}
/// USART3 register block (&'static)
pub fn usart3() -> &'static Usart {
    unsafe { deref(USART3) }
}
/// USART3 register block (&'static mut)
/// Safety: caller must guarantee no aliasing reference to this block exists.
pub unsafe fn usart3_mut() -> &'static mut Usart {
    deref_mut(USART3)
}
/// Reinterpret `address` as a shared reference to a register block.
/// Safety: `address` must be the base of a valid, properly-aligned `T`
/// register block that is live for the whole program ('static).
unsafe fn deref<T>(address: usize) -> &'static T {
    &*(address as *const T)
}
/// Reinterpret `address` as an exclusive reference to a register block.
/// Safety: as for `deref`, plus the caller must guarantee exclusivity.
unsafe fn deref_mut<T>(address: usize) -> &'static mut T {
    &mut *(address as *mut T)
}
// Here we extend the peripheral API -- AKA ~~svd2rust is~~ SVD files are great
// but not perfect
use core::ptr;
impl spi::Dr {
    /// Reads a byte (`u8`) from this register
    ///
    /// Volatile so the load is neither elided nor reordered; an 8-bit
    /// access width is used (presumably to select byte-wise data packing
    /// on the SPI data register — confirm against the reference manual).
    pub fn read_u8(&self) -> u8 {
        // SAFETY: `self` points at a valid register location, so an
        // 8-bit volatile read from its address stays in bounds.
        unsafe { ptr::read_volatile(self as *const _ as *const u8) }
    }
    /// Writes a byte (`u8`) to this register
    ///
    /// Volatile 8-bit store; same rationale as `read_u8`.
    pub fn write_u8(&mut self, value: u8) {
        // SAFETY: `&mut self` guarantees exclusive access to a valid
        // register location; the 8-bit volatile write stays in bounds.
        unsafe { ptr::write_volatile(self as *mut _ as *mut u8, value) }
    }
}
|
/// Build script: compile the gRPC service definitions under `proto/` with
/// tonic-build so the generated modules are available at compile time.
fn main() {
    const PROTO_FILES: &[&str] = &[
        "proto/api/agents.proto",
        "proto/api/login.proto",
        "proto/api/routers.proto",
        "proto/api/tunnels.proto",
        "proto/api/users.proto",
        "proto/api/permissions.proto",
        "proto/api/permission_membership.proto",
    ];
    let result = tonic_build::configure().compile(PROTO_FILES, &["proto"]);
    if let Err(e) = result {
        panic!("Failed to compile protos {:?}", e);
    }
}
|
use std::mem;
use crate::*;
pub fn place_clues <'a> (
line: & 'a Line,
clues: & 'a [LineSize],
) -> CluesPlacerIter <'a> {
CluesPlacer::new (line, clues).into_iter ()
}
/// Iterative backtracking search over all placements of `clues` on `line`.
/// Allocations (cache, stack) can be recycled across puzzles via `into_new`.
#[ derive (Default) ]
pub struct CluesPlacer <'a> {
    // memoized dead ends, keyed by (clue index, offset)
    cache: Cache,
    // one frame per clue placed so far
    stack: Vec <Frame <'a>>,
    line: & 'a Line,
    clues: & 'a [LineSize],
    // set once `advance` has seeded the initial frame
    started: bool,
}
/// One level of the search: a single clue being slid along the line.
struct Frame <'a> {
    // index in `line` where this clue's scan window begins
    offset: LineSize,
    clue_placer: CluePlacer <'a>,
    // absolute start position currently chosen for this clue
    position: LineSize,
    // true when at least one complete solution passed through this frame
    found: bool,
}
impl<'a> CluesPlacer<'a> {
    /// Create a placer for `clues` over `line`.
    ///
    /// Fixed: removed a stray `println!("CluesPlacer::new")` debug trace
    /// that wrote to stdout on every construction of this library type.
    pub fn new(line: &'a Line, clues: &'a [LineSize]) -> CluesPlacer<'a> {
        CluesPlacer {
            cache: Cache::new(clues.len(), line.len()),
            stack: Vec::with_capacity(clues.len()),
            line,
            clues,
            started: false,
        }
    }
    /// Drop the borrows while keeping the allocations (cache + stack) so
    /// they can be recycled via [`CluesPlacer::into_new`].
    pub fn into_default<'b>(self) -> CluesPlacer<'static> {
        CluesPlacer {
            cache: self.cache,
            // NOTE(review): the transmute only rewrites the stack's
            // lifetime parameter; soundness relies on `into_default()` on
            // the Vec having removed every `'a`-borrowing frame — confirm
            // against that method's definition.
            stack: unsafe { mem::transmute(self.stack.into_default()) },
            line: Default::default(),
            clues: Default::default(),
            started: false,
        }
    }
    /// Rebind recycled allocations to a new `line`/`clues` pair.
    pub fn into_new<'b>(self, line: &'b Line, clues: &'b [LineSize]) -> CluesPlacer<'b> {
        let copy = self.into_default();
        CluesPlacer {
            cache: copy.cache.into_new(clues.len(), line.len()),
            stack: copy.stack,
            line,
            clues,
            started: false,
        }
    }
    /// One depth-first search step: returns `true` when the stack now holds
    /// the next complete placement, `false` once the space is exhausted.
    pub fn advance(&mut self) -> bool {
        if !self.started {
            self.started = true;
            // Seed the search; succeeds immediately only for an empty clue
            // list on a line that can stay empty.
            if self.push(0, 0) {
                return true;
            }
        }
        loop {
            let mut frame = match self.stack.pop() {
                Some(val) => val,
                None => return false,
            };
            frame.position = match frame.clue_placer.next() {
                Some(val) => val + frame.offset,
                None => {
                    // Subtree exhausted without any solution: memoize the
                    // (depth, offset) dead end so later searches prune it.
                    if !frame.found {
                        self.cache.mark_bad(self.stack.len(), frame.offset);
                    }
                    continue;
                }
            };
            let frame_depth = self.stack.len();
            let frame_position = frame.position;
            self.stack.push(frame);
            // Recurse past this clue plus the mandatory one-cell gap.
            if self.push(frame_depth + 1, frame_position + self.clues[frame_depth] + 1) {
                for frame in self.stack.iter_mut() {
                    frame.found = true;
                }
                return true;
            }
        }
    }
    /// Start positions of the placement currently on the stack.
    pub fn current(&'a self) -> impl Iterator<Item = LineSize> + 'a {
        self.stack.iter().map(|frame| frame.position)
    }
    /// Push a frame for clue `depth`, starting its scan at `offset`.
    ///
    /// Base case (`depth == clues.len()`): returns whether the remainder of
    /// the line can stay empty, i.e. whether the stack is a full solution.
    /// Otherwise pushes a new frame (unless pruned) and returns `false`.
    fn push(&mut self, depth: usize, offset: LineSize) -> bool {
        if self.clues.len() == depth {
            return self.line.iter().skip(offset as usize).all(Cell::can_empty);
        }
        if offset + self.clues[depth] > self.line.len() {
            return false;
        }
        if self.cache.is_bad(depth, offset) {
            return false;
        }
        let clue_placer = place_clue(&self.line[offset..], self.clues[depth]);
        self.stack.push(Frame {
            offset,
            clue_placer,
            position: 0,
            found: false,
        });
        false
    }
}
impl <'a> IntoIterator for CluesPlacer <'a> {
    type Item = Vec <LineSize>;
    type IntoIter = CluesPlacerIter <'a>;
    /// Wrap the placer in its iterator adapter.
    fn into_iter (self) -> CluesPlacerIter <'a> {
        CluesPlacerIter {
            inner: self,
        }
    }
}
/// Iterator adapter yielding one `Vec` of clue start positions per solution.
pub struct CluesPlacerIter <'a> {
    inner: CluesPlacer <'a>,
}
impl<'a> Iterator for CluesPlacerIter<'a> {
    type Item = Vec<LineSize>;
    /// Yield the next complete placement, or `None` once exhausted.
    fn next(&mut self) -> Option<Vec<LineSize>> {
        match self.inner.advance() {
            true => Some(self.inner.current().collect()),
            false => None,
        }
    }
}
/// Bitmap of memoized dead ends: `data[clue * line_size + cell]` is true
/// when starting clue `clue` at `cell` can never lead to a solution.
#[ derive (Default) ]
struct Cache {
    data: Vec <bool>,
    // row stride of `data` (cells per clue row)
    line_size: LineSize,
}
impl Cache {
fn new (
num_clues: usize,
line_size: LineSize,
) -> Cache {
let cache: Cache = Default::default ();
cache.into_new (
num_clues,
line_size,
)
}
fn into_new (
mut self,
num_clues: usize,
line_size: LineSize,
) -> Cache {
self.data.truncate (0);
self.data.resize (num_clues * line_size as usize, false);
self.line_size = line_size;
self
}
fn is_bad (
& self,
clue_index: usize,
cell_index: LineSize,
) -> bool {
self.data [
clue_index * self.line_size as usize + cell_index as usize
]
}
fn mark_bad (
& mut self,
clue_index: usize,
cell_index: LineSize,
) {
self.data [
clue_index * self.line_size as usize + cell_index as usize
] = true
}
}
// Exhaustive placements checked against hand-computed expectations.
// Line syntax: '-' unknown, '#' filled, ' ' empty.
// NOTE(review): in test_place_clues_7 the expected position 15 exceeds the
// 13-character literal as rendered here — confirm the string's internal
// whitespace survived formatting/transcription.
#[ cfg (test) ]
mod tests {
    use super::*;
    #[ test ]
    fn test_place_clues_1 () {
        assert_eq! (
            place_clues (
                & LineBuf::from_str (" ----").unwrap (),
                & vec! [ 3 ],
            ).collect::<Vec <Vec <LineSize>>> (),
            vec! [
                vec! [ 1 ],
                vec! [ 2 ],
            ],
        );
    }
    #[ test ]
    fn test_place_clues_2 () {
        assert_eq! (
            place_clues (
                & LineBuf::from_str ("----- ----").unwrap (),
                & vec! [ 3, 4 ],
            ).collect::<Vec <Vec <LineSize>>> (),
            vec! [
                vec! [ 0, 6 ],
                vec! [ 1, 6 ],
                vec! [ 2, 6 ],
            ],
        );
    }
    #[ test ]
    fn test_place_clues_3 () {
        assert_eq! (
            place_clues (
                & LineBuf::from_str ("--- #-----").unwrap (),
                & vec! [ 2, 3 ],
            ).collect::<Vec <Vec <LineSize>>> (),
            vec! [
                vec! [ 0, 4 ],
                vec! [ 1, 4 ],
                vec! [ 4, 7 ],
            ],
        );
    }
    #[ test ]
    fn test_place_clues_4 () {
        assert_eq! (
            place_clues (
                & LineBuf::from_str ("----# ----").unwrap (),
                & vec! [ 3, 4 ],
            ).collect::<Vec <Vec <LineSize>>> (),
            vec! [
                vec! [ 2, 6 ],
            ],
        );
    }
    #[ test ]
    fn test_place_clues_5 () {
        assert_eq! (
            place_clues (
                & LineBuf::from_str ("-#- -#-").unwrap (),
                & vec! [ 3 ],
            ).collect::<Vec <Vec <LineSize>>> (),
            vec! [ ] as Vec <Vec <LineSize>>,
        );
    }
    #[ test ]
    fn test_place_clues_6 () {
        assert_eq! (
            place_clues (
                & LineBuf::from_str (" ### #-- #-- ").unwrap (),
                & vec! [ 3, 3 ],
            ).collect::<Vec <Vec <LineSize>>> (),
            vec! [ ] as Vec <Vec <LineSize>>,
        );
    }
    #[ test ]
    fn test_place_clues_7 () {
        assert_eq! (
            place_clues (
                & LineBuf::from_str (" ### --- #-- ").unwrap (),
                & vec! [ 3, 3 ],
            ).collect::<Vec <Vec <LineSize>>> (),
            vec! [
                vec! [ 1, 15 ],
            ],
        );
    }
    #[ test ]
    fn test_place_clues_8 () {
        // no clues on an unknown line: one solution, the empty placement
        assert_eq! (
            place_clues (
                & LineBuf::from_str ("-----").unwrap (),
                & vec! [ ],
            ).collect::<Vec <Vec <LineSize>>> (),
            vec! [
                vec! [ ],
            ],
        );
    }
    #[ test ]
    fn test_place_clues_9 () {
        // no clues but a filled cell present: unsatisfiable
        assert_eq! (
            place_clues (
                & LineBuf::from_str ("--#--").unwrap (),
                & vec! [ ],
            ).collect::<Vec <Vec <LineSize>>> (),
            vec! [ ] as Vec <Vec <LineSize>>,
        );
    }
}
|
#![doc = "generated by AutoRust 0.1.0"]
#![allow(unused_mut)]
#![allow(unused_variables)]
#![allow(unused_imports)]
use crate::models::*;
use reqwest::StatusCode;
use snafu::{ResultExt, Snafu};
pub mod billing_accounts {
use crate::models::*;
use reqwest::StatusCode;
use snafu::{ResultExt, Snafu};
    /// Lists the billing accounts visible to the caller.
    ///
    /// `expand` is forwarded as the `$expand` query parameter when set.
    pub async fn list(
        operation_config: &crate::OperationConfig,
        expand: Option<&str>,
    ) -> std::result::Result<BillingAccountListResult, list::Error> {
        let client = &operation_config.client;
        let uri_str = &format!("{}/providers/Microsoft.Billing/billingAccounts", &operation_config.base_path,);
        let mut req_builder = client.get(uri_str);
        // Attach a bearer token when the config carries a credential.
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(list::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        if let Some(expand) = expand {
            req_builder = req_builder.query(&[("$expand", expand)]);
        }
        let req = req_builder.build().context(list::BuildRequestError)?;
        let rsp = client.execute(req).await.context(list::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => {
                let body: bytes::Bytes = rsp.bytes().await.context(list::ResponseBytesError)?;
                let rsp_value: BillingAccountListResult = serde_json::from_slice(&body).context(list::DeserializeError { body })?;
                Ok(rsp_value)
            }
            // Any other status: decode the service's error payload and
            // surface it as `list::Error::DefaultResponse`.
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(list::ResponseBytesError)?;
                let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(list::DeserializeError { body })?;
                list::DefaultResponse {
                    status_code,
                    value: rsp_value,
                }
                .fail()
            }
        }
    }
    /// Error type for [`list`].
    pub mod list {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            /// Service answered with a non-200 status and an error payload.
            DefaultResponse {
                status_code: StatusCode,
                value: models::ErrorResponse,
            },
            /// The HTTP request could not be constructed.
            BuildRequestError {
                source: reqwest::Error,
            },
            /// Sending the request or receiving headers failed.
            ExecuteRequestError {
                source: reqwest::Error,
            },
            /// Reading the response body failed.
            ResponseBytesError {
                source: reqwest::Error,
            },
            /// The body (kept for diagnostics) failed to deserialize.
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            /// Token acquisition from the credential failed.
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
    /// Fetches a single billing account by name.
    ///
    /// `expand` is forwarded as the `$expand` query parameter when set.
    pub async fn get(
        operation_config: &crate::OperationConfig,
        billing_account_name: &str,
        expand: Option<&str>,
    ) -> std::result::Result<BillingAccount, get::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/providers/Microsoft.Billing/billingAccounts/{}",
            &operation_config.base_path, billing_account_name
        );
        let mut req_builder = client.get(uri_str);
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(get::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        if let Some(expand) = expand {
            req_builder = req_builder.query(&[("$expand", expand)]);
        }
        let req = req_builder.build().context(get::BuildRequestError)?;
        let rsp = client.execute(req).await.context(get::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => {
                let body: bytes::Bytes = rsp.bytes().await.context(get::ResponseBytesError)?;
                let rsp_value: BillingAccount = serde_json::from_slice(&body).context(get::DeserializeError { body })?;
                Ok(rsp_value)
            }
            // Non-200: surface the service's error payload.
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(get::ResponseBytesError)?;
                let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(get::DeserializeError { body })?;
                get::DefaultResponse {
                    status_code,
                    value: rsp_value,
                }
                .fail()
            }
        }
    }
    /// Error type for [`get`].
    pub mod get {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            DefaultResponse {
                status_code: StatusCode,
                value: models::ErrorResponse,
            },
            BuildRequestError {
                source: reqwest::Error,
            },
            ExecuteRequestError {
                source: reqwest::Error,
            },
            ResponseBytesError {
                source: reqwest::Error,
            },
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
    /// Applies a PATCH update to a billing account.
    ///
    /// Returns `Ok200` with the updated account, or `Accepted202` when the
    /// service processes the change asynchronously.
    pub async fn update(
        operation_config: &crate::OperationConfig,
        billing_account_name: &str,
        parameters: &BillingAccountUpdateRequest,
    ) -> std::result::Result<update::Response, update::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/providers/Microsoft.Billing/billingAccounts/{}",
            &operation_config.base_path, billing_account_name
        );
        let mut req_builder = client.patch(uri_str);
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(update::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        req_builder = req_builder.json(parameters);
        let req = req_builder.build().context(update::BuildRequestError)?;
        let rsp = client.execute(req).await.context(update::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => {
                let body: bytes::Bytes = rsp.bytes().await.context(update::ResponseBytesError)?;
                let rsp_value: BillingAccount = serde_json::from_slice(&body).context(update::DeserializeError { body })?;
                Ok(update::Response::Ok200(rsp_value))
            }
            StatusCode::ACCEPTED => Ok(update::Response::Accepted202),
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(update::ResponseBytesError)?;
                let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(update::DeserializeError { body })?;
                update::DefaultResponse {
                    status_code,
                    value: rsp_value,
                }
                .fail()
            }
        }
    }
    /// Success and error types for [`update`].
    pub mod update {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        /// The two success shapes of the PATCH call.
        #[derive(Debug)]
        pub enum Response {
            Ok200(BillingAccount),
            Accepted202,
        }
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            DefaultResponse {
                status_code: StatusCode,
                value: models::ErrorResponse,
            },
            BuildRequestError {
                source: reqwest::Error,
            },
            ExecuteRequestError {
                source: reqwest::Error,
            },
            ResponseBytesError {
                source: reqwest::Error,
            },
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
    /// Lists the invoice sections on which the caller may create Azure
    /// subscriptions (a POST with an empty body).
    pub async fn list_invoice_sections_by_create_subscription_permission(
        operation_config: &crate::OperationConfig,
        billing_account_name: &str,
    ) -> std::result::Result<InvoiceSectionListWithCreateSubPermissionResult, list_invoice_sections_by_create_subscription_permission::Error>
    {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/providers/Microsoft.Billing/billingAccounts/{}/listInvoiceSectionsWithCreateSubscriptionPermission",
            &operation_config.base_path, billing_account_name
        );
        let mut req_builder = client.post(uri_str);
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(list_invoice_sections_by_create_subscription_permission::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        // Empty POST body: send an explicit zero Content-Length.
        req_builder = req_builder.header(reqwest::header::CONTENT_LENGTH, 0);
        let req = req_builder
            .build()
            .context(list_invoice_sections_by_create_subscription_permission::BuildRequestError)?;
        let rsp = client
            .execute(req)
            .await
            .context(list_invoice_sections_by_create_subscription_permission::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => {
                let body: bytes::Bytes = rsp
                    .bytes()
                    .await
                    .context(list_invoice_sections_by_create_subscription_permission::ResponseBytesError)?;
                let rsp_value: InvoiceSectionListWithCreateSubPermissionResult = serde_json::from_slice(&body)
                    .context(list_invoice_sections_by_create_subscription_permission::DeserializeError { body })?;
                Ok(rsp_value)
            }
            status_code => {
                let body: bytes::Bytes = rsp
                    .bytes()
                    .await
                    .context(list_invoice_sections_by_create_subscription_permission::ResponseBytesError)?;
                let rsp_value: ErrorResponse = serde_json::from_slice(&body)
                    .context(list_invoice_sections_by_create_subscription_permission::DeserializeError { body })?;
                list_invoice_sections_by_create_subscription_permission::DefaultResponse {
                    status_code,
                    value: rsp_value,
                }
                .fail()
            }
        }
    }
    /// Error type for [`list_invoice_sections_by_create_subscription_permission`].
    pub mod list_invoice_sections_by_create_subscription_permission {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            DefaultResponse {
                status_code: StatusCode,
                value: models::ErrorResponse,
            },
            BuildRequestError {
                source: reqwest::Error,
            },
            ExecuteRequestError {
                source: reqwest::Error,
            },
            ResponseBytesError {
                source: reqwest::Error,
            },
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
}
/// Operations for validating billing addresses.
pub mod address {
    use crate::models::*;
    use reqwest::StatusCode;
    use snafu::{ResultExt, Snafu};
    /// Validates `address` by POSTing it to the validateAddress endpoint.
    pub async fn validate(
        operation_config: &crate::OperationConfig,
        address: &AddressDetails,
    ) -> std::result::Result<ValidateAddressResponse, validate::Error> {
        let client = &operation_config.client;
        let uri_str = &format!("{}/providers/Microsoft.Billing/validateAddress", &operation_config.base_path,);
        let mut req_builder = client.post(uri_str);
        // Attach a bearer token when the config carries a credential.
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(validate::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        req_builder = req_builder.json(address);
        let req = req_builder.build().context(validate::BuildRequestError)?;
        let rsp = client.execute(req).await.context(validate::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => {
                let body: bytes::Bytes = rsp.bytes().await.context(validate::ResponseBytesError)?;
                let rsp_value: ValidateAddressResponse = serde_json::from_slice(&body).context(validate::DeserializeError { body })?;
                Ok(rsp_value)
            }
            // Non-200: surface the service's error payload.
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(validate::ResponseBytesError)?;
                let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(validate::DeserializeError { body })?;
                validate::DefaultResponse {
                    status_code,
                    value: rsp_value,
                }
                .fail()
            }
        }
    }
    /// Error type for [`validate`].
    pub mod validate {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            DefaultResponse {
                status_code: StatusCode,
                value: models::ErrorResponse,
            },
            BuildRequestError {
                source: reqwest::Error,
            },
            ExecuteRequestError {
                source: reqwest::Error,
            },
            ResponseBytesError {
                source: reqwest::Error,
            },
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
}
/// Operations on a billing profile's available balance.
pub mod available_balances {
    use crate::models::*;
    use reqwest::StatusCode;
    use snafu::{ResultExt, Snafu};
    /// Fetches the default available balance of a billing profile.
    ///
    /// Sends `GET .../billingAccounts/{billing_account_name}/billingProfiles/{billing_profile_name}/availableBalance/default`
    /// and deserializes the `200 OK` body into `AvailableBalance`; any other
    /// status becomes `get::Error::DefaultResponse`.
    pub async fn get(
        operation_config: &crate::OperationConfig,
        billing_account_name: &str,
        billing_profile_name: &str,
    ) -> std::result::Result<AvailableBalance, get::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/providers/Microsoft.Billing/billingAccounts/{}/billingProfiles/{}/availableBalance/default",
            &operation_config.base_path, billing_account_name, billing_profile_name
        );
        let mut req_builder = client.get(uri_str);
        // Attach a bearer token only when the configuration carries a credential.
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(get::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        let req = req_builder.build().context(get::BuildRequestError)?;
        let rsp = client.execute(req).await.context(get::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => {
                let body: bytes::Bytes = rsp.bytes().await.context(get::ResponseBytesError)?;
                let rsp_value: AvailableBalance = serde_json::from_slice(&body).context(get::DeserializeError { body })?;
                Ok(rsp_value)
            }
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(get::ResponseBytesError)?;
                let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(get::DeserializeError { body })?;
                get::DefaultResponse {
                    status_code,
                    value: rsp_value,
                }
                .fail()
            }
        }
    }
    /// Error type and snafu context selectors for [`get`].
    pub mod get {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            DefaultResponse {
                status_code: StatusCode,
                value: models::ErrorResponse,
            },
            BuildRequestError {
                source: reqwest::Error,
            },
            ExecuteRequestError {
                source: reqwest::Error,
            },
            ResponseBytesError {
                source: reqwest::Error,
            },
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
}
/// Operations on billing-profile payment instructions.
pub mod instructions {
    use crate::models::*;
    use reqwest::StatusCode;
    use snafu::{ResultExt, Snafu};
    /// Lists the instructions of a billing profile.
    ///
    /// `GET .../billingAccounts/{billing_account_name}/billingProfiles/{billing_profile_name}/instructions`;
    /// `200 OK` is decoded as `InstructionListResult`, everything else as
    /// `list_by_billing_profile::Error::DefaultResponse`.
    pub async fn list_by_billing_profile(
        operation_config: &crate::OperationConfig,
        billing_account_name: &str,
        billing_profile_name: &str,
    ) -> std::result::Result<InstructionListResult, list_by_billing_profile::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/providers/Microsoft.Billing/billingAccounts/{}/billingProfiles/{}/instructions",
            &operation_config.base_path, billing_account_name, billing_profile_name
        );
        let mut req_builder = client.get(uri_str);
        // Attach a bearer token only when the configuration carries a credential.
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(list_by_billing_profile::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        let req = req_builder.build().context(list_by_billing_profile::BuildRequestError)?;
        let rsp = client.execute(req).await.context(list_by_billing_profile::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => {
                let body: bytes::Bytes = rsp.bytes().await.context(list_by_billing_profile::ResponseBytesError)?;
                let rsp_value: InstructionListResult =
                    serde_json::from_slice(&body).context(list_by_billing_profile::DeserializeError { body })?;
                Ok(rsp_value)
            }
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(list_by_billing_profile::ResponseBytesError)?;
                let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(list_by_billing_profile::DeserializeError { body })?;
                list_by_billing_profile::DefaultResponse {
                    status_code,
                    value: rsp_value,
                }
                .fail()
            }
        }
    }
    /// Error type and snafu context selectors for [`list_by_billing_profile`].
    pub mod list_by_billing_profile {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            DefaultResponse {
                status_code: StatusCode,
                value: models::ErrorResponse,
            },
            BuildRequestError {
                source: reqwest::Error,
            },
            ExecuteRequestError {
                source: reqwest::Error,
            },
            ResponseBytesError {
                source: reqwest::Error,
            },
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
    /// Fetches a single instruction by name.
    ///
    /// `GET .../billingProfiles/{billing_profile_name}/instructions/{instruction_name}`;
    /// `200 OK` is decoded as `Instruction`.
    pub async fn get(
        operation_config: &crate::OperationConfig,
        billing_account_name: &str,
        billing_profile_name: &str,
        instruction_name: &str,
    ) -> std::result::Result<Instruction, get::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/providers/Microsoft.Billing/billingAccounts/{}/billingProfiles/{}/instructions/{}",
            &operation_config.base_path, billing_account_name, billing_profile_name, instruction_name
        );
        let mut req_builder = client.get(uri_str);
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(get::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        let req = req_builder.build().context(get::BuildRequestError)?;
        let rsp = client.execute(req).await.context(get::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => {
                let body: bytes::Bytes = rsp.bytes().await.context(get::ResponseBytesError)?;
                let rsp_value: Instruction = serde_json::from_slice(&body).context(get::DeserializeError { body })?;
                Ok(rsp_value)
            }
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(get::ResponseBytesError)?;
                let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(get::DeserializeError { body })?;
                get::DefaultResponse {
                    status_code,
                    value: rsp_value,
                }
                .fail()
            }
        }
    }
    /// Error type and snafu context selectors for [`get`].
    pub mod get {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            DefaultResponse {
                status_code: StatusCode,
                value: models::ErrorResponse,
            },
            BuildRequestError {
                source: reqwest::Error,
            },
            ExecuteRequestError {
                source: reqwest::Error,
            },
            ResponseBytesError {
                source: reqwest::Error,
            },
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
    /// Creates or replaces an instruction.
    ///
    /// `PUT .../billingProfiles/{billing_profile_name}/instructions/{instruction_name}`
    /// with `parameters` as the JSON body; `200 OK` returns the stored
    /// `Instruction`.
    pub async fn put(
        operation_config: &crate::OperationConfig,
        billing_account_name: &str,
        billing_profile_name: &str,
        instruction_name: &str,
        parameters: &Instruction,
    ) -> std::result::Result<Instruction, put::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/providers/Microsoft.Billing/billingAccounts/{}/billingProfiles/{}/instructions/{}",
            &operation_config.base_path, billing_account_name, billing_profile_name, instruction_name
        );
        let mut req_builder = client.put(uri_str);
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(put::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        req_builder = req_builder.json(parameters);
        let req = req_builder.build().context(put::BuildRequestError)?;
        let rsp = client.execute(req).await.context(put::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => {
                let body: bytes::Bytes = rsp.bytes().await.context(put::ResponseBytesError)?;
                let rsp_value: Instruction = serde_json::from_slice(&body).context(put::DeserializeError { body })?;
                Ok(rsp_value)
            }
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(put::ResponseBytesError)?;
                let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(put::DeserializeError { body })?;
                put::DefaultResponse {
                    status_code,
                    value: rsp_value,
                }
                .fail()
            }
        }
    }
    /// Error type and snafu context selectors for [`put`].
    pub mod put {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            DefaultResponse {
                status_code: StatusCode,
                value: models::ErrorResponse,
            },
            BuildRequestError {
                source: reqwest::Error,
            },
            ExecuteRequestError {
                source: reqwest::Error,
            },
            ResponseBytesError {
                source: reqwest::Error,
            },
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
}
/// Operations on billing profiles within a billing account.
pub mod billing_profiles {
    use crate::models::*;
    use reqwest::StatusCode;
    use snafu::{ResultExt, Snafu};
    /// Lists the billing profiles of a billing account.
    ///
    /// `GET .../billingAccounts/{billing_account_name}/billingProfiles`,
    /// optionally forwarding `$expand`; `200 OK` is decoded as
    /// `BillingProfileListResult`.
    pub async fn list_by_billing_account(
        operation_config: &crate::OperationConfig,
        billing_account_name: &str,
        expand: Option<&str>,
    ) -> std::result::Result<BillingProfileListResult, list_by_billing_account::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/providers/Microsoft.Billing/billingAccounts/{}/billingProfiles",
            &operation_config.base_path, billing_account_name
        );
        let mut req_builder = client.get(uri_str);
        // Attach a bearer token only when the configuration carries a credential.
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(list_by_billing_account::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        // `$expand` is only appended when the caller supplied it.
        if let Some(expand) = expand {
            req_builder = req_builder.query(&[("$expand", expand)]);
        }
        let req = req_builder.build().context(list_by_billing_account::BuildRequestError)?;
        let rsp = client.execute(req).await.context(list_by_billing_account::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => {
                let body: bytes::Bytes = rsp.bytes().await.context(list_by_billing_account::ResponseBytesError)?;
                let rsp_value: BillingProfileListResult =
                    serde_json::from_slice(&body).context(list_by_billing_account::DeserializeError { body })?;
                Ok(rsp_value)
            }
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(list_by_billing_account::ResponseBytesError)?;
                let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(list_by_billing_account::DeserializeError { body })?;
                list_by_billing_account::DefaultResponse {
                    status_code,
                    value: rsp_value,
                }
                .fail()
            }
        }
    }
    /// Error type and snafu context selectors for [`list_by_billing_account`].
    pub mod list_by_billing_account {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            DefaultResponse {
                status_code: StatusCode,
                value: models::ErrorResponse,
            },
            BuildRequestError {
                source: reqwest::Error,
            },
            ExecuteRequestError {
                source: reqwest::Error,
            },
            ResponseBytesError {
                source: reqwest::Error,
            },
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
    /// Fetches a billing profile by name, optionally forwarding `$expand`.
    ///
    /// `GET .../billingAccounts/{billing_account_name}/billingProfiles/{billing_profile_name}`;
    /// `200 OK` is decoded as `BillingProfile`.
    pub async fn get(
        operation_config: &crate::OperationConfig,
        billing_account_name: &str,
        billing_profile_name: &str,
        expand: Option<&str>,
    ) -> std::result::Result<BillingProfile, get::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/providers/Microsoft.Billing/billingAccounts/{}/billingProfiles/{}",
            &operation_config.base_path, billing_account_name, billing_profile_name
        );
        let mut req_builder = client.get(uri_str);
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(get::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        if let Some(expand) = expand {
            req_builder = req_builder.query(&[("$expand", expand)]);
        }
        let req = req_builder.build().context(get::BuildRequestError)?;
        let rsp = client.execute(req).await.context(get::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => {
                let body: bytes::Bytes = rsp.bytes().await.context(get::ResponseBytesError)?;
                let rsp_value: BillingProfile = serde_json::from_slice(&body).context(get::DeserializeError { body })?;
                Ok(rsp_value)
            }
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(get::ResponseBytesError)?;
                let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(get::DeserializeError { body })?;
                get::DefaultResponse {
                    status_code,
                    value: rsp_value,
                }
                .fail()
            }
        }
    }
    /// Error type and snafu context selectors for [`get`].
    pub mod get {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            DefaultResponse {
                status_code: StatusCode,
                value: models::ErrorResponse,
            },
            BuildRequestError {
                source: reqwest::Error,
            },
            ExecuteRequestError {
                source: reqwest::Error,
            },
            ResponseBytesError {
                source: reqwest::Error,
            },
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
    /// Creates or replaces a billing profile.
    ///
    /// `PUT .../billingProfiles/{billing_profile_name}` with `parameters` as
    /// the JSON body. The operation may complete asynchronously on the
    /// service side: `200 OK` yields `Response::Ok200` with the profile,
    /// `202 Accepted` yields `Response::Accepted202` with no body.
    pub async fn create_or_update(
        operation_config: &crate::OperationConfig,
        billing_account_name: &str,
        billing_profile_name: &str,
        parameters: &BillingProfile,
    ) -> std::result::Result<create_or_update::Response, create_or_update::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/providers/Microsoft.Billing/billingAccounts/{}/billingProfiles/{}",
            &operation_config.base_path, billing_account_name, billing_profile_name
        );
        let mut req_builder = client.put(uri_str);
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(create_or_update::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        req_builder = req_builder.json(parameters);
        let req = req_builder.build().context(create_or_update::BuildRequestError)?;
        let rsp = client.execute(req).await.context(create_or_update::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => {
                let body: bytes::Bytes = rsp.bytes().await.context(create_or_update::ResponseBytesError)?;
                let rsp_value: BillingProfile = serde_json::from_slice(&body).context(create_or_update::DeserializeError { body })?;
                Ok(create_or_update::Response::Ok200(rsp_value))
            }
            StatusCode::ACCEPTED => Ok(create_or_update::Response::Accepted202),
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(create_or_update::ResponseBytesError)?;
                let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(create_or_update::DeserializeError { body })?;
                create_or_update::DefaultResponse {
                    status_code,
                    value: rsp_value,
                }
                .fail()
            }
        }
    }
    /// Success and error types for [`create_or_update`].
    pub mod create_or_update {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        /// Distinguishes a synchronous `200 OK` result from an asynchronous
        /// `202 Accepted` acknowledgement.
        #[derive(Debug)]
        pub enum Response {
            Ok200(BillingProfile),
            Accepted202,
        }
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            DefaultResponse {
                status_code: StatusCode,
                value: models::ErrorResponse,
            },
            BuildRequestError {
                source: reqwest::Error,
            },
            ExecuteRequestError {
                source: reqwest::Error,
            },
            ResponseBytesError {
                source: reqwest::Error,
            },
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
}
/// Operations on customers of a billing account or billing profile.
pub mod customers {
    use crate::models::*;
    use reqwest::StatusCode;
    use snafu::{ResultExt, Snafu};
    /// Lists the customers billed to a billing profile.
    ///
    /// `GET .../billingProfiles/{billing_profile_name}/customers`, optionally
    /// forwarding `$search` and `$filter`; `200 OK` is decoded as
    /// `CustomerListResult`.
    pub async fn list_by_billing_profile(
        operation_config: &crate::OperationConfig,
        billing_account_name: &str,
        billing_profile_name: &str,
        search: Option<&str>,
        filter: Option<&str>,
    ) -> std::result::Result<CustomerListResult, list_by_billing_profile::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/providers/Microsoft.Billing/billingAccounts/{}/billingProfiles/{}/customers",
            &operation_config.base_path, billing_account_name, billing_profile_name
        );
        let mut req_builder = client.get(uri_str);
        // Attach a bearer token only when the configuration carries a credential.
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(list_by_billing_profile::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        // Optional OData-style query parameters are only appended when supplied.
        if let Some(search) = search {
            req_builder = req_builder.query(&[("$search", search)]);
        }
        if let Some(filter) = filter {
            req_builder = req_builder.query(&[("$filter", filter)]);
        }
        let req = req_builder.build().context(list_by_billing_profile::BuildRequestError)?;
        let rsp = client.execute(req).await.context(list_by_billing_profile::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => {
                let body: bytes::Bytes = rsp.bytes().await.context(list_by_billing_profile::ResponseBytesError)?;
                let rsp_value: CustomerListResult =
                    serde_json::from_slice(&body).context(list_by_billing_profile::DeserializeError { body })?;
                Ok(rsp_value)
            }
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(list_by_billing_profile::ResponseBytesError)?;
                let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(list_by_billing_profile::DeserializeError { body })?;
                list_by_billing_profile::DefaultResponse {
                    status_code,
                    value: rsp_value,
                }
                .fail()
            }
        }
    }
    /// Error type and snafu context selectors for [`list_by_billing_profile`].
    pub mod list_by_billing_profile {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            DefaultResponse {
                status_code: StatusCode,
                value: models::ErrorResponse,
            },
            BuildRequestError {
                source: reqwest::Error,
            },
            ExecuteRequestError {
                source: reqwest::Error,
            },
            ResponseBytesError {
                source: reqwest::Error,
            },
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
    /// Lists the customers of a billing account.
    ///
    /// `GET .../billingAccounts/{billing_account_name}/customers`, optionally
    /// forwarding `$search` and `$filter`.
    pub async fn list_by_billing_account(
        operation_config: &crate::OperationConfig,
        billing_account_name: &str,
        search: Option<&str>,
        filter: Option<&str>,
    ) -> std::result::Result<CustomerListResult, list_by_billing_account::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/providers/Microsoft.Billing/billingAccounts/{}/customers",
            &operation_config.base_path, billing_account_name
        );
        let mut req_builder = client.get(uri_str);
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(list_by_billing_account::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        if let Some(search) = search {
            req_builder = req_builder.query(&[("$search", search)]);
        }
        if let Some(filter) = filter {
            req_builder = req_builder.query(&[("$filter", filter)]);
        }
        let req = req_builder.build().context(list_by_billing_account::BuildRequestError)?;
        let rsp = client.execute(req).await.context(list_by_billing_account::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => {
                let body: bytes::Bytes = rsp.bytes().await.context(list_by_billing_account::ResponseBytesError)?;
                let rsp_value: CustomerListResult =
                    serde_json::from_slice(&body).context(list_by_billing_account::DeserializeError { body })?;
                Ok(rsp_value)
            }
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(list_by_billing_account::ResponseBytesError)?;
                let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(list_by_billing_account::DeserializeError { body })?;
                list_by_billing_account::DefaultResponse {
                    status_code,
                    value: rsp_value,
                }
                .fail()
            }
        }
    }
    /// Error type and snafu context selectors for [`list_by_billing_account`].
    pub mod list_by_billing_account {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            DefaultResponse {
                status_code: StatusCode,
                value: models::ErrorResponse,
            },
            BuildRequestError {
                source: reqwest::Error,
            },
            ExecuteRequestError {
                source: reqwest::Error,
            },
            ResponseBytesError {
                source: reqwest::Error,
            },
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
    /// Fetches a customer by name, optionally forwarding `$expand`.
    ///
    /// `GET .../billingAccounts/{billing_account_name}/customers/{customer_name}`;
    /// `200 OK` is decoded as `Customer`.
    pub async fn get(
        operation_config: &crate::OperationConfig,
        billing_account_name: &str,
        customer_name: &str,
        expand: Option<&str>,
    ) -> std::result::Result<Customer, get::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/providers/Microsoft.Billing/billingAccounts/{}/customers/{}",
            &operation_config.base_path, billing_account_name, customer_name
        );
        let mut req_builder = client.get(uri_str);
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(get::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        if let Some(expand) = expand {
            req_builder = req_builder.query(&[("$expand", expand)]);
        }
        let req = req_builder.build().context(get::BuildRequestError)?;
        let rsp = client.execute(req).await.context(get::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => {
                let body: bytes::Bytes = rsp.bytes().await.context(get::ResponseBytesError)?;
                let rsp_value: Customer = serde_json::from_slice(&body).context(get::DeserializeError { body })?;
                Ok(rsp_value)
            }
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(get::ResponseBytesError)?;
                let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(get::DeserializeError { body })?;
                get::DefaultResponse {
                    status_code,
                    value: rsp_value,
                }
                .fail()
            }
        }
    }
    /// Error type and snafu context selectors for [`get`].
    pub mod get {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            DefaultResponse {
                status_code: StatusCode,
                value: models::ErrorResponse,
            },
            BuildRequestError {
                source: reqwest::Error,
            },
            ExecuteRequestError {
                source: reqwest::Error,
            },
            ResponseBytesError {
                source: reqwest::Error,
            },
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
}
/// Operations on invoice sections within a billing profile.
pub mod invoice_sections {
    use crate::models::*;
    use reqwest::StatusCode;
    use snafu::{ResultExt, Snafu};
    /// Lists the invoice sections of a billing profile.
    ///
    /// `GET .../billingProfiles/{billing_profile_name}/invoiceSections`;
    /// `200 OK` is decoded as `InvoiceSectionListResult`.
    pub async fn list_by_billing_profile(
        operation_config: &crate::OperationConfig,
        billing_account_name: &str,
        billing_profile_name: &str,
    ) -> std::result::Result<InvoiceSectionListResult, list_by_billing_profile::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/providers/Microsoft.Billing/billingAccounts/{}/billingProfiles/{}/invoiceSections",
            &operation_config.base_path, billing_account_name, billing_profile_name
        );
        let mut req_builder = client.get(uri_str);
        // Attach a bearer token only when the configuration carries a credential.
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(list_by_billing_profile::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        let req = req_builder.build().context(list_by_billing_profile::BuildRequestError)?;
        let rsp = client.execute(req).await.context(list_by_billing_profile::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => {
                let body: bytes::Bytes = rsp.bytes().await.context(list_by_billing_profile::ResponseBytesError)?;
                let rsp_value: InvoiceSectionListResult =
                    serde_json::from_slice(&body).context(list_by_billing_profile::DeserializeError { body })?;
                Ok(rsp_value)
            }
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(list_by_billing_profile::ResponseBytesError)?;
                let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(list_by_billing_profile::DeserializeError { body })?;
                list_by_billing_profile::DefaultResponse {
                    status_code,
                    value: rsp_value,
                }
                .fail()
            }
        }
    }
    /// Error type and snafu context selectors for [`list_by_billing_profile`].
    pub mod list_by_billing_profile {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            DefaultResponse {
                status_code: StatusCode,
                value: models::ErrorResponse,
            },
            BuildRequestError {
                source: reqwest::Error,
            },
            ExecuteRequestError {
                source: reqwest::Error,
            },
            ResponseBytesError {
                source: reqwest::Error,
            },
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
    /// Fetches an invoice section by name.
    ///
    /// `GET .../billingProfiles/{billing_profile_name}/invoiceSections/{invoice_section_name}`;
    /// `200 OK` is decoded as `InvoiceSection`.
    pub async fn get(
        operation_config: &crate::OperationConfig,
        billing_account_name: &str,
        billing_profile_name: &str,
        invoice_section_name: &str,
    ) -> std::result::Result<InvoiceSection, get::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/providers/Microsoft.Billing/billingAccounts/{}/billingProfiles/{}/invoiceSections/{}",
            &operation_config.base_path, billing_account_name, billing_profile_name, invoice_section_name
        );
        let mut req_builder = client.get(uri_str);
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(get::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        let req = req_builder.build().context(get::BuildRequestError)?;
        let rsp = client.execute(req).await.context(get::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => {
                let body: bytes::Bytes = rsp.bytes().await.context(get::ResponseBytesError)?;
                let rsp_value: InvoiceSection = serde_json::from_slice(&body).context(get::DeserializeError { body })?;
                Ok(rsp_value)
            }
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(get::ResponseBytesError)?;
                let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(get::DeserializeError { body })?;
                get::DefaultResponse {
                    status_code,
                    value: rsp_value,
                }
                .fail()
            }
        }
    }
    /// Error type and snafu context selectors for [`get`].
    pub mod get {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            DefaultResponse {
                status_code: StatusCode,
                value: models::ErrorResponse,
            },
            BuildRequestError {
                source: reqwest::Error,
            },
            ExecuteRequestError {
                source: reqwest::Error,
            },
            ResponseBytesError {
                source: reqwest::Error,
            },
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
    /// Creates or replaces an invoice section.
    ///
    /// `PUT .../invoiceSections/{invoice_section_name}` with `parameters` as
    /// the JSON body. `200 OK` yields `Response::Ok200` with the stored
    /// section; `202 Accepted` yields `Response::Accepted202` with no body.
    pub async fn create_or_update(
        operation_config: &crate::OperationConfig,
        billing_account_name: &str,
        billing_profile_name: &str,
        invoice_section_name: &str,
        parameters: &InvoiceSection,
    ) -> std::result::Result<create_or_update::Response, create_or_update::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/providers/Microsoft.Billing/billingAccounts/{}/billingProfiles/{}/invoiceSections/{}",
            &operation_config.base_path, billing_account_name, billing_profile_name, invoice_section_name
        );
        let mut req_builder = client.put(uri_str);
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(create_or_update::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        req_builder = req_builder.json(parameters);
        let req = req_builder.build().context(create_or_update::BuildRequestError)?;
        let rsp = client.execute(req).await.context(create_or_update::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => {
                let body: bytes::Bytes = rsp.bytes().await.context(create_or_update::ResponseBytesError)?;
                let rsp_value: InvoiceSection = serde_json::from_slice(&body).context(create_or_update::DeserializeError { body })?;
                Ok(create_or_update::Response::Ok200(rsp_value))
            }
            StatusCode::ACCEPTED => Ok(create_or_update::Response::Accepted202),
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(create_or_update::ResponseBytesError)?;
                let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(create_or_update::DeserializeError { body })?;
                create_or_update::DefaultResponse {
                    status_code,
                    value: rsp_value,
                }
                .fail()
            }
        }
    }
    /// Success and error types for [`create_or_update`].
    pub mod create_or_update {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        /// Distinguishes a synchronous `200 OK` result from an asynchronous
        /// `202 Accepted` acknowledgement.
        #[derive(Debug)]
        pub enum Response {
            Ok200(InvoiceSection),
            Accepted202,
        }
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            DefaultResponse {
                status_code: StatusCode,
                value: models::ErrorResponse,
            },
            BuildRequestError {
                source: reqwest::Error,
            },
            ExecuteRequestError {
                source: reqwest::Error,
            },
            ResponseBytesError {
                source: reqwest::Error,
            },
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
}
pub mod billing_permissions {
use crate::models::*;
use reqwest::StatusCode;
use snafu::{ResultExt, Snafu};
pub async fn list_by_customer(
operation_config: &crate::OperationConfig,
billing_account_name: &str,
customer_name: &str,
) -> std::result::Result<BillingPermissionsListResult, list_by_customer::Error> {
let client = &operation_config.client;
let uri_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/{}/customers/{}/billingPermissions",
&operation_config.base_path, billing_account_name, customer_name
);
let mut req_builder = client.get(uri_str);
if let Some(token_credential) = &operation_config.token_credential {
let token_response = token_credential
.get_token(&operation_config.token_credential_resource)
.await
.context(list_by_customer::GetTokenError)?;
req_builder = req_builder.bearer_auth(token_response.token.secret());
}
req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
let req = req_builder.build().context(list_by_customer::BuildRequestError)?;
let rsp = client.execute(req).await.context(list_by_customer::ExecuteRequestError)?;
match rsp.status() {
StatusCode::OK => {
let body: bytes::Bytes = rsp.bytes().await.context(list_by_customer::ResponseBytesError)?;
let rsp_value: BillingPermissionsListResult =
serde_json::from_slice(&body).context(list_by_customer::DeserializeError { body })?;
Ok(rsp_value)
}
status_code => {
let body: bytes::Bytes = rsp.bytes().await.context(list_by_customer::ResponseBytesError)?;
let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(list_by_customer::DeserializeError { body })?;
list_by_customer::DefaultResponse {
status_code,
value: rsp_value,
}
.fail()
}
}
}
    /// Error type and snafu context selectors for [`super::list_by_customer`].
    pub mod list_by_customer {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        /// Failure modes: non-success service response (`DefaultResponse`),
        /// request build/execute/body-read errors, JSON decoding errors
        /// (retaining the offending bytes), and token acquisition errors.
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            DefaultResponse {
                status_code: StatusCode,
                value: models::ErrorResponse,
            },
            BuildRequestError {
                source: reqwest::Error,
            },
            ExecuteRequestError {
                source: reqwest::Error,
            },
            ResponseBytesError {
                source: reqwest::Error,
            },
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
pub async fn list_by_billing_account(
operation_config: &crate::OperationConfig,
billing_account_name: &str,
) -> std::result::Result<BillingPermissionsListResult, list_by_billing_account::Error> {
let client = &operation_config.client;
let uri_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/{}/billingPermissions",
&operation_config.base_path, billing_account_name
);
let mut req_builder = client.get(uri_str);
if let Some(token_credential) = &operation_config.token_credential {
let token_response = token_credential
.get_token(&operation_config.token_credential_resource)
.await
.context(list_by_billing_account::GetTokenError)?;
req_builder = req_builder.bearer_auth(token_response.token.secret());
}
req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
let req = req_builder.build().context(list_by_billing_account::BuildRequestError)?;
let rsp = client.execute(req).await.context(list_by_billing_account::ExecuteRequestError)?;
match rsp.status() {
StatusCode::OK => {
let body: bytes::Bytes = rsp.bytes().await.context(list_by_billing_account::ResponseBytesError)?;
let rsp_value: BillingPermissionsListResult =
serde_json::from_slice(&body).context(list_by_billing_account::DeserializeError { body })?;
Ok(rsp_value)
}
status_code => {
let body: bytes::Bytes = rsp.bytes().await.context(list_by_billing_account::ResponseBytesError)?;
let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(list_by_billing_account::DeserializeError { body })?;
list_by_billing_account::DefaultResponse {
status_code,
value: rsp_value,
}
.fail()
}
}
}
    pub mod list_by_billing_account {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        /// Errors returned by the sibling `list_by_billing_account` operation.
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            // Service answered with a non-200 status; `value` holds the parsed error payload.
            DefaultResponse {
                status_code: StatusCode,
                value: models::ErrorResponse,
            },
            // Building the HTTP request failed.
            BuildRequestError {
                source: reqwest::Error,
            },
            // Sending the request failed.
            ExecuteRequestError {
                source: reqwest::Error,
            },
            // Reading the response body failed.
            ResponseBytesError {
                source: reqwest::Error,
            },
            // Response body could not be parsed; `body` keeps the raw bytes for diagnosis.
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            // Acquiring an access token from the credential failed.
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
pub async fn list_by_invoice_sections(
operation_config: &crate::OperationConfig,
billing_account_name: &str,
billing_profile_name: &str,
invoice_section_name: &str,
) -> std::result::Result<BillingPermissionsListResult, list_by_invoice_sections::Error> {
let client = &operation_config.client;
let uri_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/{}/billingProfiles/{}/invoiceSections/{}/billingPermissions",
&operation_config.base_path, billing_account_name, billing_profile_name, invoice_section_name
);
let mut req_builder = client.get(uri_str);
if let Some(token_credential) = &operation_config.token_credential {
let token_response = token_credential
.get_token(&operation_config.token_credential_resource)
.await
.context(list_by_invoice_sections::GetTokenError)?;
req_builder = req_builder.bearer_auth(token_response.token.secret());
}
req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
let req = req_builder.build().context(list_by_invoice_sections::BuildRequestError)?;
let rsp = client.execute(req).await.context(list_by_invoice_sections::ExecuteRequestError)?;
match rsp.status() {
StatusCode::OK => {
let body: bytes::Bytes = rsp.bytes().await.context(list_by_invoice_sections::ResponseBytesError)?;
let rsp_value: BillingPermissionsListResult =
serde_json::from_slice(&body).context(list_by_invoice_sections::DeserializeError { body })?;
Ok(rsp_value)
}
status_code => {
let body: bytes::Bytes = rsp.bytes().await.context(list_by_invoice_sections::ResponseBytesError)?;
let rsp_value: ErrorResponse =
serde_json::from_slice(&body).context(list_by_invoice_sections::DeserializeError { body })?;
list_by_invoice_sections::DefaultResponse {
status_code,
value: rsp_value,
}
.fail()
}
}
}
    pub mod list_by_invoice_sections {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        /// Errors returned by the sibling `list_by_invoice_sections` operation.
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            // Service answered with a non-200 status; `value` holds the parsed error payload.
            DefaultResponse {
                status_code: StatusCode,
                value: models::ErrorResponse,
            },
            // Building the HTTP request failed.
            BuildRequestError {
                source: reqwest::Error,
            },
            // Sending the request failed.
            ExecuteRequestError {
                source: reqwest::Error,
            },
            // Reading the response body failed.
            ResponseBytesError {
                source: reqwest::Error,
            },
            // Response body could not be parsed; `body` keeps the raw bytes for diagnosis.
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            // Acquiring an access token from the credential failed.
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
pub async fn list_by_billing_profile(
operation_config: &crate::OperationConfig,
billing_account_name: &str,
billing_profile_name: &str,
) -> std::result::Result<BillingPermissionsListResult, list_by_billing_profile::Error> {
let client = &operation_config.client;
let uri_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/{}/billingProfiles/{}/billingPermissions",
&operation_config.base_path, billing_account_name, billing_profile_name
);
let mut req_builder = client.get(uri_str);
if let Some(token_credential) = &operation_config.token_credential {
let token_response = token_credential
.get_token(&operation_config.token_credential_resource)
.await
.context(list_by_billing_profile::GetTokenError)?;
req_builder = req_builder.bearer_auth(token_response.token.secret());
}
req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
let req = req_builder.build().context(list_by_billing_profile::BuildRequestError)?;
let rsp = client.execute(req).await.context(list_by_billing_profile::ExecuteRequestError)?;
match rsp.status() {
StatusCode::OK => {
let body: bytes::Bytes = rsp.bytes().await.context(list_by_billing_profile::ResponseBytesError)?;
let rsp_value: BillingPermissionsListResult =
serde_json::from_slice(&body).context(list_by_billing_profile::DeserializeError { body })?;
Ok(rsp_value)
}
status_code => {
let body: bytes::Bytes = rsp.bytes().await.context(list_by_billing_profile::ResponseBytesError)?;
let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(list_by_billing_profile::DeserializeError { body })?;
list_by_billing_profile::DefaultResponse {
status_code,
value: rsp_value,
}
.fail()
}
}
}
    pub mod list_by_billing_profile {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        /// Errors returned by the sibling `list_by_billing_profile` operation.
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            // Service answered with a non-200 status; `value` holds the parsed error payload.
            DefaultResponse {
                status_code: StatusCode,
                value: models::ErrorResponse,
            },
            // Building the HTTP request failed.
            BuildRequestError {
                source: reqwest::Error,
            },
            // Sending the request failed.
            ExecuteRequestError {
                source: reqwest::Error,
            },
            // Reading the response body failed.
            ResponseBytesError {
                source: reqwest::Error,
            },
            // Response body could not be parsed; `body` keeps the raw bytes for diagnosis.
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            // Acquiring an access token from the credential failed.
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
}
pub mod billing_subscriptions {
use crate::models::*;
use reqwest::StatusCode;
use snafu::{ResultExt, Snafu};
pub async fn list_by_customer(
operation_config: &crate::OperationConfig,
billing_account_name: &str,
customer_name: &str,
) -> std::result::Result<BillingSubscriptionsListResult, list_by_customer::Error> {
let client = &operation_config.client;
let uri_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/{}/customers/{}/billingSubscriptions",
&operation_config.base_path, billing_account_name, customer_name
);
let mut req_builder = client.get(uri_str);
if let Some(token_credential) = &operation_config.token_credential {
let token_response = token_credential
.get_token(&operation_config.token_credential_resource)
.await
.context(list_by_customer::GetTokenError)?;
req_builder = req_builder.bearer_auth(token_response.token.secret());
}
req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
let req = req_builder.build().context(list_by_customer::BuildRequestError)?;
let rsp = client.execute(req).await.context(list_by_customer::ExecuteRequestError)?;
match rsp.status() {
StatusCode::OK => {
let body: bytes::Bytes = rsp.bytes().await.context(list_by_customer::ResponseBytesError)?;
let rsp_value: BillingSubscriptionsListResult =
serde_json::from_slice(&body).context(list_by_customer::DeserializeError { body })?;
Ok(rsp_value)
}
status_code => {
let body: bytes::Bytes = rsp.bytes().await.context(list_by_customer::ResponseBytesError)?;
let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(list_by_customer::DeserializeError { body })?;
list_by_customer::DefaultResponse {
status_code,
value: rsp_value,
}
.fail()
}
}
}
    pub mod list_by_customer {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        /// Errors returned by the sibling `list_by_customer` operation.
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            // Service answered with a non-200 status; `value` holds the parsed error payload.
            DefaultResponse {
                status_code: StatusCode,
                value: models::ErrorResponse,
            },
            // Building the HTTP request failed.
            BuildRequestError {
                source: reqwest::Error,
            },
            // Sending the request failed.
            ExecuteRequestError {
                source: reqwest::Error,
            },
            // Reading the response body failed.
            ResponseBytesError {
                source: reqwest::Error,
            },
            // Response body could not be parsed; `body` keeps the raw bytes for diagnosis.
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            // Acquiring an access token from the credential failed.
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
pub async fn list_by_billing_account(
operation_config: &crate::OperationConfig,
billing_account_name: &str,
) -> std::result::Result<BillingSubscriptionsListResult, list_by_billing_account::Error> {
let client = &operation_config.client;
let uri_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/{}/billingSubscriptions",
&operation_config.base_path, billing_account_name
);
let mut req_builder = client.get(uri_str);
if let Some(token_credential) = &operation_config.token_credential {
let token_response = token_credential
.get_token(&operation_config.token_credential_resource)
.await
.context(list_by_billing_account::GetTokenError)?;
req_builder = req_builder.bearer_auth(token_response.token.secret());
}
req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
let req = req_builder.build().context(list_by_billing_account::BuildRequestError)?;
let rsp = client.execute(req).await.context(list_by_billing_account::ExecuteRequestError)?;
match rsp.status() {
StatusCode::OK => {
let body: bytes::Bytes = rsp.bytes().await.context(list_by_billing_account::ResponseBytesError)?;
let rsp_value: BillingSubscriptionsListResult =
serde_json::from_slice(&body).context(list_by_billing_account::DeserializeError { body })?;
Ok(rsp_value)
}
status_code => {
let body: bytes::Bytes = rsp.bytes().await.context(list_by_billing_account::ResponseBytesError)?;
let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(list_by_billing_account::DeserializeError { body })?;
list_by_billing_account::DefaultResponse {
status_code,
value: rsp_value,
}
.fail()
}
}
}
    pub mod list_by_billing_account {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        /// Errors returned by the sibling `list_by_billing_account` operation.
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            // Service answered with a non-200 status; `value` holds the parsed error payload.
            DefaultResponse {
                status_code: StatusCode,
                value: models::ErrorResponse,
            },
            // Building the HTTP request failed.
            BuildRequestError {
                source: reqwest::Error,
            },
            // Sending the request failed.
            ExecuteRequestError {
                source: reqwest::Error,
            },
            // Reading the response body failed.
            ResponseBytesError {
                source: reqwest::Error,
            },
            // Response body could not be parsed; `body` keeps the raw bytes for diagnosis.
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            // Acquiring an access token from the credential failed.
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
pub async fn list_by_billing_profile(
operation_config: &crate::OperationConfig,
billing_account_name: &str,
billing_profile_name: &str,
) -> std::result::Result<BillingSubscriptionsListResult, list_by_billing_profile::Error> {
let client = &operation_config.client;
let uri_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/{}/billingProfiles/{}/billingSubscriptions",
&operation_config.base_path, billing_account_name, billing_profile_name
);
let mut req_builder = client.get(uri_str);
if let Some(token_credential) = &operation_config.token_credential {
let token_response = token_credential
.get_token(&operation_config.token_credential_resource)
.await
.context(list_by_billing_profile::GetTokenError)?;
req_builder = req_builder.bearer_auth(token_response.token.secret());
}
req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
let req = req_builder.build().context(list_by_billing_profile::BuildRequestError)?;
let rsp = client.execute(req).await.context(list_by_billing_profile::ExecuteRequestError)?;
match rsp.status() {
StatusCode::OK => {
let body: bytes::Bytes = rsp.bytes().await.context(list_by_billing_profile::ResponseBytesError)?;
let rsp_value: BillingSubscriptionsListResult =
serde_json::from_slice(&body).context(list_by_billing_profile::DeserializeError { body })?;
Ok(rsp_value)
}
status_code => {
let body: bytes::Bytes = rsp.bytes().await.context(list_by_billing_profile::ResponseBytesError)?;
let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(list_by_billing_profile::DeserializeError { body })?;
list_by_billing_profile::DefaultResponse {
status_code,
value: rsp_value,
}
.fail()
}
}
}
    pub mod list_by_billing_profile {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        /// Errors returned by the sibling `list_by_billing_profile` operation.
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            // Service answered with a non-200 status; `value` holds the parsed error payload.
            DefaultResponse {
                status_code: StatusCode,
                value: models::ErrorResponse,
            },
            // Building the HTTP request failed.
            BuildRequestError {
                source: reqwest::Error,
            },
            // Sending the request failed.
            ExecuteRequestError {
                source: reqwest::Error,
            },
            // Reading the response body failed.
            ResponseBytesError {
                source: reqwest::Error,
            },
            // Response body could not be parsed; `body` keeps the raw bytes for diagnosis.
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            // Acquiring an access token from the credential failed.
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
pub async fn list_by_invoice_section(
operation_config: &crate::OperationConfig,
billing_account_name: &str,
billing_profile_name: &str,
invoice_section_name: &str,
) -> std::result::Result<BillingSubscriptionsListResult, list_by_invoice_section::Error> {
let client = &operation_config.client;
let uri_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/{}/billingProfiles/{}/invoiceSections/{}/billingSubscriptions",
&operation_config.base_path, billing_account_name, billing_profile_name, invoice_section_name
);
let mut req_builder = client.get(uri_str);
if let Some(token_credential) = &operation_config.token_credential {
let token_response = token_credential
.get_token(&operation_config.token_credential_resource)
.await
.context(list_by_invoice_section::GetTokenError)?;
req_builder = req_builder.bearer_auth(token_response.token.secret());
}
req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
let req = req_builder.build().context(list_by_invoice_section::BuildRequestError)?;
let rsp = client.execute(req).await.context(list_by_invoice_section::ExecuteRequestError)?;
match rsp.status() {
StatusCode::OK => {
let body: bytes::Bytes = rsp.bytes().await.context(list_by_invoice_section::ResponseBytesError)?;
let rsp_value: BillingSubscriptionsListResult =
serde_json::from_slice(&body).context(list_by_invoice_section::DeserializeError { body })?;
Ok(rsp_value)
}
status_code => {
let body: bytes::Bytes = rsp.bytes().await.context(list_by_invoice_section::ResponseBytesError)?;
let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(list_by_invoice_section::DeserializeError { body })?;
list_by_invoice_section::DefaultResponse {
status_code,
value: rsp_value,
}
.fail()
}
}
}
    pub mod list_by_invoice_section {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        /// Errors returned by the sibling `list_by_invoice_section` operation.
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            // Service answered with a non-200 status; `value` holds the parsed error payload.
            DefaultResponse {
                status_code: StatusCode,
                value: models::ErrorResponse,
            },
            // Building the HTTP request failed.
            BuildRequestError {
                source: reqwest::Error,
            },
            // Sending the request failed.
            ExecuteRequestError {
                source: reqwest::Error,
            },
            // Reading the response body failed.
            ResponseBytesError {
                source: reqwest::Error,
            },
            // Response body could not be parsed; `body` keeps the raw bytes for diagnosis.
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            // Acquiring an access token from the credential failed.
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
pub async fn get(
operation_config: &crate::OperationConfig,
billing_account_name: &str,
subscription_id: &str,
) -> std::result::Result<BillingSubscription, get::Error> {
let client = &operation_config.client;
let uri_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/{}/billingSubscriptions/{}",
&operation_config.base_path, billing_account_name, subscription_id
);
let mut req_builder = client.get(uri_str);
if let Some(token_credential) = &operation_config.token_credential {
let token_response = token_credential
.get_token(&operation_config.token_credential_resource)
.await
.context(get::GetTokenError)?;
req_builder = req_builder.bearer_auth(token_response.token.secret());
}
req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
let req = req_builder.build().context(get::BuildRequestError)?;
let rsp = client.execute(req).await.context(get::ExecuteRequestError)?;
match rsp.status() {
StatusCode::OK => {
let body: bytes::Bytes = rsp.bytes().await.context(get::ResponseBytesError)?;
let rsp_value: BillingSubscription = serde_json::from_slice(&body).context(get::DeserializeError { body })?;
Ok(rsp_value)
}
status_code => {
let body: bytes::Bytes = rsp.bytes().await.context(get::ResponseBytesError)?;
let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(get::DeserializeError { body })?;
get::DefaultResponse {
status_code,
value: rsp_value,
}
.fail()
}
}
}
    pub mod get {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        /// Errors returned by the sibling `get` operation.
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            // Service answered with a non-200 status; `value` holds the parsed error payload.
            DefaultResponse {
                status_code: StatusCode,
                value: models::ErrorResponse,
            },
            // Building the HTTP request failed.
            BuildRequestError {
                source: reqwest::Error,
            },
            // Sending the request failed.
            ExecuteRequestError {
                source: reqwest::Error,
            },
            // Reading the response body failed.
            ResponseBytesError {
                source: reqwest::Error,
            },
            // Response body could not be parsed; `body` keeps the raw bytes for diagnosis.
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            // Acquiring an access token from the credential failed.
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
pub async fn update(
operation_config: &crate::OperationConfig,
billing_account_name: &str,
subscription_id: &str,
parameters: &BillingSubscription,
) -> std::result::Result<BillingSubscription, update::Error> {
let client = &operation_config.client;
let uri_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/{}/billingSubscriptions/{}",
&operation_config.base_path, billing_account_name, subscription_id
);
let mut req_builder = client.patch(uri_str);
if let Some(token_credential) = &operation_config.token_credential {
let token_response = token_credential
.get_token(&operation_config.token_credential_resource)
.await
.context(update::GetTokenError)?;
req_builder = req_builder.bearer_auth(token_response.token.secret());
}
req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
req_builder = req_builder.json(parameters);
let req = req_builder.build().context(update::BuildRequestError)?;
let rsp = client.execute(req).await.context(update::ExecuteRequestError)?;
match rsp.status() {
StatusCode::OK => {
let body: bytes::Bytes = rsp.bytes().await.context(update::ResponseBytesError)?;
let rsp_value: BillingSubscription = serde_json::from_slice(&body).context(update::DeserializeError { body })?;
Ok(rsp_value)
}
status_code => {
let body: bytes::Bytes = rsp.bytes().await.context(update::ResponseBytesError)?;
let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(update::DeserializeError { body })?;
update::DefaultResponse {
status_code,
value: rsp_value,
}
.fail()
}
}
}
    pub mod update {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        /// Errors returned by the sibling `update` operation.
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            // Service answered with a non-200 status; `value` holds the parsed error payload.
            DefaultResponse {
                status_code: StatusCode,
                value: models::ErrorResponse,
            },
            // Building the HTTP request failed.
            BuildRequestError {
                source: reqwest::Error,
            },
            // Sending the request failed.
            ExecuteRequestError {
                source: reqwest::Error,
            },
            // Reading the response body failed.
            ResponseBytesError {
                source: reqwest::Error,
            },
            // Response body could not be parsed; `body` keeps the raw bytes for diagnosis.
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            // Acquiring an access token from the credential failed.
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
pub async fn move_(
operation_config: &crate::OperationConfig,
billing_account_name: &str,
subscription_id: &str,
parameters: &TransferBillingSubscriptionRequestProperties,
) -> std::result::Result<move_::Response, move_::Error> {
let client = &operation_config.client;
let uri_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/{}/billingSubscriptions/{}/move",
&operation_config.base_path, billing_account_name, subscription_id
);
let mut req_builder = client.post(uri_str);
if let Some(token_credential) = &operation_config.token_credential {
let token_response = token_credential
.get_token(&operation_config.token_credential_resource)
.await
.context(move_::GetTokenError)?;
req_builder = req_builder.bearer_auth(token_response.token.secret());
}
req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
req_builder = req_builder.json(parameters);
let req = req_builder.build().context(move_::BuildRequestError)?;
let rsp = client.execute(req).await.context(move_::ExecuteRequestError)?;
match rsp.status() {
StatusCode::OK => {
let body: bytes::Bytes = rsp.bytes().await.context(move_::ResponseBytesError)?;
let rsp_value: BillingSubscription = serde_json::from_slice(&body).context(move_::DeserializeError { body })?;
Ok(move_::Response::Ok200(rsp_value))
}
StatusCode::ACCEPTED => Ok(move_::Response::Accepted202),
status_code => {
let body: bytes::Bytes = rsp.bytes().await.context(move_::ResponseBytesError)?;
let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(move_::DeserializeError { body })?;
move_::DefaultResponse {
status_code,
value: rsp_value,
}
.fail()
}
}
}
    pub mod move_ {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        /// Successful outcomes of the sibling `move_` operation.
        #[derive(Debug)]
        pub enum Response {
            // 200: the move finished synchronously; carries the moved subscription.
            Ok200(BillingSubscription),
            // 202: the move was accepted for asynchronous processing.
            Accepted202,
        }
        /// Errors returned by the sibling `move_` operation.
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            // Service answered with an unexpected status; `value` holds the parsed error payload.
            DefaultResponse {
                status_code: StatusCode,
                value: models::ErrorResponse,
            },
            // Building the HTTP request failed.
            BuildRequestError {
                source: reqwest::Error,
            },
            // Sending the request failed.
            ExecuteRequestError {
                source: reqwest::Error,
            },
            // Reading the response body failed.
            ResponseBytesError {
                source: reqwest::Error,
            },
            // Response body could not be parsed; `body` keeps the raw bytes for diagnosis.
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            // Acquiring an access token from the credential failed.
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
pub async fn validate_move(
operation_config: &crate::OperationConfig,
billing_account_name: &str,
subscription_id: &str,
parameters: &TransferBillingSubscriptionRequestProperties,
) -> std::result::Result<ValidateSubscriptionTransferEligibilityResult, validate_move::Error> {
let client = &operation_config.client;
let uri_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/{}/billingSubscriptions/{}/validateMoveEligibility",
&operation_config.base_path, billing_account_name, subscription_id
);
let mut req_builder = client.post(uri_str);
if let Some(token_credential) = &operation_config.token_credential {
let token_response = token_credential
.get_token(&operation_config.token_credential_resource)
.await
.context(validate_move::GetTokenError)?;
req_builder = req_builder.bearer_auth(token_response.token.secret());
}
req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
req_builder = req_builder.json(parameters);
let req = req_builder.build().context(validate_move::BuildRequestError)?;
let rsp = client.execute(req).await.context(validate_move::ExecuteRequestError)?;
match rsp.status() {
StatusCode::OK => {
let body: bytes::Bytes = rsp.bytes().await.context(validate_move::ResponseBytesError)?;
let rsp_value: ValidateSubscriptionTransferEligibilityResult =
serde_json::from_slice(&body).context(validate_move::DeserializeError { body })?;
Ok(rsp_value)
}
status_code => {
let body: bytes::Bytes = rsp.bytes().await.context(validate_move::ResponseBytesError)?;
let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(validate_move::DeserializeError { body })?;
validate_move::DefaultResponse {
status_code,
value: rsp_value,
}
.fail()
}
}
}
    pub mod validate_move {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        /// Errors returned by the sibling `validate_move` operation.
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            // Service answered with a non-200 status; `value` holds the parsed error payload.
            DefaultResponse {
                status_code: StatusCode,
                value: models::ErrorResponse,
            },
            // Building the HTTP request failed.
            BuildRequestError {
                source: reqwest::Error,
            },
            // Sending the request failed.
            ExecuteRequestError {
                source: reqwest::Error,
            },
            // Reading the response body failed.
            ResponseBytesError {
                source: reqwest::Error,
            },
            // Response body could not be parsed; `body` keeps the raw bytes for diagnosis.
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            // Acquiring an access token from the credential failed.
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
}
pub mod products {
use crate::models::*;
use reqwest::StatusCode;
use snafu::{ResultExt, Snafu};
pub async fn list_by_customer(
operation_config: &crate::OperationConfig,
billing_account_name: &str,
customer_name: &str,
) -> std::result::Result<ProductsListResult, list_by_customer::Error> {
let client = &operation_config.client;
let uri_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/{}/customers/{}/products",
&operation_config.base_path, billing_account_name, customer_name
);
let mut req_builder = client.get(uri_str);
if let Some(token_credential) = &operation_config.token_credential {
let token_response = token_credential
.get_token(&operation_config.token_credential_resource)
.await
.context(list_by_customer::GetTokenError)?;
req_builder = req_builder.bearer_auth(token_response.token.secret());
}
req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
let req = req_builder.build().context(list_by_customer::BuildRequestError)?;
let rsp = client.execute(req).await.context(list_by_customer::ExecuteRequestError)?;
match rsp.status() {
StatusCode::OK => {
let body: bytes::Bytes = rsp.bytes().await.context(list_by_customer::ResponseBytesError)?;
let rsp_value: ProductsListResult = serde_json::from_slice(&body).context(list_by_customer::DeserializeError { body })?;
Ok(rsp_value)
}
status_code => {
let body: bytes::Bytes = rsp.bytes().await.context(list_by_customer::ResponseBytesError)?;
let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(list_by_customer::DeserializeError { body })?;
list_by_customer::DefaultResponse {
status_code,
value: rsp_value,
}
.fail()
}
}
}
    pub mod list_by_customer {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        /// Errors returned by the sibling `list_by_customer` operation.
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            // Service answered with a non-200 status; `value` holds the parsed error payload.
            DefaultResponse {
                status_code: StatusCode,
                value: models::ErrorResponse,
            },
            // Building the HTTP request failed.
            BuildRequestError {
                source: reqwest::Error,
            },
            // Sending the request failed.
            ExecuteRequestError {
                source: reqwest::Error,
            },
            // Reading the response body failed.
            ResponseBytesError {
                source: reqwest::Error,
            },
            // Response body could not be parsed; `body` keeps the raw bytes for diagnosis.
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            // Acquiring an access token from the credential failed.
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
pub async fn list_by_billing_account(
operation_config: &crate::OperationConfig,
billing_account_name: &str,
filter: Option<&str>,
) -> std::result::Result<ProductsListResult, list_by_billing_account::Error> {
let client = &operation_config.client;
let uri_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/{}/products",
&operation_config.base_path, billing_account_name
);
let mut req_builder = client.get(uri_str);
if let Some(token_credential) = &operation_config.token_credential {
let token_response = token_credential
.get_token(&operation_config.token_credential_resource)
.await
.context(list_by_billing_account::GetTokenError)?;
req_builder = req_builder.bearer_auth(token_response.token.secret());
}
req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
if let Some(filter) = filter {
req_builder = req_builder.query(&[("$filter", filter)]);
}
let req = req_builder.build().context(list_by_billing_account::BuildRequestError)?;
let rsp = client.execute(req).await.context(list_by_billing_account::ExecuteRequestError)?;
match rsp.status() {
StatusCode::OK => {
let body: bytes::Bytes = rsp.bytes().await.context(list_by_billing_account::ResponseBytesError)?;
let rsp_value: ProductsListResult =
serde_json::from_slice(&body).context(list_by_billing_account::DeserializeError { body })?;
Ok(rsp_value)
}
status_code => {
let body: bytes::Bytes = rsp.bytes().await.context(list_by_billing_account::ResponseBytesError)?;
let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(list_by_billing_account::DeserializeError { body })?;
list_by_billing_account::DefaultResponse {
status_code,
value: rsp_value,
}
.fail()
}
}
}
    pub mod list_by_billing_account {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        /// Errors returned by the sibling `list_by_billing_account` operation.
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            // Service answered with a non-200 status; `value` holds the parsed error payload.
            DefaultResponse {
                status_code: StatusCode,
                value: models::ErrorResponse,
            },
            // Building the HTTP request failed.
            BuildRequestError {
                source: reqwest::Error,
            },
            // Sending the request failed.
            ExecuteRequestError {
                source: reqwest::Error,
            },
            // Reading the response body failed.
            ResponseBytesError {
                source: reqwest::Error,
            },
            // Response body could not be parsed; `body` keeps the raw bytes for diagnosis.
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            // Acquiring an access token from the credential failed.
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
pub async fn list_by_billing_profile(
operation_config: &crate::OperationConfig,
billing_account_name: &str,
billing_profile_name: &str,
filter: Option<&str>,
) -> std::result::Result<ProductsListResult, list_by_billing_profile::Error> {
let client = &operation_config.client;
let uri_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/{}/billingProfiles/{}/products",
&operation_config.base_path, billing_account_name, billing_profile_name
);
let mut req_builder = client.get(uri_str);
if let Some(token_credential) = &operation_config.token_credential {
let token_response = token_credential
.get_token(&operation_config.token_credential_resource)
.await
.context(list_by_billing_profile::GetTokenError)?;
req_builder = req_builder.bearer_auth(token_response.token.secret());
}
req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
if let Some(filter) = filter {
req_builder = req_builder.query(&[("$filter", filter)]);
}
let req = req_builder.build().context(list_by_billing_profile::BuildRequestError)?;
let rsp = client.execute(req).await.context(list_by_billing_profile::ExecuteRequestError)?;
match rsp.status() {
StatusCode::OK => {
let body: bytes::Bytes = rsp.bytes().await.context(list_by_billing_profile::ResponseBytesError)?;
let rsp_value: ProductsListResult =
serde_json::from_slice(&body).context(list_by_billing_profile::DeserializeError { body })?;
Ok(rsp_value)
}
status_code => {
let body: bytes::Bytes = rsp.bytes().await.context(list_by_billing_profile::ResponseBytesError)?;
let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(list_by_billing_profile::DeserializeError { body })?;
list_by_billing_profile::DefaultResponse {
status_code,
value: rsp_value,
}
.fail()
}
}
}
    pub mod list_by_billing_profile {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        /// Errors returned by the sibling `list_by_billing_profile` operation.
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            // Service answered with a non-200 status; `value` holds the parsed error payload.
            DefaultResponse {
                status_code: StatusCode,
                value: models::ErrorResponse,
            },
            // Building the HTTP request failed.
            BuildRequestError {
                source: reqwest::Error,
            },
            // Sending the request failed.
            ExecuteRequestError {
                source: reqwest::Error,
            },
            // Reading the response body failed.
            ResponseBytesError {
                source: reqwest::Error,
            },
            // Response body could not be parsed; `body` keeps the raw bytes for diagnosis.
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            // Acquiring an access token from the credential failed.
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
pub async fn list_by_invoice_section(
operation_config: &crate::OperationConfig,
billing_account_name: &str,
billing_profile_name: &str,
invoice_section_name: &str,
filter: Option<&str>,
) -> std::result::Result<ProductsListResult, list_by_invoice_section::Error> {
let client = &operation_config.client;
let uri_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/{}/billingProfiles/{}/invoiceSections/{}/products",
&operation_config.base_path, billing_account_name, billing_profile_name, invoice_section_name
);
let mut req_builder = client.get(uri_str);
if let Some(token_credential) = &operation_config.token_credential {
let token_response = token_credential
.get_token(&operation_config.token_credential_resource)
.await
.context(list_by_invoice_section::GetTokenError)?;
req_builder = req_builder.bearer_auth(token_response.token.secret());
}
req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
if let Some(filter) = filter {
req_builder = req_builder.query(&[("$filter", filter)]);
}
let req = req_builder.build().context(list_by_invoice_section::BuildRequestError)?;
let rsp = client.execute(req).await.context(list_by_invoice_section::ExecuteRequestError)?;
match rsp.status() {
StatusCode::OK => {
let body: bytes::Bytes = rsp.bytes().await.context(list_by_invoice_section::ResponseBytesError)?;
let rsp_value: ProductsListResult =
serde_json::from_slice(&body).context(list_by_invoice_section::DeserializeError { body })?;
Ok(rsp_value)
}
status_code => {
let body: bytes::Bytes = rsp.bytes().await.context(list_by_invoice_section::ResponseBytesError)?;
let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(list_by_invoice_section::DeserializeError { body })?;
list_by_invoice_section::DefaultResponse {
status_code,
value: rsp_value,
}
.fail()
}
}
}
    pub mod list_by_invoice_section {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        /// Errors returned by the sibling `list_by_invoice_section` operation.
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            // Service answered with a non-200 status; `value` holds the parsed error payload.
            DefaultResponse {
                status_code: StatusCode,
                value: models::ErrorResponse,
            },
            // Building the HTTP request failed.
            BuildRequestError {
                source: reqwest::Error,
            },
            // Sending the request failed.
            ExecuteRequestError {
                source: reqwest::Error,
            },
            // Reading the response body failed.
            ResponseBytesError {
                source: reqwest::Error,
            },
            // Response body could not be parsed; `body` keeps the raw bytes for diagnosis.
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            // Acquiring an access token from the credential failed.
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
pub async fn get(
operation_config: &crate::OperationConfig,
billing_account_name: &str,
product_name: &str,
) -> std::result::Result<Product, get::Error> {
let client = &operation_config.client;
let uri_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/{}/products/{}",
&operation_config.base_path, billing_account_name, product_name
);
let mut req_builder = client.get(uri_str);
if let Some(token_credential) = &operation_config.token_credential {
let token_response = token_credential
.get_token(&operation_config.token_credential_resource)
.await
.context(get::GetTokenError)?;
req_builder = req_builder.bearer_auth(token_response.token.secret());
}
req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
let req = req_builder.build().context(get::BuildRequestError)?;
let rsp = client.execute(req).await.context(get::ExecuteRequestError)?;
match rsp.status() {
StatusCode::OK => {
let body: bytes::Bytes = rsp.bytes().await.context(get::ResponseBytesError)?;
let rsp_value: Product = serde_json::from_slice(&body).context(get::DeserializeError { body })?;
Ok(rsp_value)
}
status_code => {
let body: bytes::Bytes = rsp.bytes().await.context(get::ResponseBytesError)?;
let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(get::DeserializeError { body })?;
get::DefaultResponse {
status_code,
value: rsp_value,
}
.fail()
}
}
}
// Error type for the products `get` operation; one variant per failure stage.
pub mod get {
    use crate::{models, models::*};
    use reqwest::StatusCode;
    use snafu::Snafu;
    #[derive(Debug, Snafu)]
    #[snafu(visibility(pub(crate)))]
    pub enum Error {
        // Service answered with a non-200 status and an `ErrorResponse` body.
        DefaultResponse {
            status_code: StatusCode,
            value: models::ErrorResponse,
        },
        // Constructing the HTTP request failed.
        BuildRequestError {
            source: reqwest::Error,
        },
        // Sending the request failed.
        ExecuteRequestError {
            source: reqwest::Error,
        },
        // Reading the response body failed.
        ResponseBytesError {
            source: reqwest::Error,
        },
        // JSON deserialization failed; raw bytes kept for diagnostics.
        DeserializeError {
            source: serde_json::Error,
            body: bytes::Bytes,
        },
        // Acquiring the Azure AD token failed.
        GetTokenError {
            source: azure_core::errors::AzureError,
        },
    }
}
pub async fn update(
operation_config: &crate::OperationConfig,
billing_account_name: &str,
product_name: &str,
parameters: &Product,
) -> std::result::Result<Product, update::Error> {
let client = &operation_config.client;
let uri_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/{}/products/{}",
&operation_config.base_path, billing_account_name, product_name
);
let mut req_builder = client.patch(uri_str);
if let Some(token_credential) = &operation_config.token_credential {
let token_response = token_credential
.get_token(&operation_config.token_credential_resource)
.await
.context(update::GetTokenError)?;
req_builder = req_builder.bearer_auth(token_response.token.secret());
}
req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
req_builder = req_builder.json(parameters);
let req = req_builder.build().context(update::BuildRequestError)?;
let rsp = client.execute(req).await.context(update::ExecuteRequestError)?;
match rsp.status() {
StatusCode::OK => {
let body: bytes::Bytes = rsp.bytes().await.context(update::ResponseBytesError)?;
let rsp_value: Product = serde_json::from_slice(&body).context(update::DeserializeError { body })?;
Ok(rsp_value)
}
status_code => {
let body: bytes::Bytes = rsp.bytes().await.context(update::ResponseBytesError)?;
let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(update::DeserializeError { body })?;
update::DefaultResponse {
status_code,
value: rsp_value,
}
.fail()
}
}
}
// Error type for the products `update` operation; one variant per failure stage.
pub mod update {
    use crate::{models, models::*};
    use reqwest::StatusCode;
    use snafu::Snafu;
    #[derive(Debug, Snafu)]
    #[snafu(visibility(pub(crate)))]
    pub enum Error {
        // Service answered with a non-200 status and an `ErrorResponse` body.
        DefaultResponse {
            status_code: StatusCode,
            value: models::ErrorResponse,
        },
        // Constructing the HTTP request failed.
        BuildRequestError {
            source: reqwest::Error,
        },
        // Sending the request failed.
        ExecuteRequestError {
            source: reqwest::Error,
        },
        // Reading the response body failed.
        ResponseBytesError {
            source: reqwest::Error,
        },
        // JSON deserialization failed; raw bytes kept for diagnostics.
        DeserializeError {
            source: serde_json::Error,
            body: bytes::Bytes,
        },
        // Acquiring the Azure AD token failed.
        GetTokenError {
            source: azure_core::errors::AzureError,
        },
    }
}
pub async fn move_(
operation_config: &crate::OperationConfig,
billing_account_name: &str,
product_name: &str,
parameters: &TransferProductRequestProperties,
) -> std::result::Result<move_::Response, move_::Error> {
let client = &operation_config.client;
let uri_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/{}/products/{}/move",
&operation_config.base_path, billing_account_name, product_name
);
let mut req_builder = client.post(uri_str);
if let Some(token_credential) = &operation_config.token_credential {
let token_response = token_credential
.get_token(&operation_config.token_credential_resource)
.await
.context(move_::GetTokenError)?;
req_builder = req_builder.bearer_auth(token_response.token.secret());
}
req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
req_builder = req_builder.json(parameters);
let req = req_builder.build().context(move_::BuildRequestError)?;
let rsp = client.execute(req).await.context(move_::ExecuteRequestError)?;
match rsp.status() {
StatusCode::OK => {
let body: bytes::Bytes = rsp.bytes().await.context(move_::ResponseBytesError)?;
let rsp_value: Product = serde_json::from_slice(&body).context(move_::DeserializeError { body })?;
Ok(move_::Response::Ok200(rsp_value))
}
StatusCode::ACCEPTED => Ok(move_::Response::Accepted202),
status_code => {
let body: bytes::Bytes = rsp.bytes().await.context(move_::ResponseBytesError)?;
let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(move_::DeserializeError { body })?;
move_::DefaultResponse {
status_code,
value: rsp_value,
}
.fail()
}
}
}
// Response and error types for the products `move_` operation.
pub mod move_ {
    use crate::{models, models::*};
    use reqwest::StatusCode;
    use snafu::Snafu;
    // Successful outcomes: HTTP 200 with a `Product` body, or HTTP 202 with no body.
    #[derive(Debug)]
    pub enum Response {
        Ok200(Product),
        Accepted202,
    }
    #[derive(Debug, Snafu)]
    #[snafu(visibility(pub(crate)))]
    pub enum Error {
        // Service answered with an unexpected status and an `ErrorResponse` body.
        DefaultResponse {
            status_code: StatusCode,
            value: models::ErrorResponse,
        },
        // Constructing the HTTP request failed.
        BuildRequestError {
            source: reqwest::Error,
        },
        // Sending the request failed.
        ExecuteRequestError {
            source: reqwest::Error,
        },
        // Reading the response body failed.
        ResponseBytesError {
            source: reqwest::Error,
        },
        // JSON deserialization failed; raw bytes kept for diagnostics.
        DeserializeError {
            source: serde_json::Error,
            body: bytes::Bytes,
        },
        // Acquiring the Azure AD token failed.
        GetTokenError {
            source: azure_core::errors::AzureError,
        },
    }
}
pub async fn validate_move(
operation_config: &crate::OperationConfig,
billing_account_name: &str,
product_name: &str,
parameters: &TransferProductRequestProperties,
) -> std::result::Result<ValidateProductTransferEligibilityResult, validate_move::Error> {
let client = &operation_config.client;
let uri_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/{}/products/{}/validateMoveEligibility",
&operation_config.base_path, billing_account_name, product_name
);
let mut req_builder = client.post(uri_str);
if let Some(token_credential) = &operation_config.token_credential {
let token_response = token_credential
.get_token(&operation_config.token_credential_resource)
.await
.context(validate_move::GetTokenError)?;
req_builder = req_builder.bearer_auth(token_response.token.secret());
}
req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
req_builder = req_builder.json(parameters);
let req = req_builder.build().context(validate_move::BuildRequestError)?;
let rsp = client.execute(req).await.context(validate_move::ExecuteRequestError)?;
match rsp.status() {
StatusCode::OK => {
let body: bytes::Bytes = rsp.bytes().await.context(validate_move::ResponseBytesError)?;
let rsp_value: ValidateProductTransferEligibilityResult =
serde_json::from_slice(&body).context(validate_move::DeserializeError { body })?;
Ok(rsp_value)
}
status_code => {
let body: bytes::Bytes = rsp.bytes().await.context(validate_move::ResponseBytesError)?;
let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(validate_move::DeserializeError { body })?;
validate_move::DefaultResponse {
status_code,
value: rsp_value,
}
.fail()
}
}
}
// Error type for the `validate_move` operation; one variant per failure stage.
pub mod validate_move {
    use crate::{models, models::*};
    use reqwest::StatusCode;
    use snafu::Snafu;
    #[derive(Debug, Snafu)]
    #[snafu(visibility(pub(crate)))]
    pub enum Error {
        // Service answered with a non-200 status and an `ErrorResponse` body.
        DefaultResponse {
            status_code: StatusCode,
            value: models::ErrorResponse,
        },
        // Constructing the HTTP request failed.
        BuildRequestError {
            source: reqwest::Error,
        },
        // Sending the request failed.
        ExecuteRequestError {
            source: reqwest::Error,
        },
        // Reading the response body failed.
        ResponseBytesError {
            source: reqwest::Error,
        },
        // JSON deserialization failed; raw bytes kept for diagnostics.
        DeserializeError {
            source: serde_json::Error,
            body: bytes::Bytes,
        },
        // Acquiring the Azure AD token failed.
        GetTokenError {
            source: azure_core::errors::AzureError,
        },
    }
}
}
pub mod invoices {
use crate::models::*;
use reqwest::StatusCode;
use snafu::{ResultExt, Snafu};
pub async fn list_by_billing_account(
operation_config: &crate::OperationConfig,
billing_account_name: &str,
period_start_date: &str,
period_end_date: &str,
) -> std::result::Result<InvoiceListResult, list_by_billing_account::Error> {
let client = &operation_config.client;
let uri_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/{}/invoices",
&operation_config.base_path, billing_account_name
);
let mut req_builder = client.get(uri_str);
if let Some(token_credential) = &operation_config.token_credential {
let token_response = token_credential
.get_token(&operation_config.token_credential_resource)
.await
.context(list_by_billing_account::GetTokenError)?;
req_builder = req_builder.bearer_auth(token_response.token.secret());
}
req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
req_builder = req_builder.query(&[("periodStartDate", period_start_date)]);
req_builder = req_builder.query(&[("periodEndDate", period_end_date)]);
let req = req_builder.build().context(list_by_billing_account::BuildRequestError)?;
let rsp = client.execute(req).await.context(list_by_billing_account::ExecuteRequestError)?;
match rsp.status() {
StatusCode::OK => {
let body: bytes::Bytes = rsp.bytes().await.context(list_by_billing_account::ResponseBytesError)?;
let rsp_value: InvoiceListResult =
serde_json::from_slice(&body).context(list_by_billing_account::DeserializeError { body })?;
Ok(rsp_value)
}
status_code => {
let body: bytes::Bytes = rsp.bytes().await.context(list_by_billing_account::ResponseBytesError)?;
let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(list_by_billing_account::DeserializeError { body })?;
list_by_billing_account::DefaultResponse {
status_code,
value: rsp_value,
}
.fail()
}
}
}
// Error type for the `list_by_billing_account` operation; one variant per failure stage.
pub mod list_by_billing_account {
    use crate::{models, models::*};
    use reqwest::StatusCode;
    use snafu::Snafu;
    #[derive(Debug, Snafu)]
    #[snafu(visibility(pub(crate)))]
    pub enum Error {
        // Service answered with a non-200 status and an `ErrorResponse` body.
        DefaultResponse {
            status_code: StatusCode,
            value: models::ErrorResponse,
        },
        // Constructing the HTTP request failed.
        BuildRequestError {
            source: reqwest::Error,
        },
        // Sending the request failed.
        ExecuteRequestError {
            source: reqwest::Error,
        },
        // Reading the response body failed.
        ResponseBytesError {
            source: reqwest::Error,
        },
        // JSON deserialization failed; raw bytes kept for diagnostics.
        DeserializeError {
            source: serde_json::Error,
            body: bytes::Bytes,
        },
        // Acquiring the Azure AD token failed.
        GetTokenError {
            source: azure_core::errors::AzureError,
        },
    }
}
pub async fn list_by_billing_profile(
operation_config: &crate::OperationConfig,
billing_account_name: &str,
billing_profile_name: &str,
period_start_date: &str,
period_end_date: &str,
) -> std::result::Result<InvoiceListResult, list_by_billing_profile::Error> {
let client = &operation_config.client;
let uri_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/{}/billingProfiles/{}/invoices",
&operation_config.base_path, billing_account_name, billing_profile_name
);
let mut req_builder = client.get(uri_str);
if let Some(token_credential) = &operation_config.token_credential {
let token_response = token_credential
.get_token(&operation_config.token_credential_resource)
.await
.context(list_by_billing_profile::GetTokenError)?;
req_builder = req_builder.bearer_auth(token_response.token.secret());
}
req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
req_builder = req_builder.query(&[("periodStartDate", period_start_date)]);
req_builder = req_builder.query(&[("periodEndDate", period_end_date)]);
let req = req_builder.build().context(list_by_billing_profile::BuildRequestError)?;
let rsp = client.execute(req).await.context(list_by_billing_profile::ExecuteRequestError)?;
match rsp.status() {
StatusCode::OK => {
let body: bytes::Bytes = rsp.bytes().await.context(list_by_billing_profile::ResponseBytesError)?;
let rsp_value: InvoiceListResult =
serde_json::from_slice(&body).context(list_by_billing_profile::DeserializeError { body })?;
Ok(rsp_value)
}
status_code => {
let body: bytes::Bytes = rsp.bytes().await.context(list_by_billing_profile::ResponseBytesError)?;
let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(list_by_billing_profile::DeserializeError { body })?;
list_by_billing_profile::DefaultResponse {
status_code,
value: rsp_value,
}
.fail()
}
}
}
// Error type for the `list_by_billing_profile` operation; one variant per failure stage.
pub mod list_by_billing_profile {
    use crate::{models, models::*};
    use reqwest::StatusCode;
    use snafu::Snafu;
    #[derive(Debug, Snafu)]
    #[snafu(visibility(pub(crate)))]
    pub enum Error {
        // Service answered with a non-200 status and an `ErrorResponse` body.
        DefaultResponse {
            status_code: StatusCode,
            value: models::ErrorResponse,
        },
        // Constructing the HTTP request failed.
        BuildRequestError {
            source: reqwest::Error,
        },
        // Sending the request failed.
        ExecuteRequestError {
            source: reqwest::Error,
        },
        // Reading the response body failed.
        ResponseBytesError {
            source: reqwest::Error,
        },
        // JSON deserialization failed; raw bytes kept for diagnostics.
        DeserializeError {
            source: serde_json::Error,
            body: bytes::Bytes,
        },
        // Acquiring the Azure AD token failed.
        GetTokenError {
            source: azure_core::errors::AzureError,
        },
    }
}
pub async fn get(
operation_config: &crate::OperationConfig,
billing_account_name: &str,
invoice_name: &str,
) -> std::result::Result<Invoice, get::Error> {
let client = &operation_config.client;
let uri_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/{}/invoices/{}",
&operation_config.base_path, billing_account_name, invoice_name
);
let mut req_builder = client.get(uri_str);
if let Some(token_credential) = &operation_config.token_credential {
let token_response = token_credential
.get_token(&operation_config.token_credential_resource)
.await
.context(get::GetTokenError)?;
req_builder = req_builder.bearer_auth(token_response.token.secret());
}
req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
let req = req_builder.build().context(get::BuildRequestError)?;
let rsp = client.execute(req).await.context(get::ExecuteRequestError)?;
match rsp.status() {
StatusCode::OK => {
let body: bytes::Bytes = rsp.bytes().await.context(get::ResponseBytesError)?;
let rsp_value: Invoice = serde_json::from_slice(&body).context(get::DeserializeError { body })?;
Ok(rsp_value)
}
status_code => {
let body: bytes::Bytes = rsp.bytes().await.context(get::ResponseBytesError)?;
let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(get::DeserializeError { body })?;
get::DefaultResponse {
status_code,
value: rsp_value,
}
.fail()
}
}
}
// Error type for the invoices `get` operation; one variant per failure stage.
pub mod get {
    use crate::{models, models::*};
    use reqwest::StatusCode;
    use snafu::Snafu;
    #[derive(Debug, Snafu)]
    #[snafu(visibility(pub(crate)))]
    pub enum Error {
        // Service answered with a non-200 status and an `ErrorResponse` body.
        DefaultResponse {
            status_code: StatusCode,
            value: models::ErrorResponse,
        },
        // Constructing the HTTP request failed.
        BuildRequestError {
            source: reqwest::Error,
        },
        // Sending the request failed.
        ExecuteRequestError {
            source: reqwest::Error,
        },
        // Reading the response body failed.
        ResponseBytesError {
            source: reqwest::Error,
        },
        // JSON deserialization failed; raw bytes kept for diagnostics.
        DeserializeError {
            source: serde_json::Error,
            body: bytes::Bytes,
        },
        // Acquiring the Azure AD token failed.
        GetTokenError {
            source: azure_core::errors::AzureError,
        },
    }
}
pub async fn get_by_id(
operation_config: &crate::OperationConfig,
invoice_name: &str,
) -> std::result::Result<Invoice, get_by_id::Error> {
let client = &operation_config.client;
let uri_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/default/invoices/{}",
&operation_config.base_path, invoice_name
);
let mut req_builder = client.get(uri_str);
if let Some(token_credential) = &operation_config.token_credential {
let token_response = token_credential
.get_token(&operation_config.token_credential_resource)
.await
.context(get_by_id::GetTokenError)?;
req_builder = req_builder.bearer_auth(token_response.token.secret());
}
req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
let req = req_builder.build().context(get_by_id::BuildRequestError)?;
let rsp = client.execute(req).await.context(get_by_id::ExecuteRequestError)?;
match rsp.status() {
StatusCode::OK => {
let body: bytes::Bytes = rsp.bytes().await.context(get_by_id::ResponseBytesError)?;
let rsp_value: Invoice = serde_json::from_slice(&body).context(get_by_id::DeserializeError { body })?;
Ok(rsp_value)
}
status_code => {
let body: bytes::Bytes = rsp.bytes().await.context(get_by_id::ResponseBytesError)?;
let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(get_by_id::DeserializeError { body })?;
get_by_id::DefaultResponse {
status_code,
value: rsp_value,
}
.fail()
}
}
}
// Error type for the `get_by_id` operation; one variant per failure stage.
pub mod get_by_id {
    use crate::{models, models::*};
    use reqwest::StatusCode;
    use snafu::Snafu;
    #[derive(Debug, Snafu)]
    #[snafu(visibility(pub(crate)))]
    pub enum Error {
        // Service answered with a non-200 status and an `ErrorResponse` body.
        DefaultResponse {
            status_code: StatusCode,
            value: models::ErrorResponse,
        },
        // Constructing the HTTP request failed.
        BuildRequestError {
            source: reqwest::Error,
        },
        // Sending the request failed.
        ExecuteRequestError {
            source: reqwest::Error,
        },
        // Reading the response body failed.
        ResponseBytesError {
            source: reqwest::Error,
        },
        // JSON deserialization failed; raw bytes kept for diagnostics.
        DeserializeError {
            source: serde_json::Error,
            body: bytes::Bytes,
        },
        // Acquiring the Azure AD token failed.
        GetTokenError {
            source: azure_core::errors::AzureError,
        },
    }
}
pub async fn download_invoice(
operation_config: &crate::OperationConfig,
billing_account_name: &str,
invoice_name: &str,
download_token: &str,
) -> std::result::Result<download_invoice::Response, download_invoice::Error> {
let client = &operation_config.client;
let uri_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/{}/invoices/{}/download",
&operation_config.base_path, billing_account_name, invoice_name
);
let mut req_builder = client.post(uri_str);
if let Some(token_credential) = &operation_config.token_credential {
let token_response = token_credential
.get_token(&operation_config.token_credential_resource)
.await
.context(download_invoice::GetTokenError)?;
req_builder = req_builder.bearer_auth(token_response.token.secret());
}
req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
req_builder = req_builder.query(&[("downloadToken", download_token)]);
req_builder = req_builder.header(reqwest::header::CONTENT_LENGTH, 0);
let req = req_builder.build().context(download_invoice::BuildRequestError)?;
let rsp = client.execute(req).await.context(download_invoice::ExecuteRequestError)?;
match rsp.status() {
StatusCode::ACCEPTED => Ok(download_invoice::Response::Accepted202),
StatusCode::OK => {
let body: bytes::Bytes = rsp.bytes().await.context(download_invoice::ResponseBytesError)?;
let rsp_value: DownloadUrl = serde_json::from_slice(&body).context(download_invoice::DeserializeError { body })?;
Ok(download_invoice::Response::Ok200(rsp_value))
}
status_code => {
let body: bytes::Bytes = rsp.bytes().await.context(download_invoice::ResponseBytesError)?;
let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(download_invoice::DeserializeError { body })?;
download_invoice::DefaultResponse {
status_code,
value: rsp_value,
}
.fail()
}
}
}
// Response and error types for the `download_invoice` operation.
pub mod download_invoice {
    use crate::{models, models::*};
    use reqwest::StatusCode;
    use snafu::Snafu;
    // Successful outcomes: HTTP 202 with no body, or HTTP 200 with a `DownloadUrl`.
    #[derive(Debug)]
    pub enum Response {
        Accepted202,
        Ok200(DownloadUrl),
    }
    #[derive(Debug, Snafu)]
    #[snafu(visibility(pub(crate)))]
    pub enum Error {
        // Service answered with an unexpected status and an `ErrorResponse` body.
        DefaultResponse {
            status_code: StatusCode,
            value: models::ErrorResponse,
        },
        // Constructing the HTTP request failed.
        BuildRequestError {
            source: reqwest::Error,
        },
        // Sending the request failed.
        ExecuteRequestError {
            source: reqwest::Error,
        },
        // Reading the response body failed.
        ResponseBytesError {
            source: reqwest::Error,
        },
        // JSON deserialization failed; raw bytes kept for diagnostics.
        DeserializeError {
            source: serde_json::Error,
            body: bytes::Bytes,
        },
        // Acquiring the Azure AD token failed.
        GetTokenError {
            source: azure_core::errors::AzureError,
        },
    }
}
pub async fn download_multiple_billing_profile_invoices(
operation_config: &crate::OperationConfig,
billing_account_name: &str,
download_urls: &Vec<&str>,
) -> std::result::Result<download_multiple_billing_profile_invoices::Response, download_multiple_billing_profile_invoices::Error> {
let client = &operation_config.client;
let uri_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/{}/downloadDocuments",
&operation_config.base_path, billing_account_name
);
let mut req_builder = client.post(uri_str);
if let Some(token_credential) = &operation_config.token_credential {
let token_response = token_credential
.get_token(&operation_config.token_credential_resource)
.await
.context(download_multiple_billing_profile_invoices::GetTokenError)?;
req_builder = req_builder.bearer_auth(token_response.token.secret());
}
req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
req_builder = req_builder.json(download_urls);
let req = req_builder
.build()
.context(download_multiple_billing_profile_invoices::BuildRequestError)?;
let rsp = client
.execute(req)
.await
.context(download_multiple_billing_profile_invoices::ExecuteRequestError)?;
match rsp.status() {
StatusCode::ACCEPTED => Ok(download_multiple_billing_profile_invoices::Response::Accepted202),
StatusCode::OK => {
let body: bytes::Bytes = rsp
.bytes()
.await
.context(download_multiple_billing_profile_invoices::ResponseBytesError)?;
let rsp_value: DownloadUrl =
serde_json::from_slice(&body).context(download_multiple_billing_profile_invoices::DeserializeError { body })?;
Ok(download_multiple_billing_profile_invoices::Response::Ok200(rsp_value))
}
status_code => {
let body: bytes::Bytes = rsp
.bytes()
.await
.context(download_multiple_billing_profile_invoices::ResponseBytesError)?;
let rsp_value: ErrorResponse =
serde_json::from_slice(&body).context(download_multiple_billing_profile_invoices::DeserializeError { body })?;
download_multiple_billing_profile_invoices::DefaultResponse {
status_code,
value: rsp_value,
}
.fail()
}
}
}
// Response and error types for `download_multiple_billing_profile_invoices`.
pub mod download_multiple_billing_profile_invoices {
    use crate::{models, models::*};
    use reqwest::StatusCode;
    use snafu::Snafu;
    // Successful outcomes: HTTP 202 with no body, or HTTP 200 with a `DownloadUrl`.
    #[derive(Debug)]
    pub enum Response {
        Accepted202,
        Ok200(DownloadUrl),
    }
    #[derive(Debug, Snafu)]
    #[snafu(visibility(pub(crate)))]
    pub enum Error {
        // Service answered with an unexpected status and an `ErrorResponse` body.
        DefaultResponse {
            status_code: StatusCode,
            value: models::ErrorResponse,
        },
        // Constructing the HTTP request failed.
        BuildRequestError {
            source: reqwest::Error,
        },
        // Sending the request failed.
        ExecuteRequestError {
            source: reqwest::Error,
        },
        // Reading the response body failed.
        ResponseBytesError {
            source: reqwest::Error,
        },
        // JSON deserialization failed; raw bytes kept for diagnostics.
        DeserializeError {
            source: serde_json::Error,
            body: bytes::Bytes,
        },
        // Acquiring the Azure AD token failed.
        GetTokenError {
            source: azure_core::errors::AzureError,
        },
    }
}
pub async fn list_by_billing_subscription(
operation_config: &crate::OperationConfig,
subscription_id: &str,
period_start_date: &str,
period_end_date: &str,
) -> std::result::Result<InvoiceListResult, list_by_billing_subscription::Error> {
let client = &operation_config.client;
let uri_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/default/billingSubscriptions/{}/invoices",
&operation_config.base_path, subscription_id
);
let mut req_builder = client.get(uri_str);
if let Some(token_credential) = &operation_config.token_credential {
let token_response = token_credential
.get_token(&operation_config.token_credential_resource)
.await
.context(list_by_billing_subscription::GetTokenError)?;
req_builder = req_builder.bearer_auth(token_response.token.secret());
}
req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
req_builder = req_builder.query(&[("periodStartDate", period_start_date)]);
req_builder = req_builder.query(&[("periodEndDate", period_end_date)]);
let req = req_builder.build().context(list_by_billing_subscription::BuildRequestError)?;
let rsp = client
.execute(req)
.await
.context(list_by_billing_subscription::ExecuteRequestError)?;
match rsp.status() {
StatusCode::OK => {
let body: bytes::Bytes = rsp.bytes().await.context(list_by_billing_subscription::ResponseBytesError)?;
let rsp_value: InvoiceListResult =
serde_json::from_slice(&body).context(list_by_billing_subscription::DeserializeError { body })?;
Ok(rsp_value)
}
status_code => {
let body: bytes::Bytes = rsp.bytes().await.context(list_by_billing_subscription::ResponseBytesError)?;
let rsp_value: ErrorResponse =
serde_json::from_slice(&body).context(list_by_billing_subscription::DeserializeError { body })?;
list_by_billing_subscription::DefaultResponse {
status_code,
value: rsp_value,
}
.fail()
}
}
}
// Error type for the `list_by_billing_subscription` operation; one variant per failure stage.
pub mod list_by_billing_subscription {
    use crate::{models, models::*};
    use reqwest::StatusCode;
    use snafu::Snafu;
    #[derive(Debug, Snafu)]
    #[snafu(visibility(pub(crate)))]
    pub enum Error {
        // Service answered with a non-200 status and an `ErrorResponse` body.
        DefaultResponse {
            status_code: StatusCode,
            value: models::ErrorResponse,
        },
        // Constructing the HTTP request failed.
        BuildRequestError {
            source: reqwest::Error,
        },
        // Sending the request failed.
        ExecuteRequestError {
            source: reqwest::Error,
        },
        // Reading the response body failed.
        ResponseBytesError {
            source: reqwest::Error,
        },
        // JSON deserialization failed; raw bytes kept for diagnostics.
        DeserializeError {
            source: serde_json::Error,
            body: bytes::Bytes,
        },
        // Acquiring the Azure AD token failed.
        GetTokenError {
            source: azure_core::errors::AzureError,
        },
    }
}
pub async fn get_by_subscription_and_invoice_id(
operation_config: &crate::OperationConfig,
subscription_id: &str,
invoice_name: &str,
) -> std::result::Result<Invoice, get_by_subscription_and_invoice_id::Error> {
let client = &operation_config.client;
let uri_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/default/billingSubscriptions/{}/invoices/{}",
&operation_config.base_path, subscription_id, invoice_name
);
let mut req_builder = client.get(uri_str);
if let Some(token_credential) = &operation_config.token_credential {
let token_response = token_credential
.get_token(&operation_config.token_credential_resource)
.await
.context(get_by_subscription_and_invoice_id::GetTokenError)?;
req_builder = req_builder.bearer_auth(token_response.token.secret());
}
req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
let req = req_builder.build().context(get_by_subscription_and_invoice_id::BuildRequestError)?;
let rsp = client
.execute(req)
.await
.context(get_by_subscription_and_invoice_id::ExecuteRequestError)?;
match rsp.status() {
StatusCode::OK => {
let body: bytes::Bytes = rsp.bytes().await.context(get_by_subscription_and_invoice_id::ResponseBytesError)?;
let rsp_value: Invoice =
serde_json::from_slice(&body).context(get_by_subscription_and_invoice_id::DeserializeError { body })?;
Ok(rsp_value)
}
status_code => {
let body: bytes::Bytes = rsp.bytes().await.context(get_by_subscription_and_invoice_id::ResponseBytesError)?;
let rsp_value: ErrorResponse =
serde_json::from_slice(&body).context(get_by_subscription_and_invoice_id::DeserializeError { body })?;
get_by_subscription_and_invoice_id::DefaultResponse {
status_code,
value: rsp_value,
}
.fail()
}
}
}
// Error type for the `get_by_subscription_and_invoice_id` operation.
pub mod get_by_subscription_and_invoice_id {
    use crate::{models, models::*};
    use reqwest::StatusCode;
    use snafu::Snafu;
    #[derive(Debug, Snafu)]
    #[snafu(visibility(pub(crate)))]
    pub enum Error {
        // Service answered with a non-200 status and an `ErrorResponse` body.
        DefaultResponse {
            status_code: StatusCode,
            value: models::ErrorResponse,
        },
        // Constructing the HTTP request failed.
        BuildRequestError {
            source: reqwest::Error,
        },
        // Sending the request failed.
        ExecuteRequestError {
            source: reqwest::Error,
        },
        // Reading the response body failed.
        ResponseBytesError {
            source: reqwest::Error,
        },
        // JSON deserialization failed; raw bytes kept for diagnostics.
        DeserializeError {
            source: serde_json::Error,
            body: bytes::Bytes,
        },
        // Acquiring the Azure AD token failed.
        GetTokenError {
            source: azure_core::errors::AzureError,
        },
    }
}
pub async fn download_billing_subscription_invoice(
operation_config: &crate::OperationConfig,
subscription_id: &str,
invoice_name: &str,
download_token: &str,
) -> std::result::Result<download_billing_subscription_invoice::Response, download_billing_subscription_invoice::Error> {
let client = &operation_config.client;
let uri_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/default/billingSubscriptions/{}/invoices/{}/download",
&operation_config.base_path, subscription_id, invoice_name
);
let mut req_builder = client.post(uri_str);
if let Some(token_credential) = &operation_config.token_credential {
let token_response = token_credential
.get_token(&operation_config.token_credential_resource)
.await
.context(download_billing_subscription_invoice::GetTokenError)?;
req_builder = req_builder.bearer_auth(token_response.token.secret());
}
req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
req_builder = req_builder.query(&[("downloadToken", download_token)]);
req_builder = req_builder.header(reqwest::header::CONTENT_LENGTH, 0);
let req = req_builder
.build()
.context(download_billing_subscription_invoice::BuildRequestError)?;
let rsp = client
.execute(req)
.await
.context(download_billing_subscription_invoice::ExecuteRequestError)?;
match rsp.status() {
StatusCode::ACCEPTED => Ok(download_billing_subscription_invoice::Response::Accepted202),
StatusCode::OK => {
let body: bytes::Bytes = rsp
.bytes()
.await
.context(download_billing_subscription_invoice::ResponseBytesError)?;
let rsp_value: DownloadUrl =
serde_json::from_slice(&body).context(download_billing_subscription_invoice::DeserializeError { body })?;
Ok(download_billing_subscription_invoice::Response::Ok200(rsp_value))
}
status_code => {
let body: bytes::Bytes = rsp
.bytes()
.await
.context(download_billing_subscription_invoice::ResponseBytesError)?;
let rsp_value: ErrorResponse =
serde_json::from_slice(&body).context(download_billing_subscription_invoice::DeserializeError { body })?;
download_billing_subscription_invoice::DefaultResponse {
status_code,
value: rsp_value,
}
.fail()
}
}
}
// Response and error types for `download_billing_subscription_invoice`.
pub mod download_billing_subscription_invoice {
    use crate::{models, models::*};
    use reqwest::StatusCode;
    use snafu::Snafu;
    // Successful outcomes: HTTP 202 with no body, or HTTP 200 with a `DownloadUrl`.
    #[derive(Debug)]
    pub enum Response {
        Accepted202,
        Ok200(DownloadUrl),
    }
    #[derive(Debug, Snafu)]
    #[snafu(visibility(pub(crate)))]
    pub enum Error {
        // Service answered with an unexpected status and an `ErrorResponse` body.
        DefaultResponse {
            status_code: StatusCode,
            value: models::ErrorResponse,
        },
        // Constructing the HTTP request failed.
        BuildRequestError {
            source: reqwest::Error,
        },
        // Sending the request failed.
        ExecuteRequestError {
            source: reqwest::Error,
        },
        // Reading the response body failed.
        ResponseBytesError {
            source: reqwest::Error,
        },
        // JSON deserialization failed; raw bytes kept for diagnostics.
        DeserializeError {
            source: serde_json::Error,
            body: bytes::Bytes,
        },
        // Acquiring the Azure AD token failed.
        GetTokenError {
            source: azure_core::errors::AzureError,
        },
    }
}
pub async fn download_multiple_billing_subscription_invoices(
operation_config: &crate::OperationConfig,
subscription_id: &str,
download_urls: &Vec<&str>,
) -> std::result::Result<
download_multiple_billing_subscription_invoices::Response,
download_multiple_billing_subscription_invoices::Error,
> {
let client = &operation_config.client;
let uri_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/default/billingSubscriptions/{}/downloadDocuments",
&operation_config.base_path, subscription_id
);
let mut req_builder = client.post(uri_str);
if let Some(token_credential) = &operation_config.token_credential {
let token_response = token_credential
.get_token(&operation_config.token_credential_resource)
.await
.context(download_multiple_billing_subscription_invoices::GetTokenError)?;
req_builder = req_builder.bearer_auth(token_response.token.secret());
}
req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
req_builder = req_builder.json(download_urls);
let req = req_builder
.build()
.context(download_multiple_billing_subscription_invoices::BuildRequestError)?;
let rsp = client
.execute(req)
.await
.context(download_multiple_billing_subscription_invoices::ExecuteRequestError)?;
match rsp.status() {
StatusCode::ACCEPTED => Ok(download_multiple_billing_subscription_invoices::Response::Accepted202),
StatusCode::OK => {
let body: bytes::Bytes = rsp
.bytes()
.await
.context(download_multiple_billing_subscription_invoices::ResponseBytesError)?;
let rsp_value: DownloadUrl =
serde_json::from_slice(&body).context(download_multiple_billing_subscription_invoices::DeserializeError { body })?;
Ok(download_multiple_billing_subscription_invoices::Response::Ok200(rsp_value))
}
status_code => {
let body: bytes::Bytes = rsp
.bytes()
.await
.context(download_multiple_billing_subscription_invoices::ResponseBytesError)?;
let rsp_value: ErrorResponse =
serde_json::from_slice(&body).context(download_multiple_billing_subscription_invoices::DeserializeError { body })?;
download_multiple_billing_subscription_invoices::DefaultResponse {
status_code,
value: rsp_value,
}
.fail()
}
}
}
// Response and error types for `download_multiple_billing_subscription_invoices`.
pub mod download_multiple_billing_subscription_invoices {
    use crate::{models, models::*};
    use reqwest::StatusCode;
    use snafu::Snafu;
    // Successful outcomes: HTTP 202 with no body, or HTTP 200 with a `DownloadUrl`.
    #[derive(Debug)]
    pub enum Response {
        Accepted202,
        Ok200(DownloadUrl),
    }
    #[derive(Debug, Snafu)]
    #[snafu(visibility(pub(crate)))]
    pub enum Error {
        // Service answered with an unexpected status and an `ErrorResponse` body.
        DefaultResponse {
            status_code: StatusCode,
            value: models::ErrorResponse,
        },
        // Constructing the HTTP request failed.
        BuildRequestError {
            source: reqwest::Error,
        },
        // Sending the request failed.
        ExecuteRequestError {
            source: reqwest::Error,
        },
        // Reading the response body failed.
        ResponseBytesError {
            source: reqwest::Error,
        },
        // JSON deserialization failed; raw bytes kept for diagnostics.
        DeserializeError {
            source: serde_json::Error,
            body: bytes::Bytes,
        },
        // Acquiring the Azure AD token failed.
        GetTokenError {
            source: azure_core::errors::AzureError,
        },
    }
}
}
pub mod transactions {
    use crate::models::*;
    use reqwest::StatusCode;
    use snafu::{ResultExt, Snafu};

    /// Lists the transactions of an invoice under a billing account.
    ///
    /// Issues `GET {base_path}/providers/Microsoft.Billing/billingAccounts/{billing_account_name}/invoices/{invoice_name}/transactions`
    /// and deserializes a 200 response into [`TransactionListResult`]; any other
    /// status is returned as [`list_by_invoice::Error::DefaultResponse`].
    pub async fn list_by_invoice(
        operation_config: &crate::OperationConfig,
        billing_account_name: &str,
        invoice_name: &str,
    ) -> std::result::Result<TransactionListResult, list_by_invoice::Error> {
        let http_client = &operation_config.client;
        let url = format!(
            "{}/providers/Microsoft.Billing/billingAccounts/{}/invoices/{}/transactions",
            &operation_config.base_path, billing_account_name, invoice_name
        );
        let mut request_builder = http_client.get(&url);
        // Bearer authentication is applied only when a credential was configured.
        if let Some(credential) = &operation_config.token_credential {
            let token = credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(list_by_invoice::GetTokenError)?;
            request_builder = request_builder.bearer_auth(token.token.secret());
        }
        request_builder = request_builder.query(&[("api-version", &operation_config.api_version)]);
        let request = request_builder.build().context(list_by_invoice::BuildRequestError)?;
        let response = http_client
            .execute(request)
            .await
            .context(list_by_invoice::ExecuteRequestError)?;
        let status = response.status();
        // Read the whole body up front; both branches need it.
        let body: bytes::Bytes = response.bytes().await.context(list_by_invoice::ResponseBytesError)?;
        if status == StatusCode::OK {
            let parsed: TransactionListResult =
                serde_json::from_slice(&body).context(list_by_invoice::DeserializeError { body })?;
            Ok(parsed)
        } else {
            let value: ErrorResponse =
                serde_json::from_slice(&body).context(list_by_invoice::DeserializeError { body })?;
            list_by_invoice::DefaultResponse { status_code: status, value }.fail()
        }
    }
    pub mod list_by_invoice {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;

        /// Failure modes of [`list_by_invoice`](super::list_by_invoice).
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            /// The service replied with an unexpected status code.
            DefaultResponse { status_code: StatusCode, value: models::ErrorResponse },
            /// The HTTP request could not be constructed.
            BuildRequestError { source: reqwest::Error },
            /// The HTTP request could not be sent.
            ExecuteRequestError { source: reqwest::Error },
            /// The response body could not be read.
            ResponseBytesError { source: reqwest::Error },
            /// The response body was not valid JSON for the expected type.
            DeserializeError { source: serde_json::Error, body: bytes::Bytes },
            /// Acquiring an authentication token failed.
            GetTokenError { source: azure_core::errors::AzureError },
        }
    }
}
pub mod policies {
use crate::models::*;
use reqwest::StatusCode;
use snafu::{ResultExt, Snafu};
/// Fetches the default policy of a billing profile via
/// `GET .../billingAccounts/{billing_account_name}/billingProfiles/{billing_profile_name}/policies/default`.
/// A 200 response is deserialized into [`Policy`]; any other status becomes
/// [`get_by_billing_profile::Error::DefaultResponse`].
pub async fn get_by_billing_profile(
operation_config: &crate::OperationConfig,
billing_account_name: &str,
billing_profile_name: &str,
) -> std::result::Result<Policy, get_by_billing_profile::Error> {
let client = &operation_config.client;
let uri_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/{}/billingProfiles/{}/policies/default",
&operation_config.base_path, billing_account_name, billing_profile_name
);
let mut req_builder = client.get(uri_str);
// Bearer authentication is applied only when a credential was configured.
if let Some(token_credential) = &operation_config.token_credential {
let token_response = token_credential
.get_token(&operation_config.token_credential_resource)
.await
.context(get_by_billing_profile::GetTokenError)?;
req_builder = req_builder.bearer_auth(token_response.token.secret());
}
req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
let req = req_builder.build().context(get_by_billing_profile::BuildRequestError)?;
let rsp = client.execute(req).await.context(get_by_billing_profile::ExecuteRequestError)?;
match rsp.status() {
StatusCode::OK => {
let body: bytes::Bytes = rsp.bytes().await.context(get_by_billing_profile::ResponseBytesError)?;
let rsp_value: Policy = serde_json::from_slice(&body).context(get_by_billing_profile::DeserializeError { body })?;
Ok(rsp_value)
}
// Any other status: parse the error payload and surface it as DefaultResponse.
status_code => {
let body: bytes::Bytes = rsp.bytes().await.context(get_by_billing_profile::ResponseBytesError)?;
let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(get_by_billing_profile::DeserializeError { body })?;
get_by_billing_profile::DefaultResponse {
status_code,
value: rsp_value,
}
.fail()
}
}
}
/// Errors produced by [`get_by_billing_profile`].
pub mod get_by_billing_profile {
use crate::{models, models::*};
use reqwest::StatusCode;
use snafu::Snafu;
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
DefaultResponse {
status_code: StatusCode,
value: models::ErrorResponse,
},
BuildRequestError {
source: reqwest::Error,
},
ExecuteRequestError {
source: reqwest::Error,
},
ResponseBytesError {
source: reqwest::Error,
},
DeserializeError {
source: serde_json::Error,
body: bytes::Bytes,
},
GetTokenError {
source: azure_core::errors::AzureError,
},
}
}
/// Replaces the default policy of a billing profile via
/// `PUT .../billingAccounts/{billing_account_name}/billingProfiles/{billing_profile_name}/policies/default`,
/// sending `parameters` as the JSON request body.
pub async fn update(
operation_config: &crate::OperationConfig,
billing_account_name: &str,
billing_profile_name: &str,
parameters: &Policy,
) -> std::result::Result<Policy, update::Error> {
let client = &operation_config.client;
let uri_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/{}/billingProfiles/{}/policies/default",
&operation_config.base_path, billing_account_name, billing_profile_name
);
let mut req_builder = client.put(uri_str);
if let Some(token_credential) = &operation_config.token_credential {
let token_response = token_credential
.get_token(&operation_config.token_credential_resource)
.await
.context(update::GetTokenError)?;
req_builder = req_builder.bearer_auth(token_response.token.secret());
}
req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
req_builder = req_builder.json(parameters);
let req = req_builder.build().context(update::BuildRequestError)?;
let rsp = client.execute(req).await.context(update::ExecuteRequestError)?;
match rsp.status() {
StatusCode::OK => {
let body: bytes::Bytes = rsp.bytes().await.context(update::ResponseBytesError)?;
let rsp_value: Policy = serde_json::from_slice(&body).context(update::DeserializeError { body })?;
Ok(rsp_value)
}
status_code => {
let body: bytes::Bytes = rsp.bytes().await.context(update::ResponseBytesError)?;
let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(update::DeserializeError { body })?;
update::DefaultResponse {
status_code,
value: rsp_value,
}
.fail()
}
}
}
/// Errors produced by [`update`].
pub mod update {
use crate::{models, models::*};
use reqwest::StatusCode;
use snafu::Snafu;
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
DefaultResponse {
status_code: StatusCode,
value: models::ErrorResponse,
},
BuildRequestError {
source: reqwest::Error,
},
ExecuteRequestError {
source: reqwest::Error,
},
ResponseBytesError {
source: reqwest::Error,
},
DeserializeError {
source: serde_json::Error,
body: bytes::Bytes,
},
GetTokenError {
source: azure_core::errors::AzureError,
},
}
}
/// Fetches the default policy of a customer via
/// `GET .../billingAccounts/{billing_account_name}/customers/{customer_name}/policies/default`.
pub async fn get_by_customer(
operation_config: &crate::OperationConfig,
billing_account_name: &str,
customer_name: &str,
) -> std::result::Result<CustomerPolicy, get_by_customer::Error> {
let client = &operation_config.client;
let uri_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/{}/customers/{}/policies/default",
&operation_config.base_path, billing_account_name, customer_name
);
let mut req_builder = client.get(uri_str);
if let Some(token_credential) = &operation_config.token_credential {
let token_response = token_credential
.get_token(&operation_config.token_credential_resource)
.await
.context(get_by_customer::GetTokenError)?;
req_builder = req_builder.bearer_auth(token_response.token.secret());
}
req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
let req = req_builder.build().context(get_by_customer::BuildRequestError)?;
let rsp = client.execute(req).await.context(get_by_customer::ExecuteRequestError)?;
match rsp.status() {
StatusCode::OK => {
let body: bytes::Bytes = rsp.bytes().await.context(get_by_customer::ResponseBytesError)?;
let rsp_value: CustomerPolicy = serde_json::from_slice(&body).context(get_by_customer::DeserializeError { body })?;
Ok(rsp_value)
}
status_code => {
let body: bytes::Bytes = rsp.bytes().await.context(get_by_customer::ResponseBytesError)?;
let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(get_by_customer::DeserializeError { body })?;
get_by_customer::DefaultResponse {
status_code,
value: rsp_value,
}
.fail()
}
}
}
/// Errors produced by [`get_by_customer`].
pub mod get_by_customer {
use crate::{models, models::*};
use reqwest::StatusCode;
use snafu::Snafu;
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
DefaultResponse {
status_code: StatusCode,
value: models::ErrorResponse,
},
BuildRequestError {
source: reqwest::Error,
},
ExecuteRequestError {
source: reqwest::Error,
},
ResponseBytesError {
source: reqwest::Error,
},
DeserializeError {
source: serde_json::Error,
body: bytes::Bytes,
},
GetTokenError {
source: azure_core::errors::AzureError,
},
}
}
/// Replaces the default policy of a customer via
/// `PUT .../billingAccounts/{billing_account_name}/customers/{customer_name}/policies/default`,
/// sending `parameters` as the JSON request body.
pub async fn update_customer(
operation_config: &crate::OperationConfig,
billing_account_name: &str,
customer_name: &str,
parameters: &CustomerPolicy,
) -> std::result::Result<CustomerPolicy, update_customer::Error> {
let client = &operation_config.client;
let uri_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/{}/customers/{}/policies/default",
&operation_config.base_path, billing_account_name, customer_name
);
let mut req_builder = client.put(uri_str);
if let Some(token_credential) = &operation_config.token_credential {
let token_response = token_credential
.get_token(&operation_config.token_credential_resource)
.await
.context(update_customer::GetTokenError)?;
req_builder = req_builder.bearer_auth(token_response.token.secret());
}
req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
req_builder = req_builder.json(parameters);
let req = req_builder.build().context(update_customer::BuildRequestError)?;
let rsp = client.execute(req).await.context(update_customer::ExecuteRequestError)?;
match rsp.status() {
StatusCode::OK => {
let body: bytes::Bytes = rsp.bytes().await.context(update_customer::ResponseBytesError)?;
let rsp_value: CustomerPolicy = serde_json::from_slice(&body).context(update_customer::DeserializeError { body })?;
Ok(rsp_value)
}
status_code => {
let body: bytes::Bytes = rsp.bytes().await.context(update_customer::ResponseBytesError)?;
let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(update_customer::DeserializeError { body })?;
update_customer::DefaultResponse {
status_code,
value: rsp_value,
}
.fail()
}
}
}
/// Errors produced by [`update_customer`].
pub mod update_customer {
use crate::{models, models::*};
use reqwest::StatusCode;
use snafu::Snafu;
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
DefaultResponse {
status_code: StatusCode,
value: models::ErrorResponse,
},
BuildRequestError {
source: reqwest::Error,
},
ExecuteRequestError {
source: reqwest::Error,
},
ResponseBytesError {
source: reqwest::Error,
},
DeserializeError {
source: serde_json::Error,
body: bytes::Bytes,
},
GetTokenError {
source: azure_core::errors::AzureError,
},
}
}
}
pub mod billing_property {
    use crate::models::*;
    use reqwest::StatusCode;
    use snafu::{ResultExt, Snafu};

    /// Retrieves the billing property of a subscription.
    ///
    /// Issues `GET {base_path}/subscriptions/{subscription_id}/providers/Microsoft.Billing/billingProperty/default`
    /// and deserializes a 200 response into [`BillingProperty`]; any other status
    /// becomes [`get::Error::DefaultResponse`].
    pub async fn get(operation_config: &crate::OperationConfig, subscription_id: &str) -> std::result::Result<BillingProperty, get::Error> {
        let http_client = &operation_config.client;
        let url = format!(
            "{}/subscriptions/{}/providers/Microsoft.Billing/billingProperty/default",
            &operation_config.base_path, subscription_id
        );
        let mut request_builder = http_client.get(&url);
        // Bearer authentication is applied only when a credential was configured.
        if let Some(credential) = &operation_config.token_credential {
            let token = credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(get::GetTokenError)?;
            request_builder = request_builder.bearer_auth(token.token.secret());
        }
        request_builder = request_builder.query(&[("api-version", &operation_config.api_version)]);
        let request = request_builder.build().context(get::BuildRequestError)?;
        let response = http_client.execute(request).await.context(get::ExecuteRequestError)?;
        let status = response.status();
        // Read the whole body up front; both branches need it.
        let body: bytes::Bytes = response.bytes().await.context(get::ResponseBytesError)?;
        if status == StatusCode::OK {
            let parsed: BillingProperty = serde_json::from_slice(&body).context(get::DeserializeError { body })?;
            Ok(parsed)
        } else {
            let value: ErrorResponse = serde_json::from_slice(&body).context(get::DeserializeError { body })?;
            get::DefaultResponse { status_code: status, value }.fail()
        }
    }
    pub mod get {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;

        /// Failure modes of [`get`](super::get).
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            /// The service replied with an unexpected status code.
            DefaultResponse { status_code: StatusCode, value: models::ErrorResponse },
            /// The HTTP request could not be constructed.
            BuildRequestError { source: reqwest::Error },
            /// The HTTP request could not be sent.
            ExecuteRequestError { source: reqwest::Error },
            /// The response body could not be read.
            ResponseBytesError { source: reqwest::Error },
            /// The response body was not valid JSON for the expected type.
            DeserializeError { source: serde_json::Error, body: bytes::Bytes },
            /// Acquiring an authentication token failed.
            GetTokenError { source: azure_core::errors::AzureError },
        }
    }

    /// Patches the billing property of a subscription.
    ///
    /// Issues `PATCH {base_path}/subscriptions/{subscription_id}/providers/Microsoft.Billing/billingProperty/default`
    /// with `parameters` serialized as the JSON request body.
    pub async fn update(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        parameters: &BillingProperty,
    ) -> std::result::Result<BillingProperty, update::Error> {
        let http_client = &operation_config.client;
        let url = format!(
            "{}/subscriptions/{}/providers/Microsoft.Billing/billingProperty/default",
            &operation_config.base_path, subscription_id
        );
        let mut request_builder = http_client.patch(&url);
        if let Some(credential) = &operation_config.token_credential {
            let token = credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(update::GetTokenError)?;
            request_builder = request_builder.bearer_auth(token.token.secret());
        }
        request_builder = request_builder.query(&[("api-version", &operation_config.api_version)]);
        request_builder = request_builder.json(parameters);
        let request = request_builder.build().context(update::BuildRequestError)?;
        let response = http_client.execute(request).await.context(update::ExecuteRequestError)?;
        let status = response.status();
        let body: bytes::Bytes = response.bytes().await.context(update::ResponseBytesError)?;
        if status == StatusCode::OK {
            let parsed: BillingProperty = serde_json::from_slice(&body).context(update::DeserializeError { body })?;
            Ok(parsed)
        } else {
            let value: ErrorResponse = serde_json::from_slice(&body).context(update::DeserializeError { body })?;
            update::DefaultResponse { status_code: status, value }.fail()
        }
    }
    pub mod update {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;

        /// Failure modes of [`update`](super::update).
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            /// The service replied with an unexpected status code.
            DefaultResponse { status_code: StatusCode, value: models::ErrorResponse },
            /// The HTTP request could not be constructed.
            BuildRequestError { source: reqwest::Error },
            /// The HTTP request could not be sent.
            ExecuteRequestError { source: reqwest::Error },
            /// The response body could not be read.
            ResponseBytesError { source: reqwest::Error },
            /// The response body was not valid JSON for the expected type.
            DeserializeError { source: serde_json::Error, body: bytes::Bytes },
            /// Acquiring an authentication token failed.
            GetTokenError { source: azure_core::errors::AzureError },
        }
    }
}
pub mod operations {
    use crate::models::*;
    use reqwest::StatusCode;
    use snafu::{ResultExt, Snafu};

    /// Lists the operations supported by the Microsoft.Billing resource provider.
    ///
    /// Issues `GET {base_path}/providers/Microsoft.Billing/operations` and
    /// deserializes a 200 response into [`OperationListResult`]; any other status
    /// becomes [`list::Error::DefaultResponse`].
    pub async fn list(operation_config: &crate::OperationConfig) -> std::result::Result<OperationListResult, list::Error> {
        let http_client = &operation_config.client;
        let url = format!("{}/providers/Microsoft.Billing/operations", &operation_config.base_path);
        let mut request_builder = http_client.get(&url);
        // Bearer authentication is applied only when a credential was configured.
        if let Some(credential) = &operation_config.token_credential {
            let token = credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(list::GetTokenError)?;
            request_builder = request_builder.bearer_auth(token.token.secret());
        }
        request_builder = request_builder.query(&[("api-version", &operation_config.api_version)]);
        let request = request_builder.build().context(list::BuildRequestError)?;
        let response = http_client.execute(request).await.context(list::ExecuteRequestError)?;
        let status = response.status();
        // Read the whole body up front; both branches need it.
        let body: bytes::Bytes = response.bytes().await.context(list::ResponseBytesError)?;
        if status == StatusCode::OK {
            let parsed: OperationListResult = serde_json::from_slice(&body).context(list::DeserializeError { body })?;
            Ok(parsed)
        } else {
            let value: ErrorResponse = serde_json::from_slice(&body).context(list::DeserializeError { body })?;
            list::DefaultResponse { status_code: status, value }.fail()
        }
    }
    pub mod list {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;

        /// Failure modes of [`list`](super::list).
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            /// The service replied with an unexpected status code.
            DefaultResponse { status_code: StatusCode, value: models::ErrorResponse },
            /// The HTTP request could not be constructed.
            BuildRequestError { source: reqwest::Error },
            /// The HTTP request could not be sent.
            ExecuteRequestError { source: reqwest::Error },
            /// The response body could not be read.
            ResponseBytesError { source: reqwest::Error },
            /// The response body was not valid JSON for the expected type.
            DeserializeError { source: serde_json::Error, body: bytes::Bytes },
            /// Acquiring an authentication token failed.
            GetTokenError { source: azure_core::errors::AzureError },
        }
    }
}
pub mod billing_role_definitions {
use crate::models::*;
use reqwest::StatusCode;
use snafu::{ResultExt, Snafu};
/// Gets a role definition scoped to a billing account via
/// `GET .../billingAccounts/{billing_account_name}/billingRoleDefinitions/{billing_role_definition_name}`.
/// A 200 response is deserialized into [`BillingRoleDefinition`]; any other
/// status becomes [`get_by_billing_account::Error::DefaultResponse`].
pub async fn get_by_billing_account(
operation_config: &crate::OperationConfig,
billing_account_name: &str,
billing_role_definition_name: &str,
) -> std::result::Result<BillingRoleDefinition, get_by_billing_account::Error> {
let client = &operation_config.client;
let uri_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/{}/billingRoleDefinitions/{}",
&operation_config.base_path, billing_account_name, billing_role_definition_name
);
let mut req_builder = client.get(uri_str);
// Bearer authentication is applied only when a credential was configured.
if let Some(token_credential) = &operation_config.token_credential {
let token_response = token_credential
.get_token(&operation_config.token_credential_resource)
.await
.context(get_by_billing_account::GetTokenError)?;
req_builder = req_builder.bearer_auth(token_response.token.secret());
}
req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
let req = req_builder.build().context(get_by_billing_account::BuildRequestError)?;
let rsp = client.execute(req).await.context(get_by_billing_account::ExecuteRequestError)?;
match rsp.status() {
StatusCode::OK => {
let body: bytes::Bytes = rsp.bytes().await.context(get_by_billing_account::ResponseBytesError)?;
let rsp_value: BillingRoleDefinition =
serde_json::from_slice(&body).context(get_by_billing_account::DeserializeError { body })?;
Ok(rsp_value)
}
// Any other status: parse the error payload and surface it as DefaultResponse.
status_code => {
let body: bytes::Bytes = rsp.bytes().await.context(get_by_billing_account::ResponseBytesError)?;
let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(get_by_billing_account::DeserializeError { body })?;
get_by_billing_account::DefaultResponse {
status_code,
value: rsp_value,
}
.fail()
}
}
}
/// Errors produced by [`get_by_billing_account`].
pub mod get_by_billing_account {
use crate::{models, models::*};
use reqwest::StatusCode;
use snafu::Snafu;
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
DefaultResponse {
status_code: StatusCode,
value: models::ErrorResponse,
},
BuildRequestError {
source: reqwest::Error,
},
ExecuteRequestError {
source: reqwest::Error,
},
ResponseBytesError {
source: reqwest::Error,
},
DeserializeError {
source: serde_json::Error,
body: bytes::Bytes,
},
GetTokenError {
source: azure_core::errors::AzureError,
},
}
}
/// Gets a role definition scoped to an invoice section via
/// `GET .../billingProfiles/{billing_profile_name}/invoiceSections/{invoice_section_name}/billingRoleDefinitions/{billing_role_definition_name}`.
pub async fn get_by_invoice_section(
operation_config: &crate::OperationConfig,
billing_account_name: &str,
billing_profile_name: &str,
invoice_section_name: &str,
billing_role_definition_name: &str,
) -> std::result::Result<BillingRoleDefinition, get_by_invoice_section::Error> {
let client = &operation_config.client;
let uri_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/{}/billingProfiles/{}/invoiceSections/{}/billingRoleDefinitions/{}",
&operation_config.base_path, billing_account_name, billing_profile_name, invoice_section_name, billing_role_definition_name
);
let mut req_builder = client.get(uri_str);
if let Some(token_credential) = &operation_config.token_credential {
let token_response = token_credential
.get_token(&operation_config.token_credential_resource)
.await
.context(get_by_invoice_section::GetTokenError)?;
req_builder = req_builder.bearer_auth(token_response.token.secret());
}
req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
let req = req_builder.build().context(get_by_invoice_section::BuildRequestError)?;
let rsp = client.execute(req).await.context(get_by_invoice_section::ExecuteRequestError)?;
match rsp.status() {
StatusCode::OK => {
let body: bytes::Bytes = rsp.bytes().await.context(get_by_invoice_section::ResponseBytesError)?;
let rsp_value: BillingRoleDefinition =
serde_json::from_slice(&body).context(get_by_invoice_section::DeserializeError { body })?;
Ok(rsp_value)
}
status_code => {
let body: bytes::Bytes = rsp.bytes().await.context(get_by_invoice_section::ResponseBytesError)?;
let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(get_by_invoice_section::DeserializeError { body })?;
get_by_invoice_section::DefaultResponse {
status_code,
value: rsp_value,
}
.fail()
}
}
}
/// Errors produced by [`get_by_invoice_section`].
pub mod get_by_invoice_section {
use crate::{models, models::*};
use reqwest::StatusCode;
use snafu::Snafu;
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
DefaultResponse {
status_code: StatusCode,
value: models::ErrorResponse,
},
BuildRequestError {
source: reqwest::Error,
},
ExecuteRequestError {
source: reqwest::Error,
},
ResponseBytesError {
source: reqwest::Error,
},
DeserializeError {
source: serde_json::Error,
body: bytes::Bytes,
},
GetTokenError {
source: azure_core::errors::AzureError,
},
}
}
/// Gets a role definition scoped to a billing profile via
/// `GET .../billingProfiles/{billing_profile_name}/billingRoleDefinitions/{billing_role_definition_name}`.
pub async fn get_by_billing_profile(
operation_config: &crate::OperationConfig,
billing_account_name: &str,
billing_profile_name: &str,
billing_role_definition_name: &str,
) -> std::result::Result<BillingRoleDefinition, get_by_billing_profile::Error> {
let client = &operation_config.client;
let uri_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/{}/billingProfiles/{}/billingRoleDefinitions/{}",
&operation_config.base_path, billing_account_name, billing_profile_name, billing_role_definition_name
);
let mut req_builder = client.get(uri_str);
if let Some(token_credential) = &operation_config.token_credential {
let token_response = token_credential
.get_token(&operation_config.token_credential_resource)
.await
.context(get_by_billing_profile::GetTokenError)?;
req_builder = req_builder.bearer_auth(token_response.token.secret());
}
req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
let req = req_builder.build().context(get_by_billing_profile::BuildRequestError)?;
let rsp = client.execute(req).await.context(get_by_billing_profile::ExecuteRequestError)?;
match rsp.status() {
StatusCode::OK => {
let body: bytes::Bytes = rsp.bytes().await.context(get_by_billing_profile::ResponseBytesError)?;
let rsp_value: BillingRoleDefinition =
serde_json::from_slice(&body).context(get_by_billing_profile::DeserializeError { body })?;
Ok(rsp_value)
}
status_code => {
let body: bytes::Bytes = rsp.bytes().await.context(get_by_billing_profile::ResponseBytesError)?;
let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(get_by_billing_profile::DeserializeError { body })?;
get_by_billing_profile::DefaultResponse {
status_code,
value: rsp_value,
}
.fail()
}
}
}
/// Errors produced by [`get_by_billing_profile`].
pub mod get_by_billing_profile {
use crate::{models, models::*};
use reqwest::StatusCode;
use snafu::Snafu;
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
DefaultResponse {
status_code: StatusCode,
value: models::ErrorResponse,
},
BuildRequestError {
source: reqwest::Error,
},
ExecuteRequestError {
source: reqwest::Error,
},
ResponseBytesError {
source: reqwest::Error,
},
DeserializeError {
source: serde_json::Error,
body: bytes::Bytes,
},
GetTokenError {
source: azure_core::errors::AzureError,
},
}
}
/// Lists the role definitions of a billing account via
/// `GET .../billingAccounts/{billing_account_name}/billingRoleDefinitions`.
pub async fn list_by_billing_account(
operation_config: &crate::OperationConfig,
billing_account_name: &str,
) -> std::result::Result<BillingRoleDefinitionListResult, list_by_billing_account::Error> {
let client = &operation_config.client;
let uri_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/{}/billingRoleDefinitions",
&operation_config.base_path, billing_account_name
);
let mut req_builder = client.get(uri_str);
if let Some(token_credential) = &operation_config.token_credential {
let token_response = token_credential
.get_token(&operation_config.token_credential_resource)
.await
.context(list_by_billing_account::GetTokenError)?;
req_builder = req_builder.bearer_auth(token_response.token.secret());
}
req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
let req = req_builder.build().context(list_by_billing_account::BuildRequestError)?;
let rsp = client.execute(req).await.context(list_by_billing_account::ExecuteRequestError)?;
match rsp.status() {
StatusCode::OK => {
let body: bytes::Bytes = rsp.bytes().await.context(list_by_billing_account::ResponseBytesError)?;
let rsp_value: BillingRoleDefinitionListResult =
serde_json::from_slice(&body).context(list_by_billing_account::DeserializeError { body })?;
Ok(rsp_value)
}
status_code => {
let body: bytes::Bytes = rsp.bytes().await.context(list_by_billing_account::ResponseBytesError)?;
let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(list_by_billing_account::DeserializeError { body })?;
list_by_billing_account::DefaultResponse {
status_code,
value: rsp_value,
}
.fail()
}
}
}
/// Errors produced by [`list_by_billing_account`].
pub mod list_by_billing_account {
use crate::{models, models::*};
use reqwest::StatusCode;
use snafu::Snafu;
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
DefaultResponse {
status_code: StatusCode,
value: models::ErrorResponse,
},
BuildRequestError {
source: reqwest::Error,
},
ExecuteRequestError {
source: reqwest::Error,
},
ResponseBytesError {
source: reqwest::Error,
},
DeserializeError {
source: serde_json::Error,
body: bytes::Bytes,
},
GetTokenError {
source: azure_core::errors::AzureError,
},
}
}
/// Lists the role definitions of an invoice section via
/// `GET .../billingProfiles/{billing_profile_name}/invoiceSections/{invoice_section_name}/billingRoleDefinitions`.
pub async fn list_by_invoice_section(
operation_config: &crate::OperationConfig,
billing_account_name: &str,
billing_profile_name: &str,
invoice_section_name: &str,
) -> std::result::Result<BillingRoleDefinitionListResult, list_by_invoice_section::Error> {
let client = &operation_config.client;
let uri_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/{}/billingProfiles/{}/invoiceSections/{}/billingRoleDefinitions",
&operation_config.base_path, billing_account_name, billing_profile_name, invoice_section_name
);
let mut req_builder = client.get(uri_str);
if let Some(token_credential) = &operation_config.token_credential {
let token_response = token_credential
.get_token(&operation_config.token_credential_resource)
.await
.context(list_by_invoice_section::GetTokenError)?;
req_builder = req_builder.bearer_auth(token_response.token.secret());
}
req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
let req = req_builder.build().context(list_by_invoice_section::BuildRequestError)?;
let rsp = client.execute(req).await.context(list_by_invoice_section::ExecuteRequestError)?;
match rsp.status() {
StatusCode::OK => {
let body: bytes::Bytes = rsp.bytes().await.context(list_by_invoice_section::ResponseBytesError)?;
let rsp_value: BillingRoleDefinitionListResult =
serde_json::from_slice(&body).context(list_by_invoice_section::DeserializeError { body })?;
Ok(rsp_value)
}
status_code => {
let body: bytes::Bytes = rsp.bytes().await.context(list_by_invoice_section::ResponseBytesError)?;
let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(list_by_invoice_section::DeserializeError { body })?;
list_by_invoice_section::DefaultResponse {
status_code,
value: rsp_value,
}
.fail()
}
}
}
/// Errors produced by [`list_by_invoice_section`].
pub mod list_by_invoice_section {
use crate::{models, models::*};
use reqwest::StatusCode;
use snafu::Snafu;
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
DefaultResponse {
status_code: StatusCode,
value: models::ErrorResponse,
},
BuildRequestError {
source: reqwest::Error,
},
ExecuteRequestError {
source: reqwest::Error,
},
ResponseBytesError {
source: reqwest::Error,
},
DeserializeError {
source: serde_json::Error,
body: bytes::Bytes,
},
GetTokenError {
source: azure_core::errors::AzureError,
},
}
}
/// Lists the role definitions of a billing profile via
/// `GET .../billingProfiles/{billing_profile_name}/billingRoleDefinitions`.
pub async fn list_by_billing_profile(
operation_config: &crate::OperationConfig,
billing_account_name: &str,
billing_profile_name: &str,
) -> std::result::Result<BillingRoleDefinitionListResult, list_by_billing_profile::Error> {
let client = &operation_config.client;
let uri_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/{}/billingProfiles/{}/billingRoleDefinitions",
&operation_config.base_path, billing_account_name, billing_profile_name
);
let mut req_builder = client.get(uri_str);
if let Some(token_credential) = &operation_config.token_credential {
let token_response = token_credential
.get_token(&operation_config.token_credential_resource)
.await
.context(list_by_billing_profile::GetTokenError)?;
req_builder = req_builder.bearer_auth(token_response.token.secret());
}
req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
let req = req_builder.build().context(list_by_billing_profile::BuildRequestError)?;
let rsp = client.execute(req).await.context(list_by_billing_profile::ExecuteRequestError)?;
match rsp.status() {
StatusCode::OK => {
let body: bytes::Bytes = rsp.bytes().await.context(list_by_billing_profile::ResponseBytesError)?;
let rsp_value: BillingRoleDefinitionListResult =
serde_json::from_slice(&body).context(list_by_billing_profile::DeserializeError { body })?;
Ok(rsp_value)
}
status_code => {
let body: bytes::Bytes = rsp.bytes().await.context(list_by_billing_profile::ResponseBytesError)?;
let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(list_by_billing_profile::DeserializeError { body })?;
list_by_billing_profile::DefaultResponse {
status_code,
value: rsp_value,
}
.fail()
}
}
}
/// Errors produced by [`list_by_billing_profile`].
pub mod list_by_billing_profile {
use crate::{models, models::*};
use reqwest::StatusCode;
use snafu::Snafu;
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
DefaultResponse {
status_code: StatusCode,
value: models::ErrorResponse,
},
BuildRequestError {
source: reqwest::Error,
},
ExecuteRequestError {
source: reqwest::Error,
},
ResponseBytesError {
source: reqwest::Error,
},
DeserializeError {
source: serde_json::Error,
body: bytes::Bytes,
},
GetTokenError {
source: azure_core::errors::AzureError,
},
}
}
}
pub mod billing_role_assignments {
use crate::models::*;
use reqwest::StatusCode;
use snafu::{ResultExt, Snafu};
pub async fn get_by_billing_account(
    operation_config: &crate::OperationConfig,
    billing_account_name: &str,
    billing_role_assignment_name: &str,
) -> std::result::Result<BillingRoleAssignment, get_by_billing_account::Error> {
    // Gets a role assignment scoped to a billing account:
    // GET {base_path}/providers/Microsoft.Billing/billingAccounts/{billing_account_name}/billingRoleAssignments/{billing_role_assignment_name}
    let http_client = &operation_config.client;
    let url = format!(
        "{}/providers/Microsoft.Billing/billingAccounts/{}/billingRoleAssignments/{}",
        &operation_config.base_path, billing_account_name, billing_role_assignment_name
    );
    let mut request_builder = http_client.get(&url);
    // Bearer authentication is applied only when a credential was configured.
    if let Some(credential) = &operation_config.token_credential {
        let token = credential
            .get_token(&operation_config.token_credential_resource)
            .await
            .context(get_by_billing_account::GetTokenError)?;
        request_builder = request_builder.bearer_auth(token.token.secret());
    }
    request_builder = request_builder.query(&[("api-version", &operation_config.api_version)]);
    let request = request_builder.build().context(get_by_billing_account::BuildRequestError)?;
    let response = http_client
        .execute(request)
        .await
        .context(get_by_billing_account::ExecuteRequestError)?;
    let status = response.status();
    // Read the whole body up front; both branches need it.
    let body: bytes::Bytes = response.bytes().await.context(get_by_billing_account::ResponseBytesError)?;
    if status == StatusCode::OK {
        let parsed: BillingRoleAssignment =
            serde_json::from_slice(&body).context(get_by_billing_account::DeserializeError { body })?;
        Ok(parsed)
    } else {
        let value: ErrorResponse =
            serde_json::from_slice(&body).context(get_by_billing_account::DeserializeError { body })?;
        get_by_billing_account::DefaultResponse { status_code: status, value }.fail()
    }
}
pub mod get_by_billing_account {
use crate::{models, models::*};
use reqwest::StatusCode;
use snafu::Snafu;
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
DefaultResponse {
status_code: StatusCode,
value: models::ErrorResponse,
},
BuildRequestError {
source: reqwest::Error,
},
ExecuteRequestError {
source: reqwest::Error,
},
ResponseBytesError {
source: reqwest::Error,
},
DeserializeError {
source: serde_json::Error,
body: bytes::Bytes,
},
GetTokenError {
source: azure_core::errors::AzureError,
},
}
}
pub async fn delete_by_billing_account(
operation_config: &crate::OperationConfig,
billing_account_name: &str,
billing_role_assignment_name: &str,
) -> std::result::Result<BillingRoleAssignment, delete_by_billing_account::Error> {
let client = &operation_config.client;
let uri_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/{}/billingRoleAssignments/{}",
&operation_config.base_path, billing_account_name, billing_role_assignment_name
);
let mut req_builder = client.delete(uri_str);
if let Some(token_credential) = &operation_config.token_credential {
let token_response = token_credential
.get_token(&operation_config.token_credential_resource)
.await
.context(delete_by_billing_account::GetTokenError)?;
req_builder = req_builder.bearer_auth(token_response.token.secret());
}
req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
let req = req_builder.build().context(delete_by_billing_account::BuildRequestError)?;
let rsp = client.execute(req).await.context(delete_by_billing_account::ExecuteRequestError)?;
match rsp.status() {
StatusCode::OK => {
let body: bytes::Bytes = rsp.bytes().await.context(delete_by_billing_account::ResponseBytesError)?;
let rsp_value: BillingRoleAssignment =
serde_json::from_slice(&body).context(delete_by_billing_account::DeserializeError { body })?;
Ok(rsp_value)
}
status_code => {
let body: bytes::Bytes = rsp.bytes().await.context(delete_by_billing_account::ResponseBytesError)?;
let rsp_value: ErrorResponse =
serde_json::from_slice(&body).context(delete_by_billing_account::DeserializeError { body })?;
delete_by_billing_account::DefaultResponse {
status_code,
value: rsp_value,
}
.fail()
}
}
}
pub mod delete_by_billing_account {
use crate::{models, models::*};
use reqwest::StatusCode;
use snafu::Snafu;
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
DefaultResponse {
status_code: StatusCode,
value: models::ErrorResponse,
},
BuildRequestError {
source: reqwest::Error,
},
ExecuteRequestError {
source: reqwest::Error,
},
ResponseBytesError {
source: reqwest::Error,
},
DeserializeError {
source: serde_json::Error,
body: bytes::Bytes,
},
GetTokenError {
source: azure_core::errors::AzureError,
},
}
}
pub async fn get_by_invoice_section(
operation_config: &crate::OperationConfig,
billing_account_name: &str,
billing_profile_name: &str,
invoice_section_name: &str,
billing_role_assignment_name: &str,
) -> std::result::Result<BillingRoleAssignment, get_by_invoice_section::Error> {
let client = &operation_config.client;
let uri_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/{}/billingProfiles/{}/invoiceSections/{}/billingRoleAssignments/{}",
&operation_config.base_path, billing_account_name, billing_profile_name, invoice_section_name, billing_role_assignment_name
);
let mut req_builder = client.get(uri_str);
if let Some(token_credential) = &operation_config.token_credential {
let token_response = token_credential
.get_token(&operation_config.token_credential_resource)
.await
.context(get_by_invoice_section::GetTokenError)?;
req_builder = req_builder.bearer_auth(token_response.token.secret());
}
req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
let req = req_builder.build().context(get_by_invoice_section::BuildRequestError)?;
let rsp = client.execute(req).await.context(get_by_invoice_section::ExecuteRequestError)?;
match rsp.status() {
StatusCode::OK => {
let body: bytes::Bytes = rsp.bytes().await.context(get_by_invoice_section::ResponseBytesError)?;
let rsp_value: BillingRoleAssignment =
serde_json::from_slice(&body).context(get_by_invoice_section::DeserializeError { body })?;
Ok(rsp_value)
}
status_code => {
let body: bytes::Bytes = rsp.bytes().await.context(get_by_invoice_section::ResponseBytesError)?;
let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(get_by_invoice_section::DeserializeError { body })?;
get_by_invoice_section::DefaultResponse {
status_code,
value: rsp_value,
}
.fail()
}
}
}
pub mod get_by_invoice_section {
use crate::{models, models::*};
use reqwest::StatusCode;
use snafu::Snafu;
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
DefaultResponse {
status_code: StatusCode,
value: models::ErrorResponse,
},
BuildRequestError {
source: reqwest::Error,
},
ExecuteRequestError {
source: reqwest::Error,
},
ResponseBytesError {
source: reqwest::Error,
},
DeserializeError {
source: serde_json::Error,
body: bytes::Bytes,
},
GetTokenError {
source: azure_core::errors::AzureError,
},
}
}
pub async fn delete_by_invoice_section(
operation_config: &crate::OperationConfig,
billing_account_name: &str,
billing_profile_name: &str,
invoice_section_name: &str,
billing_role_assignment_name: &str,
) -> std::result::Result<BillingRoleAssignment, delete_by_invoice_section::Error> {
let client = &operation_config.client;
let uri_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/{}/billingProfiles/{}/invoiceSections/{}/billingRoleAssignments/{}",
&operation_config.base_path, billing_account_name, billing_profile_name, invoice_section_name, billing_role_assignment_name
);
let mut req_builder = client.delete(uri_str);
if let Some(token_credential) = &operation_config.token_credential {
let token_response = token_credential
.get_token(&operation_config.token_credential_resource)
.await
.context(delete_by_invoice_section::GetTokenError)?;
req_builder = req_builder.bearer_auth(token_response.token.secret());
}
req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
let req = req_builder.build().context(delete_by_invoice_section::BuildRequestError)?;
let rsp = client.execute(req).await.context(delete_by_invoice_section::ExecuteRequestError)?;
match rsp.status() {
StatusCode::OK => {
let body: bytes::Bytes = rsp.bytes().await.context(delete_by_invoice_section::ResponseBytesError)?;
let rsp_value: BillingRoleAssignment =
serde_json::from_slice(&body).context(delete_by_invoice_section::DeserializeError { body })?;
Ok(rsp_value)
}
status_code => {
let body: bytes::Bytes = rsp.bytes().await.context(delete_by_invoice_section::ResponseBytesError)?;
let rsp_value: ErrorResponse =
serde_json::from_slice(&body).context(delete_by_invoice_section::DeserializeError { body })?;
delete_by_invoice_section::DefaultResponse {
status_code,
value: rsp_value,
}
.fail()
}
}
}
pub mod delete_by_invoice_section {
use crate::{models, models::*};
use reqwest::StatusCode;
use snafu::Snafu;
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
DefaultResponse {
status_code: StatusCode,
value: models::ErrorResponse,
},
BuildRequestError {
source: reqwest::Error,
},
ExecuteRequestError {
source: reqwest::Error,
},
ResponseBytesError {
source: reqwest::Error,
},
DeserializeError {
source: serde_json::Error,
body: bytes::Bytes,
},
GetTokenError {
source: azure_core::errors::AzureError,
},
}
}
pub async fn get_by_billing_profile(
operation_config: &crate::OperationConfig,
billing_account_name: &str,
billing_profile_name: &str,
billing_role_assignment_name: &str,
) -> std::result::Result<BillingRoleAssignment, get_by_billing_profile::Error> {
let client = &operation_config.client;
let uri_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/{}/billingProfiles/{}/billingRoleAssignments/{}",
&operation_config.base_path, billing_account_name, billing_profile_name, billing_role_assignment_name
);
let mut req_builder = client.get(uri_str);
if let Some(token_credential) = &operation_config.token_credential {
let token_response = token_credential
.get_token(&operation_config.token_credential_resource)
.await
.context(get_by_billing_profile::GetTokenError)?;
req_builder = req_builder.bearer_auth(token_response.token.secret());
}
req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
let req = req_builder.build().context(get_by_billing_profile::BuildRequestError)?;
let rsp = client.execute(req).await.context(get_by_billing_profile::ExecuteRequestError)?;
match rsp.status() {
StatusCode::OK => {
let body: bytes::Bytes = rsp.bytes().await.context(get_by_billing_profile::ResponseBytesError)?;
let rsp_value: BillingRoleAssignment =
serde_json::from_slice(&body).context(get_by_billing_profile::DeserializeError { body })?;
Ok(rsp_value)
}
status_code => {
let body: bytes::Bytes = rsp.bytes().await.context(get_by_billing_profile::ResponseBytesError)?;
let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(get_by_billing_profile::DeserializeError { body })?;
get_by_billing_profile::DefaultResponse {
status_code,
value: rsp_value,
}
.fail()
}
}
}
pub mod get_by_billing_profile {
use crate::{models, models::*};
use reqwest::StatusCode;
use snafu::Snafu;
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
DefaultResponse {
status_code: StatusCode,
value: models::ErrorResponse,
},
BuildRequestError {
source: reqwest::Error,
},
ExecuteRequestError {
source: reqwest::Error,
},
ResponseBytesError {
source: reqwest::Error,
},
DeserializeError {
source: serde_json::Error,
body: bytes::Bytes,
},
GetTokenError {
source: azure_core::errors::AzureError,
},
}
}
pub async fn delete_by_billing_profile(
operation_config: &crate::OperationConfig,
billing_account_name: &str,
billing_profile_name: &str,
billing_role_assignment_name: &str,
) -> std::result::Result<BillingRoleAssignment, delete_by_billing_profile::Error> {
let client = &operation_config.client;
let uri_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/{}/billingProfiles/{}/billingRoleAssignments/{}",
&operation_config.base_path, billing_account_name, billing_profile_name, billing_role_assignment_name
);
let mut req_builder = client.delete(uri_str);
if let Some(token_credential) = &operation_config.token_credential {
let token_response = token_credential
.get_token(&operation_config.token_credential_resource)
.await
.context(delete_by_billing_profile::GetTokenError)?;
req_builder = req_builder.bearer_auth(token_response.token.secret());
}
req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
let req = req_builder.build().context(delete_by_billing_profile::BuildRequestError)?;
let rsp = client.execute(req).await.context(delete_by_billing_profile::ExecuteRequestError)?;
match rsp.status() {
StatusCode::OK => {
let body: bytes::Bytes = rsp.bytes().await.context(delete_by_billing_profile::ResponseBytesError)?;
let rsp_value: BillingRoleAssignment =
serde_json::from_slice(&body).context(delete_by_billing_profile::DeserializeError { body })?;
Ok(rsp_value)
}
status_code => {
let body: bytes::Bytes = rsp.bytes().await.context(delete_by_billing_profile::ResponseBytesError)?;
let rsp_value: ErrorResponse =
serde_json::from_slice(&body).context(delete_by_billing_profile::DeserializeError { body })?;
delete_by_billing_profile::DefaultResponse {
status_code,
value: rsp_value,
}
.fail()
}
}
}
pub mod delete_by_billing_profile {
use crate::{models, models::*};
use reqwest::StatusCode;
use snafu::Snafu;
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
DefaultResponse {
status_code: StatusCode,
value: models::ErrorResponse,
},
BuildRequestError {
source: reqwest::Error,
},
ExecuteRequestError {
source: reqwest::Error,
},
ResponseBytesError {
source: reqwest::Error,
},
DeserializeError {
source: serde_json::Error,
body: bytes::Bytes,
},
GetTokenError {
source: azure_core::errors::AzureError,
},
}
}
pub async fn list_by_billing_account(
operation_config: &crate::OperationConfig,
billing_account_name: &str,
) -> std::result::Result<BillingRoleAssignmentListResult, list_by_billing_account::Error> {
let client = &operation_config.client;
let uri_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/{}/billingRoleAssignments",
&operation_config.base_path, billing_account_name
);
let mut req_builder = client.get(uri_str);
if let Some(token_credential) = &operation_config.token_credential {
let token_response = token_credential
.get_token(&operation_config.token_credential_resource)
.await
.context(list_by_billing_account::GetTokenError)?;
req_builder = req_builder.bearer_auth(token_response.token.secret());
}
req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
let req = req_builder.build().context(list_by_billing_account::BuildRequestError)?;
let rsp = client.execute(req).await.context(list_by_billing_account::ExecuteRequestError)?;
match rsp.status() {
StatusCode::OK => {
let body: bytes::Bytes = rsp.bytes().await.context(list_by_billing_account::ResponseBytesError)?;
let rsp_value: BillingRoleAssignmentListResult =
serde_json::from_slice(&body).context(list_by_billing_account::DeserializeError { body })?;
Ok(rsp_value)
}
status_code => {
let body: bytes::Bytes = rsp.bytes().await.context(list_by_billing_account::ResponseBytesError)?;
let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(list_by_billing_account::DeserializeError { body })?;
list_by_billing_account::DefaultResponse {
status_code,
value: rsp_value,
}
.fail()
}
}
}
pub mod list_by_billing_account {
use crate::{models, models::*};
use reqwest::StatusCode;
use snafu::Snafu;
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
DefaultResponse {
status_code: StatusCode,
value: models::ErrorResponse,
},
BuildRequestError {
source: reqwest::Error,
},
ExecuteRequestError {
source: reqwest::Error,
},
ResponseBytesError {
source: reqwest::Error,
},
DeserializeError {
source: serde_json::Error,
body: bytes::Bytes,
},
GetTokenError {
source: azure_core::errors::AzureError,
},
}
}
pub async fn list_by_invoice_section(
operation_config: &crate::OperationConfig,
billing_account_name: &str,
billing_profile_name: &str,
invoice_section_name: &str,
) -> std::result::Result<BillingRoleAssignmentListResult, list_by_invoice_section::Error> {
let client = &operation_config.client;
let uri_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/{}/billingProfiles/{}/invoiceSections/{}/billingRoleAssignments",
&operation_config.base_path, billing_account_name, billing_profile_name, invoice_section_name
);
let mut req_builder = client.get(uri_str);
if let Some(token_credential) = &operation_config.token_credential {
let token_response = token_credential
.get_token(&operation_config.token_credential_resource)
.await
.context(list_by_invoice_section::GetTokenError)?;
req_builder = req_builder.bearer_auth(token_response.token.secret());
}
req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
let req = req_builder.build().context(list_by_invoice_section::BuildRequestError)?;
let rsp = client.execute(req).await.context(list_by_invoice_section::ExecuteRequestError)?;
match rsp.status() {
StatusCode::OK => {
let body: bytes::Bytes = rsp.bytes().await.context(list_by_invoice_section::ResponseBytesError)?;
let rsp_value: BillingRoleAssignmentListResult =
serde_json::from_slice(&body).context(list_by_invoice_section::DeserializeError { body })?;
Ok(rsp_value)
}
status_code => {
let body: bytes::Bytes = rsp.bytes().await.context(list_by_invoice_section::ResponseBytesError)?;
let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(list_by_invoice_section::DeserializeError { body })?;
list_by_invoice_section::DefaultResponse {
status_code,
value: rsp_value,
}
.fail()
}
}
}
pub mod list_by_invoice_section {
use crate::{models, models::*};
use reqwest::StatusCode;
use snafu::Snafu;
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
DefaultResponse {
status_code: StatusCode,
value: models::ErrorResponse,
},
BuildRequestError {
source: reqwest::Error,
},
ExecuteRequestError {
source: reqwest::Error,
},
ResponseBytesError {
source: reqwest::Error,
},
DeserializeError {
source: serde_json::Error,
body: bytes::Bytes,
},
GetTokenError {
source: azure_core::errors::AzureError,
},
}
}
pub async fn list_by_billing_profile(
operation_config: &crate::OperationConfig,
billing_account_name: &str,
billing_profile_name: &str,
) -> std::result::Result<BillingRoleAssignmentListResult, list_by_billing_profile::Error> {
let client = &operation_config.client;
let uri_str = &format!(
"{}/providers/Microsoft.Billing/billingAccounts/{}/billingProfiles/{}/billingRoleAssignments",
&operation_config.base_path, billing_account_name, billing_profile_name
);
let mut req_builder = client.get(uri_str);
if let Some(token_credential) = &operation_config.token_credential {
let token_response = token_credential
.get_token(&operation_config.token_credential_resource)
.await
.context(list_by_billing_profile::GetTokenError)?;
req_builder = req_builder.bearer_auth(token_response.token.secret());
}
req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
let req = req_builder.build().context(list_by_billing_profile::BuildRequestError)?;
let rsp = client.execute(req).await.context(list_by_billing_profile::ExecuteRequestError)?;
match rsp.status() {
StatusCode::OK => {
let body: bytes::Bytes = rsp.bytes().await.context(list_by_billing_profile::ResponseBytesError)?;
let rsp_value: BillingRoleAssignmentListResult =
serde_json::from_slice(&body).context(list_by_billing_profile::DeserializeError { body })?;
Ok(rsp_value)
}
status_code => {
let body: bytes::Bytes = rsp.bytes().await.context(list_by_billing_profile::ResponseBytesError)?;
let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(list_by_billing_profile::DeserializeError { body })?;
list_by_billing_profile::DefaultResponse {
status_code,
value: rsp_value,
}
.fail()
}
}
}
pub mod list_by_billing_profile {
use crate::{models, models::*};
use reqwest::StatusCode;
use snafu::Snafu;
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
DefaultResponse {
status_code: StatusCode,
value: models::ErrorResponse,
},
BuildRequestError {
source: reqwest::Error,
},
ExecuteRequestError {
source: reqwest::Error,
},
ResponseBytesError {
source: reqwest::Error,
},
DeserializeError {
source: serde_json::Error,
body: bytes::Bytes,
},
GetTokenError {
source: azure_core::errors::AzureError,
},
}
}
}
/// Generated client operations for Microsoft.Billing *agreements*.
pub mod agreements {
    use crate::models::*;
    use reqwest::StatusCode;
    use snafu::{ResultExt, Snafu};
    /// LIST agreements under a billing account; `expand` maps to the optional
    /// `$expand` query parameter.
    pub async fn list_by_billing_account(
        operation_config: &crate::OperationConfig,
        billing_account_name: &str,
        expand: Option<&str>,
    ) -> std::result::Result<AgreementListResult, list_by_billing_account::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/providers/Microsoft.Billing/billingAccounts/{}/agreements",
            &operation_config.base_path, billing_account_name
        );
        let mut req_builder = client.get(uri_str);
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(list_by_billing_account::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        // Optional OData expansion is only sent when requested.
        if let Some(expand) = expand {
            req_builder = req_builder.query(&[("$expand", expand)]);
        }
        let req = req_builder.build().context(list_by_billing_account::BuildRequestError)?;
        let rsp = client.execute(req).await.context(list_by_billing_account::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => {
                let body: bytes::Bytes = rsp.bytes().await.context(list_by_billing_account::ResponseBytesError)?;
                let rsp_value: AgreementListResult =
                    serde_json::from_slice(&body).context(list_by_billing_account::DeserializeError { body })?;
                Ok(rsp_value)
            }
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(list_by_billing_account::ResponseBytesError)?;
                let rsp_value: ErrorResponse =
                    serde_json::from_slice(&body).context(list_by_billing_account::DeserializeError { body })?;
                list_by_billing_account::DefaultResponse {
                    status_code,
                    value: rsp_value,
                }
                .fail()
            }
        }
    }
    /// Error selectors for [`list_by_billing_account`].
    pub mod list_by_billing_account {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            DefaultResponse {
                status_code: StatusCode,
                value: models::ErrorResponse,
            },
            BuildRequestError {
                source: reqwest::Error,
            },
            ExecuteRequestError {
                source: reqwest::Error,
            },
            ResponseBytesError {
                source: reqwest::Error,
            },
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
    /// GET a single agreement by name, with optional `$expand`.
    pub async fn get(
        operation_config: &crate::OperationConfig,
        billing_account_name: &str,
        agreement_name: &str,
        expand: Option<&str>,
    ) -> std::result::Result<Agreement, get::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/providers/Microsoft.Billing/billingAccounts/{}/agreements/{}",
            &operation_config.base_path, billing_account_name, agreement_name
        );
        let mut req_builder = client.get(uri_str);
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(get::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        if let Some(expand) = expand {
            req_builder = req_builder.query(&[("$expand", expand)]);
        }
        let req = req_builder.build().context(get::BuildRequestError)?;
        let rsp = client.execute(req).await.context(get::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => {
                let body: bytes::Bytes = rsp.bytes().await.context(get::ResponseBytesError)?;
                let rsp_value: Agreement = serde_json::from_slice(&body).context(get::DeserializeError { body })?;
                Ok(rsp_value)
            }
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(get::ResponseBytesError)?;
                let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(get::DeserializeError { body })?;
                get::DefaultResponse {
                    status_code,
                    value: rsp_value,
                }
                .fail()
            }
        }
    }
    /// Error selectors for [`get`].
    pub mod get {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            DefaultResponse {
                status_code: StatusCode,
                value: models::ErrorResponse,
            },
            BuildRequestError {
                source: reqwest::Error,
            },
            ExecuteRequestError {
                source: reqwest::Error,
            },
            ResponseBytesError {
                source: reqwest::Error,
            },
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
}
/// Generated client operations for Microsoft.Billing *enrollment accounts*.
pub mod enrollment_accounts {
    use crate::models::*;
    use reqwest::StatusCode;
    use snafu::{ResultExt, Snafu};
    /// LIST all enrollment accounts visible to the caller.
    pub async fn list(operation_config: &crate::OperationConfig) -> std::result::Result<EnrollmentAccountListResult, list::Error> {
        let client = &operation_config.client;
        let uri_str = &format!("{}/providers/Microsoft.Billing/enrollmentAccounts", &operation_config.base_path,);
        let mut req_builder = client.get(uri_str);
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(list::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        let req = req_builder.build().context(list::BuildRequestError)?;
        let rsp = client.execute(req).await.context(list::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => {
                let body: bytes::Bytes = rsp.bytes().await.context(list::ResponseBytesError)?;
                let rsp_value: EnrollmentAccountListResult = serde_json::from_slice(&body).context(list::DeserializeError { body })?;
                Ok(rsp_value)
            }
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(list::ResponseBytesError)?;
                let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(list::DeserializeError { body })?;
                list::DefaultResponse {
                    status_code,
                    value: rsp_value,
                }
                .fail()
            }
        }
    }
    /// Error selectors for [`list`].
    pub mod list {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            DefaultResponse {
                status_code: StatusCode,
                value: models::ErrorResponse,
            },
            BuildRequestError {
                source: reqwest::Error,
            },
            ExecuteRequestError {
                source: reqwest::Error,
            },
            ResponseBytesError {
                source: reqwest::Error,
            },
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
    /// GET a single enrollment account summary by name.
    pub async fn get(operation_config: &crate::OperationConfig, name: &str) -> std::result::Result<EnrollmentAccountSummary, get::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/providers/Microsoft.Billing/enrollmentAccounts/{}",
            &operation_config.base_path, name
        );
        let mut req_builder = client.get(uri_str);
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(get::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        let req = req_builder.build().context(get::BuildRequestError)?;
        let rsp = client.execute(req).await.context(get::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => {
                let body: bytes::Bytes = rsp.bytes().await.context(get::ResponseBytesError)?;
                let rsp_value: EnrollmentAccountSummary = serde_json::from_slice(&body).context(get::DeserializeError { body })?;
                Ok(rsp_value)
            }
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(get::ResponseBytesError)?;
                let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(get::DeserializeError { body })?;
                get::DefaultResponse {
                    status_code,
                    value: rsp_value,
                }
                .fail()
            }
        }
    }
    /// Error selectors for [`get`].
    pub mod get {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            DefaultResponse {
                status_code: StatusCode,
                value: models::ErrorResponse,
            },
            BuildRequestError {
                source: reqwest::Error,
            },
            ExecuteRequestError {
                source: reqwest::Error,
            },
            ResponseBytesError {
                source: reqwest::Error,
            },
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
}
/// Generated client operations for Microsoft.Billing *billing periods*
/// (subscription-scoped, unlike the account-scoped operations above).
pub mod billing_periods {
    use crate::models::*;
    use reqwest::StatusCode;
    use snafu::{ResultExt, Snafu};
    /// LIST billing periods for a subscription. `filter`, `skiptoken` and `top`
    /// map to the optional OData `$filter` / `$skiptoken` / `$top` parameters.
    pub async fn list(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        filter: Option<&str>,
        skiptoken: Option<&str>,
        top: Option<i64>,
    ) -> std::result::Result<BillingPeriodsListResult, list::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/subscriptions/{}/providers/Microsoft.Billing/billingPeriods",
            &operation_config.base_path, subscription_id
        );
        let mut req_builder = client.get(uri_str);
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(list::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        // Each optional OData parameter is only appended when supplied.
        if let Some(filter) = filter {
            req_builder = req_builder.query(&[("$filter", filter)]);
        }
        if let Some(skiptoken) = skiptoken {
            req_builder = req_builder.query(&[("$skiptoken", skiptoken)]);
        }
        if let Some(top) = top {
            req_builder = req_builder.query(&[("$top", top)]);
        }
        let req = req_builder.build().context(list::BuildRequestError)?;
        let rsp = client.execute(req).await.context(list::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => {
                let body: bytes::Bytes = rsp.bytes().await.context(list::ResponseBytesError)?;
                let rsp_value: BillingPeriodsListResult = serde_json::from_slice(&body).context(list::DeserializeError { body })?;
                Ok(rsp_value)
            }
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(list::ResponseBytesError)?;
                let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(list::DeserializeError { body })?;
                list::DefaultResponse {
                    status_code,
                    value: rsp_value,
                }
                .fail()
            }
        }
    }
    /// Error selectors for [`list`].
    pub mod list {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            DefaultResponse {
                status_code: StatusCode,
                value: models::ErrorResponse,
            },
            BuildRequestError {
                source: reqwest::Error,
            },
            ExecuteRequestError {
                source: reqwest::Error,
            },
            ResponseBytesError {
                source: reqwest::Error,
            },
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
    /// GET a single named billing period for a subscription.
    pub async fn get(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        billing_period_name: &str,
    ) -> std::result::Result<BillingPeriod, get::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/subscriptions/{}/providers/Microsoft.Billing/billingPeriods/{}",
            &operation_config.base_path, subscription_id, billing_period_name
        );
        let mut req_builder = client.get(uri_str);
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(get::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        let req = req_builder.build().context(get::BuildRequestError)?;
        let rsp = client.execute(req).await.context(get::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => {
                let body: bytes::Bytes = rsp.bytes().await.context(get::ResponseBytesError)?;
                let rsp_value: BillingPeriod = serde_json::from_slice(&body).context(get::DeserializeError { body })?;
                Ok(rsp_value)
            }
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(get::ResponseBytesError)?;
                let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(get::DeserializeError { body })?;
                get::DefaultResponse {
                    status_code,
                    value: rsp_value,
                }
                .fail()
            }
        }
    }
    /// Error selectors for [`get`].
    pub mod get {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            DefaultResponse {
                status_code: StatusCode,
                value: models::ErrorResponse,
            },
            BuildRequestError {
                source: reqwest::Error,
            },
            ExecuteRequestError {
                source: reqwest::Error,
            },
            ResponseBytesError {
                source: reqwest::Error,
            },
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
}
|
use crate::protos::cao_commands::TakeRoomCommand;
use anyhow::Context;
use caolo_sim::prelude::*;
use thiserror::Error;
use tracing::{info, trace};
use uuid::Uuid;
/// Failure modes of the take-room command handler.
#[derive(Debug, Error)]
pub enum TakeRoomError {
    /// The target room already belongs to another user.
    #[error("Target room already has an owner")]
    Owned,
    /// Taking the room would exceed the user's room allowance.
    #[error("Maximum number of rooms ({0}) owned already")]
    MaxRoomsExceeded(usize),
    /// A world update failed; not a client error.
    #[error("Internal error: {0}")]
    InternalError(anyhow::Error),
    /// No `UserProperties` entry exists for this user id.
    #[error("User by id {0} was not registered")]
    NotRegistered(Uuid),
    /// A required field was absent from the protobuf command.
    #[error("Missing expected field {0}")]
    MissingField(&'static str),
    /// The `user_id` bytes did not form a valid UUID.
    #[error("Failed to parse uuid {0}")]
    UuidError(anyhow::Error),
}
/// Assigns ownership of the room named in `msg` to the user named in
/// `msg`, enforcing a per-user room allowance derived from the user's
/// `level`.
///
/// Fails if a required field is missing or unparsable, the room is
/// already owned, the user has no properties record, or the user is
/// over their room allowance.
pub fn take_room(world: &mut World, msg: &TakeRoomCommand) -> Result<(), TakeRoomError> {
    trace!("Taking room");
    let user_id = msg
        .user_id
        .as_ref()
        .ok_or(TakeRoomError::MissingField("user_id"))?
        .data
        .as_slice();
    let user_id =
        uuid::Uuid::from_slice(user_id).map_err(|err| TakeRoomError::UuidError(err.into()))?;
    let room_id = msg
        .room_id
        .as_ref()
        .ok_or(TakeRoomError::MissingField("room_id"))?;
    let room_id = Axial::new(room_id.q, room_id.r);
    let span = tracing::error_span!(
        "take_room",
        user_id = %user_id,
        room_id = %room_id
    );
    let _e = span.enter();
    info!("Attempting to take room");
    // Presence of an OwnedEntity component at this coordinate means the
    // room is taken.
    let has_owner = world.view::<Axial, OwnedEntity>().contains_key(room_id);
    if has_owner {
        info!("Room is taken");
        return Err(TakeRoomError::Owned);
    }
    let rooms = world
        .view::<UserId, Rooms>()
        .reborrow()
        .get(UserId(user_id));
    let num_rooms = rooms.map(|x| x.0.len()).unwrap_or(0);
    let props = world
        .view::<UserId, UserProperties>()
        .reborrow()
        .get(UserId(user_id));
    // The user's level is used directly as their room allowance.
    let available_rooms = match props.map(|p| p.level) {
        Some(l) => l,
        None => {
            // NOTE(review): this branch means the *user* is unknown, but
            // the log message says "Room" — likely a copy/paste slip.
            info!("Room is not registered");
            return Err(TakeRoomError::NotRegistered(user_id));
        }
    };
    // NOTE(review): with `>` a user may end up owning
    // `available_rooms + 1` rooms (the check passes when
    // num_rooms == available_rooms and one more is pushed below).
    // Confirm whether `>=` was intended, or whether level N is meant to
    // allow N + 1 rooms (e.g. so level-0 users can take a first room).
    if num_rooms > available_rooms as usize {
        info!("User would exceed max rooms");
        return Err(TakeRoomError::MaxRoomsExceeded(available_rooms as usize));
    }
    let mut rooms = rooms.cloned().unwrap_or_default();
    rooms.0.push(Room(room_id));
    world
        .unsafe_view::<Axial, OwnedEntity>()
        .insert(
            room_id,
            OwnedEntity {
                owner_id: UserId(user_id),
            },
        )
        .with_context(|| "Failed to insert the new owner")
        .map_err(TakeRoomError::InternalError)?;
    world
        .unsafe_view::<UserId, Rooms>()
        .insert(UserId(user_id), rooms);
    Ok(())
}
|
//! Utilities for *entities* in ECS.
use slotmap::{new_key_type, SlotMap};
new_key_type! {
    /// Unique identifier of the *entity* of ECS.
    pub struct Entity;
}
/// Storage for all entities of ECS.
///
/// Entities carry no data themselves (`()`): components live in
/// separate storages keyed by [`Entity`], and the slotmap provides
/// generation-checked allocation/recycling of ids.
pub type EntityStorage = SlotMap<Entity, ()>;
|
use clap::{App, Arg, ArgMatches, SubCommand};
/// Defines the command-line interface and parses `std::env::args`.
///
/// Global options: `--config` directory and `--verbose`. The `server`
/// subcommand additionally accepts `--address` and `--port` overrides.
pub(crate) fn get_matches<'a>() -> ArgMatches<'a> {
    let config_arg = Arg::with_name("config")
        .short("c")
        .long("config")
        .value_name("DIRECTORY")
        .help("Sets a custom config directory (defaults to \".\")")
        .takes_value(true);
    let verbose_arg = Arg::with_name("verbose")
        .short("v")
        .long("verbose")
        .help("Display more output")
        .takes_value(false);
    let address_arg = Arg::with_name("address")
        .short("a")
        .long("address")
        .value_name("IP_ADDRESS")
        .help("Defines the host to listen on (defaults to \"127.0.0.1\")")
        .takes_value(true);
    // The port help text embeds the configured default port number.
    let port_help = format!(
        "Configures the server to use the provided port (defaults to {})",
        crate::cfg::DEFAULT_PORT
    );
    let port_arg = Arg::with_name("port")
        .short("p")
        .long("port")
        .value_name("PORT_NUM")
        .help(&port_help)
        .takes_value(true);
    let server_cmd = SubCommand::with_name("server")
        .arg(address_arg)
        .arg(port_arg);
    App::new(rustimate_core::APPNAME)
        .version("0.0.20")
        .author(clap::crate_authors!())
        .about("Starts the HTTP server and (optionally) opens a webview")
        .arg(config_arg)
        .arg(verbose_arg)
        .subcommand(server_cmd)
        .get_matches()
}
|
pub mod buffer;
pub mod data;
pub mod linalg;
pub mod shader;
|
// Benchmark crate for the `sha2ni` SHA-256 implementation.
// `#![feature(test)]` is required because the `digest` crate's `bench!`
// macro expands to `#[bench]` functions using the unstable test crate.
#![no_std]
#![feature(test)]
#[macro_use]
extern crate digest;
extern crate sha2ni;
bench!(sha2ni::Sha256);
|
/// Demonstrates `String` concatenation with `+` (which moves the left
/// operand and borrows the right), then prints the result: "Hi!".
fn main() {
    let greeting = String::from("Hi");
    let excl = String::from("!");
    let message = greeting + &excl;
    println!("{}", message);
}
// Copyright 2019 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
use {
anyhow::{anyhow, Context as _, Error},
fidl_fuchsia_update::{
CheckOptions, CommitStatusProviderMarker, CommitStatusProviderProxy, Initiator,
ManagerMarker, ManagerProxy, MonitorMarker, MonitorRequest, MonitorRequestStream,
},
fidl_fuchsia_update_channelcontrol::{ChannelControlMarker, ChannelControlProxy},
fidl_fuchsia_update_ext::State,
fidl_fuchsia_update_installer::{InstallerMarker, RebootControllerMarker},
fidl_fuchsia_update_installer_ext::{self as installer, start_update, Options, StateId},
fuchsia_async as fasync,
fuchsia_component::client::connect_to_service,
fuchsia_url::pkg_url::PkgUrl,
fuchsia_zircon as zx,
futures::{future::FusedFuture, prelude::*},
std::time::Duration,
};
mod args;
// How long `wait-for-commit` waits before warning the user that the
// commit is taking suspiciously long.
const WARNING_DURATION: Duration = Duration::from_secs(30);
/// Prints an update-check state transition to stdout in debug form.
fn print_state(state: &State) {
    println!("State: {:?}", state);
}
/// Drains `Monitor` requests from the update manager, acknowledging and
/// printing each state; bails with an error if the update fails.
async fn monitor_state(mut stream: MonitorRequestStream) -> Result<(), Error> {
    while let Some(event) = stream.try_next().await? {
        match event {
            MonitorRequest::OnState { state, responder } => {
                // Ack first so the server can keep streaming states.
                responder.send()?;
                let state = State::from(state);
                // Exit if we encounter an error during an update.
                if state.is_error() {
                    anyhow::bail!("Update failed: {:?}", state)
                } else {
                    print_state(&state);
                }
            }
        }
    }
    Ok(())
}
/// Dispatches a `channel` subcommand (get/target/set/list) to the
/// ChannelControl service and prints the results to stdout.
async fn handle_channel_control_cmd(
    cmd: args::channel::Command,
    channel_control: ChannelControlProxy,
) -> Result<(), Error> {
    match cmd {
        args::channel::Command::Get(_) => {
            let channel = channel_control.get_current().await?;
            println!("current channel: {}", channel);
        }
        args::channel::Command::Target(_) => {
            let channel = channel_control.get_target().await?;
            println!("target channel: {}", channel);
        }
        args::channel::Command::Set(args::channel::Set { channel }) => {
            channel_control.set_target(&channel).await?;
        }
        args::channel::Command::List(_) => {
            let channels = channel_control.get_target_list().await?;
            if channels.is_empty() {
                println!("known channels list is empty.");
            } else {
                println!("known channels:");
                for channel in channels {
                    println!("{}", channel);
                }
            }
        }
    }
    Ok(())
}
/// Starts an update check via the update manager; when `monitor` is
/// set, streams and prints state updates until the check finishes.
async fn handle_check_now_cmd(
    cmd: args::CheckNow,
    update_manager: ManagerProxy,
) -> Result<(), Error> {
    let args::CheckNow { service_initiated, monitor } = cmd;
    let options = CheckOptions {
        initiator: Some(if service_initiated { Initiator::Service } else { Initiator::User }),
        // Piggy-back on a check that is already in progress instead of failing.
        allow_attaching_to_existing_update_check: Some(true),
        ..CheckOptions::EMPTY
    };
    // Only wire up a monitor channel when the user asked to follow progress.
    let (monitor_client, monitor_server) = if monitor {
        let (client_end, request_stream) =
            fidl::endpoints::create_request_stream::<MonitorMarker>()?;
        (Some(client_end), Some(request_stream))
    } else {
        (None, None)
    };
    if let Err(e) = update_manager.check_now(options, monitor_client).await? {
        anyhow::bail!("Update check failed to start: {:?}", e);
    }
    println!("Checking for an update.");
    if let Some(monitor_server) = monitor_server {
        monitor_state(monitor_server).await?;
    }
    Ok(())
}
/// Starts installation of the update package at `update_pkg_url` and
/// follows its progress; returns once the install reaches a terminal
/// (success/wait-to-reboot/failure) state.
async fn force_install(
    update_pkg_url: String,
    reboot: bool,
    service_initiated: bool,
) -> Result<(), Error> {
    let pkgurl = PkgUrl::parse(&update_pkg_url).context("parsing update package url")?;
    let options = Options {
        initiator: if service_initiated {
            installer::Initiator::Service
        } else {
            installer::Initiator::User
        },
        should_write_recovery: true,
        allow_attach_to_existing_attempt: true,
    };
    let proxy = connect_to_service::<InstallerMarker>()
        .context("connecting to fuchsia.update.installer")?;
    let (reboot_controller, reboot_controller_server_end) =
        fidl::endpoints::create_proxy::<RebootControllerMarker>()
            .context("creating reboot controller")?;
    let mut update_attempt =
        start_update(&pkgurl, options, &proxy, Some(reboot_controller_server_end))
            .await
            .context("starting update")?;
    println!("Installing an update.");
    if !reboot {
        // Detaching tells the installer not to reboot the device itself.
        reboot_controller.detach().context("notify installer do not reboot")?;
    }
    while let Some(state) = update_attempt.try_next().await.context("getting next state")? {
        println!("State: {:?}", state);
        if state.id() == StateId::WaitToReboot {
            if reboot {
                return Ok(());
            }
        } else if state.is_success() {
            return Ok(());
        } else if state.is_failure() {
            anyhow::bail!("Encountered failure state");
        }
    }
    Err(anyhow!("Installation ended unexpectedly"))
}
/// The set of events associated with the `wait-for-commit` path.
#[derive(Debug, PartialEq)]
enum CommitEvent {
    Begin,
    Warning,
    End,
}
/// An observer of `update wait-for-commit`.
trait CommitObserver {
    fn on_event(&self, event: CommitEvent);
}
/// A `CommitObserver` that forwards the events to stdout.
struct Printer;
impl CommitObserver for Printer {
    fn on_event(&self, event: CommitEvent) {
        let text = match event {
            CommitEvent::Begin => "Waiting for commit.",
            // TODO(fxbug.dev/64590) update warning message to be more helpful.
            // The "30 seconds" here mirrors WARNING_DURATION.
            CommitEvent::Warning => "It's been 30 seconds. Something is probably wrong.",
            CommitEvent::End => "Committed!",
        };
        println!("{}", text);
    }
}
/// Waits for the system to commit (e.g. when the EventPair observes a signal).
async fn wait_for_commit(proxy: &CommitStatusProviderProxy) -> Result<(), Error> {
    // The provider returns an EventPair that is signalled with USER_0
    // once the currently running system has been committed.
    let p = proxy.is_current_system_committed().await.context("while obtaining EventPair")?;
    fasync::OnSignals::new(&p, zx::Signals::USER_0)
        .await
        .context("while waiting for the commit")?;
    Ok(())
}
/// Waits for the commit and sends updates to the observer. This is abstracted from the regular
/// `handle_wait_for_commit` fn so we can test events without having to wait the `WARNING_DURATION`.
/// The [testability rubric](https://fuchsia.dev/fuchsia-src/concepts/testing/testability_rubric)
/// exempts logs from testing, but in this case we test them anyway because of the additional layer
/// of complexity that the warning timeout introduces.
async fn handle_wait_for_commit_impl(
    proxy: &CommitStatusProviderProxy,
    observer: impl CommitObserver,
) -> Result<(), Error> {
    let () = observer.on_event(CommitEvent::Begin);
    let commit_fut = wait_for_commit(&proxy).fuse();
    futures::pin_mut!(commit_fut);
    let mut timer_fut = fasync::Timer::new(WARNING_DURATION).fuse();
    // Send a warning after the WARNING_DURATION.
    // Race the commit against the timer; whichever completes first wins.
    let () = futures::select! {
        commit_res = commit_fut => commit_res?,
        _ = timer_fut => observer.on_event(CommitEvent::Warning),
    };
    // If we timed out on WARNING_DURATION, try again.
    // (is_terminated is true only if the commit branch already ran.)
    if !commit_fut.is_terminated() {
        let () = commit_fut.await.context("while calling wait_for_commit second")?;
    }
    let () = observer.on_event(CommitEvent::End);
    Ok(())
}
/// Waits for the commit and prints updates to stdout.
async fn handle_wait_for_commit(proxy: &CommitStatusProviderProxy) -> Result<(), Error> {
    handle_wait_for_commit_impl(&proxy, Printer).await
}
/// Connects to the service each subcommand needs and dispatches to the
/// matching handler.
async fn handle_cmd(cmd: args::Command) -> Result<(), Error> {
    match cmd {
        args::Command::Channel(args::Channel { cmd }) => {
            let channel_control = connect_to_service::<ChannelControlMarker>()
                .context("Failed to connect to channel control service")?;
            handle_channel_control_cmd(cmd, channel_control).await?;
        }
        args::Command::CheckNow(check_now) => {
            let update_manager = connect_to_service::<ManagerMarker>()
                .context("Failed to connect to update manager")?;
            handle_check_now_cmd(check_now, update_manager).await?;
        }
        args::Command::ForceInstall(args) => {
            force_install(args.update_pkg_url, args.reboot, args.service_initiated).await?;
        }
        args::Command::WaitForCommit(_) => {
            let proxy = connect_to_service::<CommitStatusProviderMarker>()
                .context("while connecting to fuchsia.update/CommitStatusProvider")?;
            handle_wait_for_commit(&proxy).await?;
        }
    }
    Ok(())
}
/// Parses CLI arguments and runs the selected command to completion on
/// a single-threaded executor.
pub fn main() -> Result<(), Error> {
    let mut executor = fasync::Executor::new()?;
    let args::Update { cmd } = argh::from_env();
    executor.run_singlethreaded(handle_cmd(cmd))
}
#[cfg(test)]
mod tests {
    use {
        super::*,
        fidl::endpoints::create_proxy_and_stream,
        fidl_fuchsia_update::CommitStatusProviderRequest,
        fidl_fuchsia_update_channelcontrol::ChannelControlRequest,
        fuchsia_zircon::{DurationNum, EventPair, HandleBased, Peered},
        futures::{pin_mut, task::Poll},
        matches::assert_matches,
        parking_lot::Mutex,
    };
    /// Drives `handle_channel_control_cmd` against a mock ChannelControl
    /// server; `verifier` asserts on (and answers) the single request
    /// the command is expected to produce.
    async fn perform_channel_control_test<V>(argument: args::channel::Command, verifier: V)
    where
        V: Fn(ChannelControlRequest),
    {
        let (proxy, mut stream) = create_proxy_and_stream::<ChannelControlMarker>().unwrap();
        let fut = async move {
            assert_matches!(handle_channel_control_cmd(argument, proxy).await, Ok(()));
        };
        let stream_fut = async move {
            let result = stream.next().await.unwrap();
            match result {
                Ok(cmd) => verifier(cmd),
                err => panic!("Err in request handler: {:?}", err),
            }
        };
        // Run client command and mock server concurrently.
        future::join(fut, stream_fut).await;
    }
    #[fasync::run_singlethreaded(test)]
    async fn test_channel_get() {
        perform_channel_control_test(args::channel::Command::Get(args::channel::Get {}), |cmd| {
            match cmd {
                ChannelControlRequest::GetCurrent { responder } => {
                    responder.send("channel").unwrap();
                }
                request => panic!("Unexpected request: {:?}", request),
            }
        })
        .await;
    }
    #[fasync::run_singlethreaded(test)]
    async fn test_channel_target() {
        perform_channel_control_test(
            args::channel::Command::Target(args::channel::Target {}),
            |cmd| match cmd {
                ChannelControlRequest::GetTarget { responder } => {
                    responder.send("target-channel").unwrap();
                }
                request => panic!("Unexpected request: {:?}", request),
            },
        )
        .await;
    }
    #[fasync::run_singlethreaded(test)]
    async fn test_channel_set() {
        perform_channel_control_test(
            args::channel::Command::Set(args::channel::Set { channel: "new-channel".to_string() }),
            |cmd| match cmd {
                ChannelControlRequest::SetTarget { channel, responder } => {
                    assert_eq!(channel, "new-channel");
                    responder.send().unwrap();
                }
                request => panic!("Unexpected request: {:?}", request),
            },
        )
        .await;
    }
    #[fasync::run_singlethreaded(test)]
    async fn test_channel_list() {
        perform_channel_control_test(args::channel::Command::List(args::channel::List {}), |cmd| {
            match cmd {
                ChannelControlRequest::GetTargetList { responder } => {
                    responder.send(&mut vec!["some-channel", "other-channel"].into_iter()).unwrap();
                }
                request => panic!("Unexpected request: {:?}", request),
            }
        })
        .await;
    }
    /// Records commit events so tests can assert on their exact order.
    struct TestObserver {
        events: Mutex<Vec<CommitEvent>>,
    }
    impl TestObserver {
        fn new() -> Self {
            Self { events: Mutex::new(vec![]) }
        }
        fn assert_events(&self, expected_events: &[CommitEvent]) {
            assert_eq!(self.events.lock().as_slice(), expected_events);
        }
    }
    impl CommitObserver for &TestObserver {
        fn on_event(&self, event: CommitEvent) {
            self.events.lock().push(event);
        }
    }
    #[test]
    fn wait_for_commit() {
        // Fake time lets the test step across WARNING_DURATION instantly.
        let mut executor = fasync::Executor::new_with_fake_time().unwrap();
        let (proxy, mut stream) =
            fidl::endpoints::create_proxy_and_stream::<CommitStatusProviderMarker>().unwrap();
        let (p, p_stream) = EventPair::create().unwrap();
        // Mock server: hand out duplicates of our EventPair so the test
        // can trigger the commit signal on demand.
        fasync::Task::spawn(async move {
            while let Some(req) = stream.try_next().await.unwrap() {
                let CommitStatusProviderRequest::IsCurrentSystemCommitted { responder } = req;
                let pair = p_stream.duplicate_handle(zx::Rights::BASIC).unwrap();
                let () = responder.send(pair).unwrap();
            }
        })
        .detach();
        let observer = TestObserver::new();
        let fut = handle_wait_for_commit_impl(&proxy, &observer);
        pin_mut!(fut);
        // Begin the `wait_for_commit`.
        match executor.run_until_stalled(&mut fut) {
            Poll::Ready(res) => panic!("future unexpectedly completed with: {:?}", res),
            Poll::Pending => (),
        };
        observer.assert_events(&[CommitEvent::Begin]);
        // We should observe no new events when both the system is not committed and we are within
        // the warning duration.
        executor
            .set_fake_time(fasync::Time::after((WARNING_DURATION - Duration::from_secs(1)).into()));
        assert!(!executor.wake_expired_timers());
        match executor.run_until_stalled(&mut fut) {
            Poll::Ready(res) => panic!("future unexpectedly completed with: {:?}", res),
            Poll::Pending => (),
        };
        observer.assert_events(&[CommitEvent::Begin]);
        // Once we hit the warning duration, we should get a warning event.
        executor.set_fake_time(fasync::Time::after(1.seconds()));
        assert!(executor.wake_expired_timers());
        match executor.run_until_stalled(&mut fut) {
            Poll::Ready(res) => panic!("future unexpectedly completed with: {:?}", res),
            Poll::Pending => (),
        };
        observer.assert_events(&[CommitEvent::Begin, CommitEvent::Warning]);
        // Once we get the commit signal, the future should complete.
        let () = p.signal_peer(zx::Signals::NONE, zx::Signals::USER_0).unwrap();
        match executor.run_until_stalled(&mut fut) {
            Poll::Ready(res) => res.unwrap(),
            Poll::Pending => panic!("future unexpectedly pending"),
        };
        observer.assert_events(&[CommitEvent::Begin, CommitEvent::Warning, CommitEvent::End]);
    }
}
|
use std::collections::HashMap;
use std::env;
use std::fs::File;
use std::io::prelude::*;
use std::io::BufReader;
use std::io::BufRead;
//TODO: Clean strings from some chars
//TODO: Stems words!
//TODO: Refactor code
//TODO: Data Augmentation
//TODO: Add different data sets
//TODO: Add documentation
//TODO: Add means for cross validation
//TODO: Make smoothing hyperparameter, rework smoothing!
//TODO: Rustfmt!
/// Maps string class labels to dense integer ids (`0..num_classes`).
struct LabelEncoder {
    // label -> id, ids assigned in order of first appearance
    map: HashMap<String, u32>,
    // number of distinct labels seen so far (== next id to hand out)
    num_classes: u32,
}

impl LabelEncoder {
    /// Creates an empty encoder with no known labels.
    fn new() -> LabelEncoder {
        LabelEncoder { map: HashMap::new(), num_classes: 0 }
    }

    /// Learns ids for any labels not seen before, in order of first
    /// appearance. Repeated calls extend the existing mapping.
    fn fit(&mut self, labels: &[&str]) {
        // contains_key + insert (rather than the entry API) avoids
        // allocating an owned key for labels that are already known,
        // and sidesteps borrowing `self.num_classes` inside a closure.
        for &label in labels {
            if !self.map.contains_key(label) {
                self.map.insert(label.to_string(), self.num_classes);
                self.num_classes += 1;
            }
        }
    }

    /// Converts labels to their learned ids.
    ///
    /// # Panics
    /// Panics if any label was not seen during `fit`.
    fn transform(&self, labels: &[&str]) -> Vec<u32> {
        labels
            .iter()
            .map(|l| *self.map.get(*l).expect("Label not yet encoded!"))
            .collect()
    }
}
/// Removes common English stop words from whitespace-separated text.
struct StopWordFilter {
    stop_words: Vec<String>,
}

impl StopWordFilter {
    /// Creates a filter preloaded with a small set of common English
    /// stop words (the unit test expects at least "a" to be filtered).
    fn new() -> StopWordFilter {
        let stop_words = ["a", "an", "the", "and", "or", "of", "to", "in", "is", "it"]
            .iter()
            .map(|s| s.to_string())
            .collect();
        StopWordFilter { stop_words }
    }

    /// Returns `data` with all stop words removed; remaining words are
    /// joined by single spaces (other whitespace is normalized away).
    fn filter(&self, data: &str) -> String {
        data.split_whitespace()
            .filter(|w| !self.stop_words.iter().any(|s| s == w))
            .collect::<Vec<_>>()
            .join(" ")
    }
}
/// Multinomial naive Bayes text classifier with additive smoothing.
struct Classifier {
    // per-class word -> occurrence count, indexed by class label
    count_vecs: Vec<HashMap<String, u64>>,
    // additive (Laplace) smoothing constant
    smoothing: f64,
}

impl Classifier {
    /// Creates an untrained classifier with smoothing 1.0.
    fn new() -> Classifier {
        Classifier { count_vecs: vec![], smoothing: 1.0f64 }
    }

    /// Builds per-class word counts from whitespace-tokenized samples.
    /// Class labels must be dense integers; the number of classes is
    /// inferred as `max(label) + 1`.
    ///
    /// # Panics
    /// Panics if `data_input` and `labels_input` differ in length.
    fn train(&mut self, data_input: &[&str], labels_input: &[u64]) {
        assert!(
            data_input.len() == labels_input.len(),
            "Number of class labels does not match number of sample data!"
        );
        let num_classes = *labels_input.iter().max().unwrap() + 1;
        (0..num_classes).for_each(|_| self.count_vecs.push(HashMap::new()));
        for (&label, data) in labels_input.iter().zip(data_input) {
            let counts = &mut self.count_vecs[label as usize];
            for word in data.split_whitespace() {
                *counts.entry(word.to_string()).or_insert(0u64) += 1;
            }
        }
    }

    /// Total number of word occurrences (token count, not distinct
    /// words) recorded for `class`.
    fn vocabulary_size(&self, class: usize) -> u64 {
        self.count_vecs[class].values().sum()
    }

    /// Predicts the most probable class label for each input sample.
    ///
    /// # Panics
    /// Panics if the classifier has not been trained.
    fn classify(&self, input: &[&str]) -> Vec<u64> {
        assert!(!self.count_vecs.is_empty(), "Classifier must be trained first!");
        self.classify_proba(input)
            .iter()
            .map(|class_probas| {
                // index of the maximum probability = predicted label
                let mut max_idx = 0;
                for (idx, &p) in class_probas.iter().enumerate() {
                    if p > class_probas[max_idx] {
                        max_idx = idx;
                    }
                }
                max_idx as u64
            })
            .collect()
    }

    /// Computes unnormalized per-class likelihoods for each sample.
    /// Each word contributes `(count + smoothing) / (class_tokens + smoothing)`;
    /// unseen words fall back to the smoothing term alone.
    ///
    /// # Panics
    /// Panics if the classifier has not been trained.
    fn classify_proba(&self, input: &[&str]) -> Vec<Vec<f64>> {
        assert!(!self.count_vecs.is_empty(), "Classifier must be trained first!");
        let num_classes = self.count_vecs.len();
        // Hoisted out of the per-word loop: vocabulary_size is O(vocab)
        // and invariant over the input, so recomputing it per token was
        // accidentally quadratic.
        let denominators: Vec<f64> = (0..num_classes)
            .map(|i| self.vocabulary_size(i) as f64 + self.smoothing)
            .collect();
        let mut probas = Vec::with_capacity(input.len());
        for data in input {
            let mut row = vec![1.0f64; num_classes];
            for word in data.split_whitespace() {
                for i in 0..num_classes {
                    let count = *self.count_vecs[i].get(word).unwrap_or(&0) as f64;
                    row[i] *= (count + self.smoothing) / denominators[i];
                }
            }
            probas.push(row);
        }
        probas
    }

    /// Fraction of samples whose predicted label equals `known_labels`.
    ///
    /// # Panics
    /// Panics if the two slices differ in length or the classifier is
    /// untrained.
    fn accuracy(&self, input: &[&str], known_labels: &[u64]) -> f64 {
        assert_eq!(
            input.len(),
            known_labels.len(),
            "Number of known labels does not match number of samples!"
        );
        let predictions = self.classify(input);
        let correct = predictions
            .iter()
            .zip(known_labels.iter())
            .filter(|(predicted, known)| predicted == known)
            .count();
        correct as f64 / input.len() as f64
    }
}
/// Loads an SMS spam corpus ("ham"/"spam"-prefixed lines), trains the
/// classifier on the first 5000 samples and prints accuracy on the
/// remainder.
///
/// NOTE(review): `filename` is an empty placeholder — `File::open("")`
/// always fails, so this panics until a real data-set path is set.
fn main() { let mut data : Vec<String> = vec![];
    let mut labels : Vec<u64> = vec![];
    let filename = "";
    let f = File::open(filename).expect("file not found");
    let file = BufReader::new(&f);
    for line in file.lines() {
        let mut l = line.unwrap();
        // Lines start with "ham" (label 0) or anything else -> spam
        // (label 1); split off the prefix, lower-case, and strip simple
        // punctuation into spaces.
        if l.starts_with("ham") {
            data.push(l.split_off(3)
                .to_lowercase()
                .replace("."," ")
                .replace(","," ")
                .replace("!"," "));
            labels.push(0);
        } else {
            data.push(l.split_off(4)
                .to_lowercase()
                .replace("."," ")
                .replace(","," ")
                .replace("!"," "));
            labels.push(1);
        }
    }
    // Borrow the owned strings as &str for the classifier API.
    let mut input : Vec<&str> = vec![];
    for l in data.iter() {
        input.push(&l);
    }
    let mut clf = Classifier::new();
    clf.train(&input[..5000],&labels[..5000]);
    //println!("{}",clf.classify(&input[4001..]).len());
    println!("{}",clf.accuracy(&input[5001..],&labels[5001..]));
}
// Unit tests for the label encoder, classifier and stop-word filter.
#[test]
fn test_class_label_encoding() {
    // Ids are assigned in order of first appearance.
    let mut class_label_encoder = LabelEncoder::new();
    let labels = vec!["class_a", "class_b", "class_a", "class_c"];
    class_label_encoder.fit(&labels);
    let encoded_labels = class_label_encoder.transform(&labels);
    assert_eq!(encoded_labels[0],0);
    assert_eq!(encoded_labels[1],1);
    assert_eq!(encoded_labels[2],0);
    assert_eq!(encoded_labels[3],2);
}
#[test]
fn test_vocabulary_size() {
    // vocabulary_size counts token occurrences, not distinct words.
    let data = vec!["such a mad mad world"];
    let classes = [0];
    let mut clf = Classifier::new();
    clf.train(&data,&classes);
    assert_eq!(clf.vocabulary_size(0), 5);
}
#[test]
fn test_count_words() {
    // Words must be counted per class, with no cross-class leakage.
    let data = vec!["mad mad world","beautiful world"];
    let classes = [0,1];
    let mut clf = Classifier::new();
    clf.train(&data, &classes);
    assert_eq!(clf.count_vecs.len(), 2);
    assert_eq!(*clf.count_vecs[0].get("world").unwrap(), 1u64);
    assert_eq!(*clf.count_vecs[0].get("mad").unwrap(), 2u64);
    assert_eq!(clf.count_vecs[0].get("beautiful"), Option::None);
    assert_eq!(*clf.count_vecs[1].get("beautiful").unwrap(), 1u64);
    assert_eq!(clf.count_vecs[1].get("mad"), Option::None);
}
#[test]
fn test_classify() {
    let data = vec!["mad mad world","beautiful world"];
    let classes = [0,1];
    let mut clf = Classifier::new();
    clf.train(&data,&classes);
    assert_eq!(clf.classify(&["mad mad world"]) ,[0]);
    assert_eq!(clf.classify(&["beautiful world"]),[1]);
}
#[test]
fn test_accuracy() {
    // Predicting the training data itself must be fully accurate here.
    let data = vec!["mad mad world","beautiful world"];
    let classes = [0,1];
    let mut clf = Classifier::new();
    clf.train(&data,&classes);
    assert_eq!(clf.accuracy(&data,&classes),1.0);
}
#[test]
#[should_panic]
fn test_untrained_classifier() {
    // classify must assert when called before train.
    let data = vec!["mad mad world"];
    let mut clf = Classifier::new();
    clf.classify(&data);
}
#[test]
fn test_stop_word_filter() {
    // NOTE(review): this test requires `StopWordFilter::new()` to
    // include "a" in its default stop-word list — confirm the intended
    // default vocabulary.
    let data = "a mad mad world";
    let stop_word_filter = StopWordFilter::new();
    assert_eq!(&stop_word_filter.filter(data), "mad mad world");
}
//! Example domains for Canrun collections
use crate::lmap::LMap;
use crate::lvec::LVec;
// Declares a logic-programming domain that can hold plain `i32` values
// plus the collection types provided by this crate.
canrun::domain! {
    pub Collections {
        i32,
        LMap<i32, i32>,
        LVec<i32>
    }
}
|
use crate::{
config::CategoryIcon,
models::{Category, CategoryKind},
tools::slugify,
};
use chrono::{DateTime, FixedOffset};
use hashbrown::HashMap;
use lazy_static::*;
use regex::{Captures, NoExpand, Regex};
lazy_static! {
    // whitespace and newlines at the end of the text
    static ref TRAILING_SPACE: Regex = Regex::new(r"[\r\n\s]*$").unwrap();
    // a newline, tolerating any number of preceding carriage returns
    static ref LINE_BREAK: Regex = Regex::new(r"\r*\n").unwrap();
    // any single platform line ending
    static ref NEW_LINE: Regex = Regex::new(r"(\r\n|\n|\r)").unwrap();
}
/// Material icon tag
///
/// Renders `<i class="material-icons NAME">NAME</i>`.
///
/// https://material.io/icons/
pub fn icon_tag(name: &str) -> String {
    let mut tag = String::from("<i class=\"material-icons ");
    tag.push_str(name);
    tag.push_str("\">");
    tag.push_str(name);
    tag.push_str("</i>");
    tag
}
/// Month Day, Year (March 15, 1973)
///
/// Note: `%e` pads single-digit days with a leading space.
pub fn date_string(d: DateTime<FixedOffset>) -> String {
    d.format("%B %e, %Y").to_string()
}
/// HTML tag for post category icon
///
/// Maps the category kind ("who"/"what"/"when"/"where") to its
/// configured icon name, falling back to the default icon.
pub fn category_icon(kind: &CategoryKind, config: &CategoryIcon) -> String {
    let icon = match kind.to_string().to_lowercase().as_str() {
        "who" => &config.who,
        "what" => &config.what,
        "when" => &config.when,
        // `where` is a keyword, hence the raw identifier in the config
        "where" => &config.r#where,
        _ => &config.default,
    };
    icon_tag(icon)
}
/// HTML tag for mode of travel category icon
///
/// Finds the post's "what" category and returns the name of the first
/// configured icon whose pattern matches the category name, if any.
pub fn travel_mode_icon(
    categories: &[Category],
    mode_icons: &HashMap<String, Regex>,
) -> Option<String> {
    let what = categories.iter().find(|c| c.kind == CategoryKind::What)?;
    mode_icons
        .iter()
        .find(|(_, re)| re.is_match(&what.name))
        .map(|(icon_name, _)| icon_name.to_owned())
}
/// Spells out a number in English when a single word exists for it
/// (zero through twelve and the whole tens up to ninety); any other
/// value is returned as its decimal digits.
pub fn say_number(n: usize) -> String {
    let word = match n {
        0 => "Zero",
        1 => "One",
        2 => "Two",
        3 => "Three",
        4 => "Four",
        5 => "Five",
        6 => "Six",
        7 => "Seven",
        8 => "Eight",
        9 => "Nine",
        10 => "Ten",
        11 => "Eleven",
        12 => "Twelve",
        20 => "Twenty",
        30 => "Thirty",
        40 => "Forty",
        50 => "Fifty",
        60 => "Sixty",
        70 => "Seventy",
        80 => "Eighty",
        90 => "Ninety",
        // no single English word — fall back to the digits
        _ => return n.to_string(),
    };
    word.to_owned()
}
/// Label like "Three tags" for a sized list: the spelled-out count plus
/// the correctly pluralized noun.
pub fn list_label(word: &str, list: impl ExactSizeIterator) -> String {
    format!("{} {}", say_number(list.len()), plural(word, list.len()))
}
/// Format English word to plural form if count != 1
///
/// Consonant + "y" endings take "-ies" ("city" -> "cities"); vowel +
/// "y" endings just take "-s" ("day" -> "days", previously the
/// incorrect "daies"); everything else gets a plain "-s" suffix.
pub fn plural(word: &str, count: usize) -> String {
    if count == 1 {
        return word.to_owned();
    }
    // "-ies" applies only when the "y" follows a consonant.
    let consonant_y = word.ends_with('y')
        && !word
            .chars()
            .rev()
            .nth(1)
            .map(|c| "aeiou".contains(c.to_ascii_lowercase()))
            .unwrap_or(false);
    if consonant_y {
        format!("{}ies", word.trim_end_matches('y'))
    } else {
        format!("{}s", word)
    }
}
/// Replace "1/2"-style text fractions with superscript/subscript HTML
/// around a Unicode fraction slash (e.g. `<sup>1</sup>⁄<sub>2</sub>`).
pub fn fraction(f: &str) -> String {
    lazy_static! {
        // two numbers separated by a forward slash
        static ref SLASH_NUMBERS: Regex = Regex::new(r"(\d+)/(\d+)").unwrap();
    }
    SLASH_NUMBERS
        .replace_all(f, "<sup>$1</sup>⁄<sub>$2</sub>")
        .into_owned()
}
/// Wrap Unicode superscript digits in `<sup>` tags.
///
/// The `[^/\s]` prefix requirement keeps fraction slashes and leading
/// whitespace (e.g. atomic numbers) from being treated as footnotes.
fn format_superscript(text: &str) -> String {
    lazy_static! {
        // match superscripts but don't match atomic numbers
        static ref FOOTNOTE_NUMBER: Regex =
            Regex::new(r"([^/\s])([⁰¹²³⁴⁵⁶⁷⁸⁹]+)\B").unwrap();
    }
    FOOTNOTE_NUMBER
        .replace_all(text, "$1<sup>$2</sup>")
        .into_owned()
}
/// Convert bare URLs into HTML links
///
/// Displayed text is abbreviated: "//www." is trimmed from the domain,
/// and deep paths collapse to "domain…/last-segment".
fn link_urls(text: &str) -> String {
    lazy_static! {
        static ref URL: Regex =
            Regex::new(r"\b(?P<url>https?://[^\s]+)\b").unwrap();
        // scheme + host (+ optional trailing slash)
        static ref DOMAIN: Regex = Regex::new(r"https?://[^/]+/?").unwrap();
        // final path segment, ending at query, fragment or end of string
        static ref LAST_PATH: Regex =
            Regex::new(r"/([^/?#]+)(\?|\#|$)").unwrap();
    }
    URL.replace_all(&text, |c: &Captures| {
        let url: &str = &c["url"];
        let domain: &str = &DOMAIN.captures(url).unwrap()[0];
        let path = url.replace(domain, "");
        let domain = domain.replace("//www.", "//");
        if path.contains('/') {
            // deep link: display as domain…/final-segment
            let page: &str = &LAST_PATH.captures(&path).unwrap()[1];
            format!("<a href=\"{}\">{}…/{}</a>", url, domain, page)
        } else {
            format!("<a href=\"{}\">{}{}</a>", url, domain, path)
        }
    })
    .into_owned()
}
/// Replace UTF superscript with HTML superscript
///
/// Renders the notes block as an `<ol class="footnotes">`; a leading
/// asterisk marks a photo-credit note, which becomes item 0 with a
/// star icon.
fn format_footnotes(notes: &str) -> String {
    lazy_static! {
        static ref ASTERISK: Regex = Regex::new(r"^\s*\*").unwrap();
        static ref SUPERSCRIPT: Regex =
            Regex::new(r"[⁰¹²³⁴⁵⁶⁷⁸⁹]+\s*").unwrap();
        // trailing empty item
        static ref EMPTY_ITEM: Regex =
            Regex::new(r"</span></li><li><span>\s*$").unwrap();
        static ref ASTERISK_ITEM: Regex =
            Regex::new(r"<li><span>\s*\*\s*").unwrap();
    }
    let has_asterisk: bool = ASTERISK.is_match(notes);
    // photo credit asterisk becomes note 0
    let li_start = if has_asterisk { " start=\"0\"" } else { "" };
    let mut html: String = link_urls(&notes);
    // numbering comes from the <ol>, so drop the superscript digits
    html = SUPERSCRIPT.replace_all(&html, "").into_owned();
    // one <li> per source line
    html = LINE_BREAK
        .replace_all(&html, "</span></li><li><span>")
        .into_owned();
    html = EMPTY_ITEM.replace_all(&html, "").into_owned();
    html = format!(
        "<ol class=\"footnotes\"{}><li><span>{}</span></li></ol>",
        li_start, html
    );
    if has_asterisk {
        // NoExpand: the icon markup must not be treated as a $-template
        let replacement =
            format!("<li class=\"credit\">{}<span>", icon_tag("star"));
        return ASTERISK_ITEM
            .replace_all(&html, NoExpand(&replacement))
            .into_owned();
    }
    html
}
/// Linked list of photo tags
pub fn photo_tag_list(list: &[String]) -> String {
lazy_static! {
static ref NON_WORD: Regex = Regex::new(r"\W").unwrap();
}
let mut tag_list: Vec<String> = Vec::new();
for t in list.iter() {
tag_list.push(format!(
"<a href=\"/photo-tag/{}\" rel=\"tag\">{}</a>",
slugify(&t),
t
));
}
tag_list.sort();
tag_list.join("\n")
}
/// Remove block quotes and wrap in fake tags that won't match subsequent
/// operations
///
/// Long curly-quoted passages (200+ chars) on their own lines become
/// `[Q]…[/Q]` placeholders; `format_block_quote` later restores real
/// `<blockquote>` markup.
fn unformat_block_quote(html: &str) -> String {
    lazy_static! {
        // long quote followed by line break or end of text
        static ref BLOCK_QUOTE: Regex =
            Regex::new(r"(\r\n|\r|\n|^)\s*(?P<quote>“[^”]{200,}”[⁰¹²³⁴⁵⁶⁷⁸⁹]*)\s*(\r\n|\r|\n|$)").unwrap();
        static ref CURLY_QUOTE: Regex = Regex::new("[“”]").unwrap();
    }
    BLOCK_QUOTE
        .replace_all(&html, |c: &Captures| {
            // the curly quotes themselves are dropped from the quote body
            let quote = CURLY_QUOTE.replace_all(&c["quote"], "").into_owned();
            format!("[Q]{}[/Q]", quote)
        })
        .into_owned()
}
/// Restore HTML blockquote tags
///
/// Converts the `[Q]…[/Q]` placeholders produced by
/// `unformat_block_quote` back into `<blockquote>` markup and repairs
/// the surrounding paragraph structure.
fn format_block_quote(text: &str) -> String {
    lazy_static! {
        // plain text following a quote starts a new "first" paragraph
        static ref AFTER_BLOCK_QUOTE: Regex =
            Regex::new(r"\[/Q\][\r\n\s]*([^<]+)").unwrap();
        static ref START_BLOCK_QUOTE: Regex =
            Regex::new(r"(<p>)?\[Q\]").unwrap();
        static ref END_BLOCK_QUOTE: Regex =
            Regex::new(r"\[/Q\](</p>)?").unwrap();
        /// Starting block quote with closing p tag will make an orphan if there
        /// isn't any preceding text
        static ref P_ORPHAN: Regex = Regex::new(r"^</p>").unwrap();
    }
    let mut html: String = text.to_string();
    html = AFTER_BLOCK_QUOTE
        .replace_all(&html, "[/Q]<p class=\"first\">$1")
        .into_owned();
    html = START_BLOCK_QUOTE
        .replace_all(&html, "</p><blockquote><p>")
        .into_owned();
    html = END_BLOCK_QUOTE
        .replace_all(&html, "</p></blockquote>")
        .into_owned();
    P_ORPHAN.replace_all(&html, "").into_owned()
}
/// Convert new lines to HTML paragraphs and normalize links
///
/// Pipeline: extract footnotes and poems first (they get their own
/// formatting), paragraph-ize the remaining prose, style a short
/// leading quote as a "quip", then re-insert the formatted poems.
pub fn caption(text: &str) -> String {
    if text.is_empty() {
        return String::new();
    }
    lazy_static! {
        static ref EMPTY_P_TAG: Regex = Regex::new(r"<p[^>]*></p>").unwrap();
        // match the first HTML paragraph if it's short and contains a quote
        static ref QUIP: Regex = Regex::new(r"^\s*<p>(?P<quote>“[^”]{4,80}”[^<]{0,50})</p>").unwrap();
        // poems are preceded and followed by lone tilde (~)
        static ref POEM: Regex = Regex::new(r"(^|\s+)~(\r\n|\r|\n)(?P<poem>([^\r\n]{3,100}([\r\n]+)){3,})~(\s+|$)").unwrap();
        // notes are preceded by three underscores (___) and followed by EOF
        static ref FOOTNOTES: Regex =
            Regex::new(r"(^|[\r\n]+)_{3}[\r\n]*(?P<notes>[\s\S]+)$").unwrap();
    }
    // unique placeholder key for each poem
    let poem_key = |i: usize| format!("[P_{}]", i);
    let mut footnotes: String = String::new();
    let mut poems: Vec<String> = Vec::new();
    let mut html: String = text.to_string();
    // set aside footnotes (no placeholder needed because always last)
    html = FOOTNOTES
        .replace_all(&html, |c: &Captures| {
            footnotes = format_footnotes(&c["notes"]);
            ""
        })
        .into_owned();
    // set aside poems and substitute with placeholder
    html = POEM
        .replace_all(&html, |c: &Captures| {
            poems.push(format_poem(&c["poem"]));
            poem_key(poems.len() - 1)
        })
        .into_owned();
    html = unformat_block_quote(&html);
    html = format!("<p>{}</p>", html);
    html = NEW_LINE.replace_all(&html, "</p><p>").into_owned();
    html = EMPTY_P_TAG.replace_all(&html, "").into_owned();
    html = QUIP
        .replace_all(&html, |c: &Captures| {
            format!("<p class=\"quip\">{}</p>", &c["quote"])
        })
        .into_owned();
    html = format_superscript(&html);
    html = format_block_quote(&html);
    for (i, p) in poems.iter().enumerate() {
        // re-insert poems
        let key = poem_key(i);
        html = html.replace(&key, &format!("</p>{}<p class=\"first\">", p));
        html = EMPTY_P_TAG.replace_all(&html, "").into_owned();
    }
    format!("{}{}", html, footnotes)
}
/// Convert raw line breaks to `<br/>`, collapse runs of two or more
/// into paragraph breaks, and wrap the whole text in `<p>` tags.
fn format_line_breaks(text: &str) -> String {
    lazy_static! {
        static ref MULTI_BREAK: Regex = Regex::new(r"(<br/>){2,}").unwrap();
    }
    let text = LINE_BREAK.replace_all(&text, "<br/>").into_owned();
    let text = MULTI_BREAK.replace_all(&text, "</p><p>").into_owned();
    format!("<p>{}</p>", text)
}
/// Format a poem block as a `<blockquote>`.
///
/// Exactly three lines is styled as a haiku (class "haiku" plus a "spa"
/// icon); anything else gets the plain "poem" class.
fn format_poem(text: &str) -> String {
    lazy_static! {
        // exactly three lines
        static ref HAIKU: Regex =
            Regex::new(r"^([^\r\n]{3,100}([\r\n]+|$)){3}$").unwrap();
        // indentations in poems are three spaces
        static ref POEM_INDENT: Regex = Regex::new(" {3}").unwrap();
    }
    let (css, icon) = if HAIKU.is_match(&text) {
        ("haiku", icon_tag("spa"))
    } else {
        ("poem", String::new())
    };
    let mut poem: String = text.to_owned();
    poem = TRAILING_SPACE.replace_all(&poem, "").into_owned();
    poem = format_line_breaks(&poem);
    // each 3-space indent becomes a styled tab span
    poem = POEM_INDENT
        .replace_all(&poem, "<span class=\"tab\"></span>")
        .into_owned();
    poem = format_superscript(&poem);
    format!(
        "<blockquote class=\"{}\">{}{}</blockquote>",
        css, poem, icon
    )
}
#[cfg(test)]
mod tests {
    //! Unit tests for the caption-formatting pipeline: icon tags,
    //! fractions, footnotes, photo-tag lists, category icons, line
    //! breaks, URL linking, poems/haiku and block quotes.
    use super::*;
    use crate::config::{CategoryConfig, CategoryIcon};
    use crate::{
        models::{Category, CategoryKind},
        tools::config_regex,
    };

    // Windows-style line ending used by all the text fixtures below.
    const NEW_LINE: &str = "\r\n";
    // Long filler paragraph reused across the caption tests.
    const LIPSUM: &str = "Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.";
    // Sample quoted passage for the block-quote tests.
    const QUOTE:&str = "Firefighters are working to get a handle on several wildfires that sparked during a lightning storm on Thursday night. Strong winds and poor visibility created challenges for firefighters working the blazes on Saturday ...";

    #[test]
    fn creates_icon_tag() {
        assert_eq!(
            icon_tag("star"),
            "<i class=\"material-icons star\">star</i>"
        );
    }

    #[test]
    fn fraction_html() {
        assert_eq!(fraction("1/2"), "<sup>1</sup>⁄<sub>2</sub>");
    }

    // A leading "*" footnote marks a photo credit and renders with a star
    // icon; the list then starts at 0 so the credit is item zero.
    #[test]
    fn footnotes_as_ordered_list() {
        let source = format!(
            "* Note about photo credit{cr}\
            ¹ Some other note{cr}\
            ² Last note",
            cr = NEW_LINE
        );
        let target = "<ol class=\"footnotes\" start=\"0\">\
            <li class=\"credit\"><i class=\"material-icons star\">star</i><span>Note about photo credit</span></li>\
            <li><span>Some other note</span></li>\
            <li><span>Last note</span></li>\
            </ol>";

        assert_eq!(format_footnotes(&source), target);
    }

    // Tags are emitted alphabetically regardless of input order, with
    // slugified hrefs.
    #[test]
    fn photo_tag_lists() {
        let tags: Vec<String> = vec![
            "Second".to_owned(),
            "First".to_owned(),
            "Third and Last".to_owned(),
        ];
        let target = "<a href=\"/photo-tag/first\" rel=\"tag\">First</a>\n\
            <a href=\"/photo-tag/second\" rel=\"tag\">Second</a>\n\
            <a href=\"/photo-tag/third-and-last\" rel=\"tag\">Third and Last</a>";

        assert_eq!(photo_tag_list(&tags), target);
    }

    #[test]
    fn category_icon_tag() {
        let config = CategoryIcon {
            who: "person".to_owned(),
            what: "directions".to_owned(),
            when: "date_range".to_owned(),
            r#where: "map".to_owned(),
            default: "local_offer".to_owned(),
        };

        assert_eq!(
            category_icon(&CategoryKind::Who, &config),
            icon_tag("person")
        );
        assert_eq!(
            category_icon(&CategoryKind::What, &config),
            icon_tag("directions")
        );
        //assert_eq!(category_icon(&"nope", &config), icon_tag("local_offer"));
    }

    // The "what" regex list maps category names (here "KTM") to a travel
    // mode such as "motorcycle".
    #[test]
    fn travel_mode_icon_tag() {
        let config = CategoryConfig {
            icon: CategoryIcon {
                who: "person".to_owned(),
                what: "directions".to_owned(),
                when: "date_range".to_owned(),
                r#where: "map".to_owned(),
                default: "local_offer".to_owned(),
            },
            what_regex: Some(vec![
                ("motorcycle".to_owned(), "(KTM|BMW|Honda)".to_owned()),
                ("bicycle".to_owned(), "bicycle".to_owned()),
            ]),
            display: Vec::new(),
        };
        let categories: Vec<Category> =
            vec![Category::new("KTM", CategoryKind::What)];

        assert_eq!(
            travel_mode_icon(&categories, &config_regex(&config.what_regex)),
            Some("motorcycle".to_owned())
        );
    }

    #[test]
    fn superscripts() {
        let source = format!("{}²", LIPSUM);
        let target = format!("<p>{}<sup>²</sup></p>", LIPSUM);

        assert_eq!(caption(&source), target);
    }

    #[test]
    fn plural_test() {
        assert_eq!(plural("theory", 1), "theory");
        assert_eq!(plural("theory", 0), "theories");
        assert_eq!(plural("count", 1), "count");
        assert_eq!(plural("count", 20), "counts");
    }

    // One break -> <br/>; two or more breaks -> paragraph boundary.
    #[test]
    fn line_breaks() {
        let source =
            format!("one{cr}two{cr}{cr}three{cr}{cr}{cr}four", cr = NEW_LINE);
        let target = "<p>one<br/>two</p>\
            <p>three</p>\
            <p>four</p>";

        assert_eq!(format_line_breaks(&source), target);
    }

    // Long URLs are shortened in the link text (middle collapsed to "…")
    // while the href keeps the full URL.
    #[test]
    fn url_formatting() {
        const URL1: &str = "http://en.wikipedia.org/wiki/Sweet_Pickles";
        const URL2: &str = "http://www.amazon.com/Cheryl-Dudley/e/B001JP7LNO/ref=ntt_athr_dp_pel_1";
        const URL3: &str = "http://www.trailimage.com/trinity-ridge-fire-tour";

        let source = format!(
            "¹ Wikipedia: {} ² Cheryl Reed, January 17, 2003: {}",
            URL1, URL2,
        );
        let target = format!(
            "¹ Wikipedia: <a href=\"{}\">http://en.wikipedia.org/…/Sweet_Pickles</a> \
            ² Cheryl Reed, January 17, 2003: <a href=\"{}\">http://amazon.com/…/ref=ntt_athr_dp_pel_1</a>",
            URL1, URL2,
        );

        assert_eq!(link_urls(&source), target);

        let source =
            format!("¹ Trail Image, “Trinity Ridge Fire Tour”: {}", URL3);
        let target = format!(
            "¹ Trail Image, “Trinity Ridge Fire Tour”: \
            <a href=\"{}\">http://trailimage.com/trinity-ridge-fire-tour</a>",
            URL3
        );

        assert_eq!(link_urls(&source), target);
    }

    // Four lines -> "poem" class; a leading space on a line becomes a tab
    // span pair (three-space indent rule).
    #[test]
    fn poem_formatting() {
        let source = format!(
            "Begotten Not Born{cr}\
            Indwelling Transcendence{cr}   Infinite Regress{cr}\
            Uncertain Progress",
            cr = NEW_LINE
        );
        let target = "<blockquote class=\"poem\"><p>\
            Begotten Not Born<br/>\
            Indwelling Transcendence<br/>\
            <span class=\"tab\"></span><span class=\"tab\"></span>\
            Infinite Regress<br/>\
            Uncertain Progress</p></blockquote>";

        assert_eq!(format_poem(&source), target);
    }

    // Exactly three lines -> "haiku" class plus the "spa" icon.
    #[test]
    fn haiku_formatting() {
        let source = format!(
            "cow stands chewing{cr}\
            wet meadow grass{cr}\
            while mud swallows wheels{cr}",
            cr = NEW_LINE
        );
        let target = "<blockquote class=\"haiku\"><p>\
            cow stands chewing<br/>\
            wet meadow grass<br/>\
            while mud swallows wheels</p>\
            <i class=\"material-icons spa\">spa</i>\
            </blockquote>";

        assert_eq!(format_poem(&source), target);
    }

    // "___" separates body text from the footnote list.
    #[test]
    fn caption_with_footnotes() {
        let source = format!(
            "{txt}{cr}\
            ___{cr}\
            * Note about photo credit{cr}\
            ¹ Some other note{cr}\
            ² Last note",
            txt = LIPSUM,
            cr = NEW_LINE
        );
        let target = format!("<p>{}</p>\
            <ol class=\"footnotes\" start=\"0\">\
            <li class=\"credit\">\
            <i class=\"material-icons star\">star</i><span>Note about photo credit</span></li>\
            <li><span>Some other note</span></li>\
            <li><span>Last note</span></li>\
            </ol>", LIPSUM);

        assert_eq!(caption(&source), target);

        let source = format!(
            "{txt}{cr}___{cr}¹ Some other note{cr}² Last note",
            txt = LIPSUM,
            cr = NEW_LINE
        );
        let target = format!(
            "<p>{}</p>\
            <ol class=\"footnotes\">\
            <li><span>Some other note</span></li>\
            <li><span>Last note</span></li>\
            </ol>",
            LIPSUM
        );

        assert_eq!(caption(&source), target);

        // should ignore trailing newline
        let source = format!("{}{}", source, NEW_LINE);

        assert_eq!(caption(&source), target);
    }

    #[test]
    fn caption_ending_with_block_quote() {
        let source =
            format!("{txt}{cr}{cr}“{txt}”", txt = LIPSUM, cr = NEW_LINE);
        let target = format!(
            "<p>{txt}</p>\
            <blockquote><p>{txt}</p></blockquote>",
            txt = LIPSUM
        );

        assert_eq!(caption(&source), target);
    }

    // Consecutive paragraphs each opening with “ merge into one quote.
    #[test]
    fn block_quoted_paragraphs() {
        let source = format!(
            "{txt}{cr}{cr}\
            “{txt}{cr}{cr}\
            “{txt}{cr}{cr}\
            “{txt}”",
            txt = LIPSUM,
            cr = NEW_LINE
        );
        let target = format!(
            "<p>{txt}</p>\
            <blockquote>\
            <p>{txt}</p>\
            <p>{txt}</p>\
            <p>{txt}</p>\
            </blockquote>",
            txt = LIPSUM
        );

        assert_eq!(caption(&source), target);
    }

    // Text resuming after a quote gets the "first" paragraph class.
    #[test]
    fn block_quote_within_other_text() {
        let source = format!(
            "{txt}{cr}{cr}\
            “{txt}”{cr}{cr}\
            {txt}",
            txt = LIPSUM,
            cr = NEW_LINE
        );
        let target = format!(
            "<p>{txt}</p>\
            <blockquote><p>{txt}</p></blockquote>\
            <p class=\"first\">{txt}</p>",
            txt = LIPSUM
        );

        assert_eq!(caption(&source), target);
    }

    // unformat_block_quote reduces quotes to intermediate [Q]…[/Q] markers.
    #[test]
    fn intermediate_block_quote_format() {
        let source = format!(
            "{txt}{cr}{cr}“{q}”¹{cr}{cr}{txt}”",
            txt = LIPSUM,
            q = QUOTE,
            cr = NEW_LINE
        );
        let target = format!("{txt}[Q]{p}¹[/Q]{txt}”", txt = LIPSUM, p = QUOTE);

        assert_eq!(unformat_block_quote(&source), target);
    }

    // format_block_quote expands [Q]…[/Q] markers into blockquote HTML.
    #[test]
    fn footnoted_block_quote() {
        let source = format!(
            "{txt}[Q]{q}<sup>¹</sup>[/Q]{txt}”",
            txt = LIPSUM,
            q = QUOTE
        );
        let target = format!(
            "{txt}</p>\
            <blockquote><p>{q}<sup>¹</sup></p></blockquote>\
            <p class=\"first\">{txt}”",
            txt = LIPSUM,
            q = QUOTE
        );

        assert_eq!(format_block_quote(&source), target);
    }

    #[test]
    fn block_quote_with_ellipsis() {
        let source = format!(
            "{txt}{cr}{cr}“{q}”¹{cr}{cr}{txt}",
            txt = LIPSUM,
            q = QUOTE,
            cr = NEW_LINE
        );
        let target = format!(
            "<p>{txt}</p>\
            <blockquote><p>{p}<sup>¹</sup></p></blockquote>\
            <p class=\"first\">{txt}</p>",
            p = QUOTE,
            txt = LIPSUM
        );

        assert_eq!(caption(&source), target);
    }

    #[test]
    fn caption_that_is_entirely_quote() {
        let source = format!("“{}”¹", LIPSUM);
        let target =
            format!("<blockquote><p>{}<sup>¹</sup></p></blockquote>", LIPSUM);

        assert_eq!(caption(&source), target);
    }

    // do no blockquote when quote is interrupted
    // “The constitutions of nearly all the states have qualifications for voters simply on citizenship,” Pefley countered, “without question with regard to what they believe on this or that question. Then I ask, why make a distinction of the people of Idaho?
    // “It appears to have been reserved for Idaho’s constitution to put in the first religious test in regard to the right of suffrage and holding office … Political and religious persecution are supposed to have died at the termination of the revolution but it appears that Idaho is again an exception.”¹
    // Pefley’s arguments were unheeded and the section was approved.
    #[test]
    fn interrupted_quotes() {
        let source = format!(
            "“{txt},” he said, “{txt}{cr}{cr}“{txt}”{cr}{cr}",
            txt = LIPSUM,
            cr = NEW_LINE
        );
        let target = format!(
            "<p>“{txt},” he said, “{txt}</p>\
            <blockquote><p>{txt}</p></blockquote>",
            txt = LIPSUM
        );

        assert_eq!(caption(&source), target);
    }

    // "~" on its own line delimits inline poems within a caption.
    #[test]
    fn caption_that_is_entirely_poem() {
        let source = format!(
            "~{cr}\
            Begotten Not Born{cr}\
            Indwelling Transcendence{cr}   Infinite Regress{cr}\
            Uncertain Progress{cr}\
            ~",
            cr = NEW_LINE
        );
        let target = "<blockquote class=\"poem\"><p>\
            Begotten Not Born<br/>\
            Indwelling Transcendence<br/>\
            <span class=\"tab\"></span><span class=\"tab\"></span>\
            Infinite Regress<br/>\
            Uncertain Progress</p></blockquote>";

        assert_eq!(caption(&source), target);
    }

    #[test]
    fn caption_beginning_with_haiku() {
        let source = format!(
            "~{cr}\
            cow stands chewing{cr}\
            wet meadow grass{cr}\
            while mud swallows wheels{cr}\
            ~{cr}\
            Here we have Joel “Runs with Cows” Abbott. \
            He did a little loop out among them—kind of became one of them.",
            cr = NEW_LINE
        );
        let target = "<blockquote class=\"haiku\"><p>\
            cow stands chewing<br/>\
            wet meadow grass<br/>\
            while mud swallows wheels\
            </p><i class=\"material-icons spa\">spa</i>\
            </blockquote>\
            <p class=\"first\">Here we have Joel “Runs with Cows” Abbott. \
            He did a little loop out among them—kind of became one of them.</p>";

        assert_eq!(caption(&source), target);
    }

    // Multi-stanza poem; checks poem HTML both with and without trailing
    // caption text.
    #[test]
    fn long_inline_poem() {
        let poem_text = format!(
            "~{cr}\
            Have you ever stood on the top of a mountain{cr}\
            And gazed down on the grandeur below{cr}\
            And thought of the vast army of people{cr}   \
            Who never get out as we go?{cr}\
            {cr}\
            Have you ever trailed into the desert{cr}\
            Where the hills fade from gold into blue,{cr}\
            And then thought of some poor other fellow{cr}\
            Who would like to stand alongside of you?{cr}\
            ~",
            cr = NEW_LINE
        );
        const POEM_HTML: &str = "<blockquote class=\"poem\">\
            <p>\
            Have you ever stood on the top of a mountain<br/>\
            And gazed down on the grandeur below<br/>\
            And thought of the vast army of people<br/>\
            <span class=\"tab\"></span>Who never get out as we go?</p>\
            <p>\
            Have you ever trailed into the desert<br/>\
            Where the hills fade from gold into blue,<br/>\
            And then thought of some poor other fellow<br/>\
            Who would like to stand alongside of you?</p>\
            </blockquote>";

        // no text after poem
        let source = format!(
            "{txt}{cr}{cr}{poem}",
            txt = LIPSUM,
            cr = NEW_LINE,
            poem = poem_text
        );
        let target =
            format!("<p>{txt}</p>{poem}", txt = LIPSUM, poem = POEM_HTML);

        assert_eq!(caption(&source), target);

        // text after poem
        let source = format!(
            "{txt}{cr}{cr}{poem}{cr}{cr}{txt}",
            txt = LIPSUM,
            poem = poem_text,
            cr = NEW_LINE
        );
        let target = format!(
            "<p>{txt}</p>{poem}<p class=\"first\">{txt}</p>",
            txt = LIPSUM,
            poem = POEM_HTML
        );

        assert_eq!(caption(&source), target);
    }

    // Dialogue separated by blank lines must stay plain paragraphs (with
    // the "quip" class on the opener) rather than being treated as verse.
    #[test]
    fn does_not_make_conversation_into_poem() {
        let source = format!(
            "“What’s wrong Brenna?” I ask.{cr}\
            {cr}\
            “I can’t sleep.”{cr}\
            {cr}\
            “Just lay down.”{cr}\
            {cr}\
            “I can’t.”{cr}\
            {cr}\
            “Brenna,” I insist, “lay down.”",
            cr = NEW_LINE
        );
        let target = "<p class=\"quip\">“What’s wrong Brenna?” I ask.</p>\
            <p>“I can’t sleep.”</p>\
            <p>“Just lay down.”</p>\
            <p>“I can’t.”</p>\
            <p>“Brenna,” I insist, “lay down.”</p>";

        assert_eq!(caption(&source), target);
    }

    // End-to-end: prose + multi-stanza poem with footnote superscripts +
    // trailing prose + footnote list with linked URLs.
    #[test]
    fn footnoted_poem() {
        const P1: &str = "Now many years have passed since we lived there \
            and little connects us to that place—now in other hands—other than \
            our shared memories. My mom has written of Our Old House:";
        const P3: &str =
            "This particular weekend had an additional attraction, my nephew \
            Kaden’s seventh birthday party. I don’t see my nephews often so I \
            was glad for the coincidence of events.";
        const URL1: &str = "http://en.wikipedia.org/wiki/Sweet_Pickles";
        const URL2: &str = "http://www.amazon.com/Cheryl-Dudley/e/B001JP7LNO/ref=ntt_athr_dp_pel_1";

        let source = format!(
            "{p1}{cr}\
            ~{cr}\
            When I drive by I always think I see myself{cr}\
            standing in the large picture window waving,{cr}\
            wishing I’d stop by and have a spot of tea.{cr}\
            {cr}\
            But I know its only what I want{cr}\
            because I didn’t want to leave, you see,{cr}\
            and when I drive by, smell the row{cr}\
            of lilacs I planted along the road,{cr}\
            see the gray smoke curling from the chimney,{cr}\
            {cr}\
            I want to pull in and stop,{cr}\
            pretend I never left, unload the groceries,{cr}\
            stoke the fire, straighten the photos on the wall{cr}\
            and wash the dishes that have stacked{cr}\
            by the sink for the last ten years.{cr}\
            {cr}\
            You’d be there, too, in your blue pajamas{cr}\
            asking for a story. We’d climb the narrow{cr}\
            staircase to your room and turn on the lamp,{cr}\
            listening for a moment to the frogs outside,{cr}\
            that bellowed thousands strong.{cr}\
            {cr}\
            I’d read your Sweet Pickles books¹{cr}\
            and sing that Bumble Bee song you loved.{cr}\
            Then we’d lay quietly and never grow old,{cr}\
            while time went on without us, down{cr}\
            the dusty country road, slipping over the horizon,{cr}\
            leaving a soft orange glow for us to read by.²{cr}\
            ~{cr}\
            In recent years I’ve tried to make the annual, three-hundred mile \
            pilgrimage to “Troy Days.”³ Starchy pancake-feed food, a couple \
            fire trucks and horses paraded down main street, and an evening of \
            under-age inebriation make a good time, of course, but my trip is \
            not for those things. Troy Days is when and where my dad’s \
            brothers reunite annually from their homes across the western U.S. \
            In their company, my mind can visit our old house, find a place \
            alongside my dad, my grandma and the rest seated around a fire, \
            our eyes all reflecting the same eternal glow.{cr}\
            {cr}\
            {p3}{cr}\
            ___{cr}\
            ¹ Wikipedia: {url1}{cr}\
            ² Cheryl Reed, January 17, 2003: {url2}",
            cr = NEW_LINE,
            p1 = P1,
            p3 = P3,
            url1 = URL1,
            url2 = URL2
        );
        let target = format!("<p>{p1}</p>\
            <blockquote class=\"poem\"><p>\
            When I drive by I always think I see myself<br/>\
            standing in the large picture window waving,<br/>\
            wishing I’d stop by and have a spot of tea.\
            </p><p>\
            But I know its only what I want<br/>\
            because I didn’t want to leave, you see,<br/>\
            and when I drive by, smell the row<br/>\
            of lilacs I planted along the road,<br/>\
            see the gray smoke curling from the chimney,\
            </p><p>\
            I want to pull in and stop,<br/>\
            pretend I never left, unload the groceries,<br/>\
            stoke the fire, straighten the photos on the wall<br/>\
            and wash the dishes that have stacked<br/>\
            by the sink for the last ten years.\
            </p><p>\
            You’d be there, too, in your blue pajamas<br/>\
            asking for a story. We’d climb the narrow<br/>\
            staircase to your room and turn on the lamp,<br/>\
            listening for a moment to the frogs outside,<br/>\
            that bellowed thousands strong.\
            </p><p>\
            I’d read your Sweet Pickles books<sup>¹</sup><br/>\
            and sing that Bumble Bee song you loved.<br/>\
            Then we’d lay quietly and never grow old,<br/>\
            while time went on without us, down<br/>\
            the dusty country road, slipping over the horizon,<br/>\
            leaving a soft orange glow for us to read by.<sup>²</sup>\
            </p></blockquote>\
            <p class=\"first\">\
            In recent years I’ve tried to make the annual, three-hundred mile \
            pilgrimage to “Troy Days.”<sup>³</sup> Starchy pancake-feed food, a couple \
            fire trucks and horses paraded down main street, and an evening of \
            under-age inebriation make a good time, of course, but my trip is \
            not for those things. Troy Days is when and where my dad’s \
            brothers reunite annually from their homes across the western U.S. \
            In their company, my mind can visit our old house, find a place \
            alongside my dad, my grandma and the rest seated around a fire, \
            our eyes all reflecting the same eternal glow.</p>\
            <p>{p3}</p>\
            <ol class=\"footnotes\">\
            <li><span>Wikipedia: {link1}</span></li>\
            <li><span>Cheryl Reed, January 17, 2003: {link2}</span></li>\
            </ol>",
            p1 = P1,
            p3 = P3,
            link1 = link_urls(URL1),
            link2 = link_urls(URL2)
        );

        assert_eq!(caption(&source), target);
    }
}
|
use diesel::types::*;
use chrono::NaiveDateTime;
/// Raw-SQL row: average salary grouped by worker type.
#[derive(QueryableByName,Serialize)]
pub(crate) struct TypedSalary {
    // worker type label (presumably a query alias — confirm against the SQL)
    #[sql_type = "Text"]
    pub type_: String,
    #[sql_type = "Int8"]
    pub avg_salary: i64,
}
/// Raw-SQL row: a single average-salary aggregate.
#[derive(QueryableByName,Serialize)]
pub(crate) struct AvgSalary{
    #[sql_type = "Int8"]
    pub avg_salary: i64,
}
/// Raw-SQL row: a bare id column.
#[derive(QueryableByName,Serialize)]
pub(crate) struct Ids{
    #[sql_type = "Int4"]
    pub id: i32,
}
/// Raw-SQL row: contract id paired with an `ef` value
/// (meaning of `ef` not visible here — confirm against the query).
#[derive(QueryableByName,Serialize)]
pub(crate) struct ContractsEf{
    #[sql_type = "Int4"]
    pub id: i32,
    #[sql_type = "Int4"]
    pub ef: i32,
}
/// Raw-SQL row: department head with full name parts.
/// Unlike `DepartmentHeads`, `secondname` is non-nullable here.
#[derive(QueryableByName,Serialize)]
pub(crate) struct ExtendedDepHeads{
    #[sql_type = "Text"]
    pub department_name: String,
    #[sql_type = "Int4"]
    pub id: i32,
    #[sql_type = "Text"]
    pub firstname: String,
    #[sql_type = "Text"]
    pub secondname: String,
    #[sql_type = "Text"]
    pub familyname: String,
}
/// Raw-SQL row: a single summed cost value.
#[derive(QueryableByName,Serialize)]
pub(crate) struct CostSum{
    #[sql_type = "Int4"]
    pub cost_sum: i32,
}
/// Raw-SQL row: department head; `secondname` may be NULL in the query
/// result, hence the `Option`.
#[derive(QueryableByName,Serialize)]
pub(crate) struct DepartmentHeads{
    #[sql_type = "Text"]
    pub department_name: String,
    #[sql_type = "Int4"]
    pub id: i32,
    #[sql_type = "Text"]
    pub firstname: String,
    #[sql_type = "Nullable<Text>"]
    pub secondname: Option<String>,
    #[sql_type = "Text"]
    pub familyname: String,
}
/// Raw-SQL row: a staff member with name, age and worker type.
#[derive(QueryableByName,Serialize)]
pub(crate) struct Stuff{
    #[sql_type = "Int4"]
    pub id: i32,
    #[sql_type = "Text"]
    pub firstname: String,
    // middle name is nullable in the query result
    #[sql_type = "Nullable<Text>"]
    pub secondname: Option<String>,
    #[sql_type = "Text"]
    pub familyname: String,
    #[sql_type = "Int4"]
    pub age: i32,
    #[sql_type = "Text"]
    pub worker_type: String,
}
/// Raw-SQL row: a piece of equipment with the contract and project it is
/// associated with (time filtering is done by the query, not this type).
#[derive(QueryableByName,Serialize)]
pub(crate) struct EquipmentByTime{
    #[sql_type = "Int4"]
    pub id: i32,
    #[sql_type = "Text"]
    pub name: String,
    #[sql_type = "Int4"]
    pub contract_id: i32,
    #[sql_type = "Int4"]
    pub project_id: i32,
}
/// Raw-SQL row: contract joined with its project — cost, both date
/// ranges, and related entity ids.
#[derive(QueryableByName,Serialize)]
pub(crate) struct FullContractData{
    #[sql_type = "Int4"]
    pub contract_id: i32,
    #[sql_type = "Int8"]
    pub cost: i64,
    // contract validity period
    #[sql_type = "Timestamp"]
    pub contract_start: NaiveDateTime,
    #[sql_type = "Timestamp"]
    pub contract_end: NaiveDateTime,
    #[sql_type = "Int4"]
    pub project_id: i32,
    #[sql_type = "Int4"]
    pub group_id: i32,
    #[sql_type = "Int4"]
    pub head_id: i32,
    // project execution period
    #[sql_type = "Timestamp"]
    pub project_start: NaiveDateTime,
    #[sql_type = "Timestamp"]
    pub project_end: NaiveDateTime,
    #[sql_type = "Int4"]
    pub eq_list_id: i32,
}
/// Raw-SQL row: contract-to-project id pair.
#[derive(QueryableByName,Serialize)]
pub(crate) struct ContractProject {
    #[sql_type = "Int4"]
    pub contract_id: i32,
    #[sql_type = "Int4"]
    pub project_id: i32,
}
/// Raw-SQL row: work performed by a company on a contract/project with
/// its cost.
#[derive(QueryableByName,Serialize)]
pub(crate) struct WorkDone {
    #[sql_type = "Text"]
    pub company_name: String,
    #[sql_type = "Int4"]
    pub contract_id: i32,
    #[sql_type = "Int4"]
    pub project_id: i32,
    #[sql_type = "Int4"]
    pub cost: i32,
}
//! Anti Grain Geometry - Rust implementation
//!
//! Originally derived from version 2.4 of [AGG](http://antigrain.com)
//!
//! This crate implements the 2D drawing / painting algorithms developed in the Anti Grain Geometry C++ library. Quoting from the author in the documentation:
//!
//! > **Anti-Grain Geometry** is not a solid graphic library and it's not very easy to use. I consider **AGG** as a **"tool to create other tools"**. It means that there's no **"Graphics"** object or something like that, instead, **AGG** consists of a number of loosely coupled algorithms that can be used together or separately. All of them have well defined interfaces and absolute minimum of implicit or explicit dependencies.
//!
//!
//! # Anti-Aliasing and Subpixel Accuracy
//!
//! One primary strength of AGG is the combination of subpixel-accurate drawing with anti-aliasing effects. There are many examples within the documentation and reproduced here.
//!
//! # Drawing
//!
//! There are multiple ways to put / draw pixels including:
//!
//! - Scanline Renderers
//! - Antialiased or Aliased (Binary)
//! - Outline Renderer, possibly with Images
//! - Raw Pixel Manipulation
//!
//! # Scanline Renderer
//!
//! The multitude of renderers here include [`render_scanlines`],
//! [`render_all_paths`], [`render_scanlines_aa_solid`] and
//! [`render_scanlines_bin_solid`]
//!
//! use agg::Render;
//!
//! // Create a blank image 10x10 pixels
//! let pix = agg::Pixfmt::<agg::Rgb8>::new(100,100);
//! let mut ren_base = agg::RenderingBase::new(pix);
//! ren_base.clear(agg::Rgba8::white());
//!
//! // Draw a polygon from (10,10) - (50,90) - (90,10)
//! let mut ras = agg::RasterizerScanline::new();
//! ras.move_to(10.0, 10.0);
//! ras.line_to(50.0, 90.0);
//! ras.line_to(90.0, 10.0);
//!
//! // Render the line to the image
//! let mut ren = agg::RenderingScanlineAASolid::with_base(&mut ren_base);
//! ren.color(agg::Rgba8::black());
//! agg::render_scanlines(&mut ras, &mut ren);
//!
//! // Save the image to a file
//! ren_base.to_file("little_black_triangle.png").unwrap();
//!
//!
//! # Outline AntiAlias Renderer
//!
//! use agg::{Pixfmt,Rgb8,Rgba8,RenderingBase,DrawOutline};
//! use agg::{RendererOutlineAA,RasterizerOutlineAA};
//! let pix = Pixfmt::<Rgb8>::new(100,100);
//! let mut ren_base = agg::RenderingBase::new(pix);
//! ren_base.clear( Rgba8::new(255, 255, 255, 255) );
//!
//! let mut ren = RendererOutlineAA::with_base(&mut ren_base);
//! ren.color(agg::Rgba8::new(102,77,26,255));
//! ren.width(3.0);
//!
//! let mut path = agg::Path::new();
//! path.move_to(10.0, 10.0);
//! path.line_to(50.0, 90.0);
//! path.line_to(90.0, 10.0);
//!
//! let mut ras = RasterizerOutlineAA::with_renderer(&mut ren);
//! ras.add_path(&path);
//! ren_base.to_file("outline_aa.png").unwrap();
//!
//! # Primitive Renderer
//!
//! Renderer for primitive shapes: lines, rectangles, and ellipses; filled or
//! outlined. (The API type names retain the library's original "Primative" spelling.)
//!
//! use agg::{Pixfmt,Rgb8,Rgba8,RenderingBase,DrawOutline};
//! use agg::{RendererPrimatives,RasterizerOutline};
//!
//! let pix = Pixfmt::<Rgb8>::new(100,100);
//! let mut ren_base = agg::RenderingBase::new(pix);
//! ren_base.clear( Rgba8::new(255, 255, 255, 255) );
//!
//! let mut ren = RendererPrimatives::with_base(&mut ren_base);
//! ren.line_color(agg::Rgba8::new(0,0,0,255));
//!
//! let mut path = agg::Path::new();
//! path.move_to(10.0, 10.0);
//! path.line_to(50.0, 90.0);
//! path.line_to(90.0, 10.0);
//!
//! let mut ras = RasterizerOutline::with_primative(&mut ren);
//! ras.add_path(&path);
//! ren_base.to_file("primative.png").unwrap();
//!
//!
//! # Raw Pixel Manipulation
//!
//! **Note:** Functions here are a somewhat low level interface and probably not what
//! you want to use.
//!
//! Functions to set pixel color through [`Pixfmt`] are [`clear`], [`set`], [`copy_pixel`],
//! [`copy_hline`], [`copy_vline`], [`fill`]
//!
//! Functions to blend colors with existing pixels through [`Pixfmt`] are [`copy_or_blend_pix`], [`copy_or_blend_pix_with_cover`], [`blend_hline`], [`blend_vline`], [`blend_solid_hspan`], [`blend_solid_vspan`], [`blend_color_hspan`], [`blend_color_vspan`]
//!
//!
//! [`Pixfmt`]: pixfmt/struct.Pixfmt.html
//! [`clear`]: pixfmt/struct.Pixfmt.html#method.clear
//! [`set`]: pixfmt/struct.Pixfmt.html#method.set
//! [`copy_pixel`]: pixfmt/struct.Pixfmt.html#method.copy_pixel
//! [`copy_hline`]: pixfmt/struct.Pixfmt.html#method.copy_hline
//! [`copy_vline`]: pixfmt/struct.Pixfmt.html#method.copy_vline
//! [`fill`]: pixfmt/trait.PixelDraw.html#method.fill
//! [`copy_or_blend_pix`]: pixfmt/trait.PixelDraw.html#method.copy_or_blend_pix
//! [`copy_or_blend_pix_with_cover`]: pixfmt/trait.PixelDraw.html#method.copy_or_blend_pix_with_cover
//! [`blend_hline`]: pixfmt/trait.PixelDraw.html#method.blend_hline
//! [`blend_vline`]: pixfmt/trait.PixelDraw.html#method.blend_vline
//! [`blend_solid_hspan`]: pixfmt/trait.PixelDraw.html#method.blend_solid_hspan
//! [`blend_solid_vspan`]: pixfmt/trait.PixelDraw.html#method.blend_solid_vspan
//! [`blend_color_hspan`]: pixfmt/trait.PixelDraw.html#method.blend_color_hspan
//! [`blend_color_vspan`]: pixfmt/trait.PixelDraw.html#method.blend_color_vspan
//! [`render_scanlines`]: render/fn.render_scanlines.html
//! [`render_all_paths`]: render/fn.render_all_paths.html
//! [`render_scanlines_aa_solid`]: render/fn.render_scanlines_aa_solid.html
//! [`render_scanlines_bin_solid`]: render/fn.render_scanlines_bin_solid.html
use std::fmt::Debug;
#[doc(hidden)]
pub use freetype as ft;
pub mod paths;
pub mod stroke;
pub mod transform;
pub mod color;
pub mod pixfmt;
pub mod base;
pub mod clip;
pub mod raster;
pub mod ppm;
pub mod alphamask;
pub mod render;
pub mod text;
pub mod outline;
pub mod outline_aa;
pub mod line_interp;
pub mod math;
pub(crate) mod scan;
pub(crate) mod buffer;
pub(crate) mod cell;
pub mod gallery;
#[doc(hidden)]
pub use crate::paths::*;
#[doc(hidden)]
pub use crate::stroke::*;
#[doc(hidden)]
pub use crate::transform::*;
#[doc(hidden)]
pub use crate::color::*;
#[doc(hidden)]
pub use crate::pixfmt::*;
#[doc(hidden)]
pub use crate::base::*;
#[doc(hidden)]
pub use crate::clip::*;
#[doc(hidden)]
pub use crate::raster::*;
#[doc(hidden)]
pub use crate::alphamask::*;
#[doc(hidden)]
pub use crate::render::*;
#[doc(hidden)]
pub use crate::text::*;
#[doc(hidden)]
pub use crate::line_interp::*;
#[doc(hidden)]
pub use crate::outline::*;
#[doc(hidden)]
pub use crate::outline_aa::*;
// Fixed-point subpixel parameters: coordinates carry 8 fractional bits.
const POLY_SUBPIXEL_SHIFT : i64 = 8;
// One full pixel in subpixel units (1 << 8 = 256).
const POLY_SUBPIXEL_SCALE : i64 = 1<<POLY_SUBPIXEL_SHIFT;
// Mask selecting the fractional (subpixel) part of a coordinate.
const POLY_SUBPIXEL_MASK  : i64 = POLY_SUBPIXEL_SCALE - 1;
// Reduced-precision subpixel shift (4 fractional bits).
const POLY_MR_SUBPIXEL_SHIFT : i64 = 4;
// Upper bound on line half-width used by the outline renderers.
const MAX_HALF_WIDTH : usize = 64;
/// Source of vertex points
///
/// Implemented by path-like types that can yield their vertices.
pub trait VertexSource {
    /// Rewind the vertex source (unused)
    fn rewind(&self) { }
    /// Get values from the source
    ///
    /// Returns all vertices at once; this could be turned into an iterator
    fn xconvert(&self) -> Vec<Vertex<f64>>;
}
/// Access Color properties and components
pub trait Color: Debug + Copy {
    /// Get red value [0,1] as f64
    fn red(&self) -> f64;
    /// Get green value [0,1] as f64
    fn green(&self) -> f64;
    /// Get blue value [0,1] as f64
    fn blue(&self) -> f64;
    /// Get alpha value [0,1] as f64
    fn alpha(&self) -> f64;
    /// Get red value [0,255] as u8
    fn red8(&self) -> u8;
    /// Get green value [0,255] as u8
    fn green8(&self) -> u8;
    /// Get blue value [0,255] as u8
    fn blue8(&self) -> u8;
    /// Get alpha value [0,255] as u8
    fn alpha8(&self) -> u8;
    /// Return if the color is completely transparent, alpha = 0.0
    fn is_transparent(&self) -> bool { self.alpha() == 0.0 }
    /// Return if the color is completely opaque, alpha = 1.0
    fn is_opaque(&self) -> bool { self.alpha() >= 1.0 }
    /// Return if the color has been premultiplied
    fn is_premultiplied(&self) -> bool;
}
/// Render scanlines to Image
pub trait Render {
    /// Render a single scanline to the image
    fn render(&mut self, data: &RenderData);
    /// Set the Color of the Renderer
    fn color<C: Color>(&mut self, color: C);
    /// Prepare the Renderer (default: no-op)
    fn prepare(&self) { }
}
/*
/// Rasterize lines, path, and other things to scanlines
pub trait Rasterize {
/// Setup Rasterizer, returns if data is available
fn rewind_scanlines(&mut self) -> bool;
/// Sweeps cells in a scanline for data, returns if data is available
fn sweep_scanline(&mut self, sl: &mut ScanlineU8) -> bool;
    /// Return minimum x value of rasterizer
fn min_x(&self) -> i64;
/// Return maximum x value of rasterizer
fn max_x(&self) -> i64;
/// Resets the rasterizer, clearing content
fn reset(&mut self);
/// Rasterize a path
fn add_path<VS: VertexSource>(&mut self, path: &VS);
}
*/
/// Access Pixel source color
pub trait Source {
    /// Get the color at pixel coordinate `id` = (x, y)
    fn get(&self, id: (usize, usize)) -> Rgba8;
}
/// Drawing and pixel related routines
///
/// Implemented by pixel formats. Provides raw buffer access plus default
/// blending helpers built on top of [`set`] and [`blend_pix`].
pub trait Pixel {
    /// Cover value representing full coverage
    /// (compared against `cover` in the blend helpers; presumably 255
    /// for 8-bit formats — confirm per implementor)
    fn cover_mask() -> u64;
    /// Pixel depth of the format (bpp; bits vs bytes not visible here —
    /// confirm against implementors)
    fn bpp() -> usize;
    /// Raw image buffer as bytes
    fn as_bytes(&self) -> &[u8];
    /// Write the image to `filename`
    fn to_file<P: AsRef<std::path::Path>>(&self, filename: P) -> Result<(),std::io::Error>;
    /// Image width in pixels
    fn width(&self) -> usize;
    /// Image height in pixels
    fn height(&self) -> usize;
    /// Set the pixel at `id` = (x, y) to color `c` (no blending)
    fn set<C: Color>(&mut self, id: (usize, usize), c: C);
    /// Set `n` consecutive pixels starting at `id` to color `c`
    fn setn<C: Color>(&mut self, id: (usize, usize), n: usize, c: C);
    /// Blend color `c` into the pixel at `id` with coverage `cover`
    fn blend_pix<C: Color>(&mut self, id: (usize, usize), c: C, cover: u64);
    /// Fill the data with the specified `color`
    fn fill<C: Color>(&mut self, color: C);
    /// Copy or blend a pixel at `id` with `color`
    ///
    /// If `color` [`is_opaque`], the color is copied directly to the pixel,
    /// otherwise the color is blended with the pixel at `id`
    ///
    /// If `color` [`is_transparent`] nothing is done
    ///
    /// [`is_opaque`]: ../trait.Color.html#method.is_opaque
    /// [`is_transparent`]: ../trait.Color.html#method.is_transparent
    fn copy_or_blend_pix<C: Color>(&mut self, id: (usize,usize), color: C) {
        if ! color.is_transparent() {
            if color.is_opaque() {
                self.set(id, color);
            } else {
                // full 8-bit cover when none is supplied
                self.blend_pix(id, color, 255);
            }
        }
    }
    /// Copy or blend a pixel at `id` with `color` and a `cover`
    ///
    /// If `color` [`is_opaque`] *and* `cover` equals [`cover_mask`] then
    /// the color is copied to the pixel at `id`, otherwise the `color`
    /// is blended with the pixel at `id` considering the amount of `cover`
    ///
    /// If `color` [`is_transparent`] nothing is done
    ///
    /// use agg::{Source,Pixfmt,Rgb8,Rgba8,Pixel};
    ///
    /// let mut pix = Pixfmt::<Rgb8>::new(1,1);
    /// let black  = Rgba8::black();
    /// let white  = Rgba8::white();
    /// pix.copy_pixel(0,0,black);
    /// assert_eq!(pix.get((0,0)), black);
    ///
    /// let (alpha, cover) = (255, 255); // Copy Pixel
    /// let color = Rgba8::new(255,255,255,alpha);
    /// pix.copy_or_blend_pix_with_cover((0,0), color, cover);
    /// assert_eq!(pix.get((0,0)), white);
    ///
    /// let (alpha, cover) = (255, 128); // Partial Coverage, Blend
    /// let color = Rgba8::new(255,255,255,alpha);
    /// pix.copy_pixel(0,0,black);
    /// pix.copy_or_blend_pix_with_cover((0,0), color, cover);
    /// assert_eq!(pix.get((0,0)), Rgba8::new(128,128,128,255));
    ///
    /// let (alpha, cover) = (128, 255); // Partial Coverage, Blend
    /// let color = Rgba8::new(255,255,255,alpha);
    /// pix.copy_pixel(0,0,black);
    /// pix.copy_or_blend_pix_with_cover((0,0), color, cover);
    /// assert_eq!(pix.get((0,0)), Rgba8::new(128,128,128,255));
    ///
    /// [`is_opaque`]: ../trait.Color.html#method.is_opaque
    /// [`is_transparent`]: ../trait.Color.html#method.is_transparent
    /// [`cover_mask`]: ../trait.Pixel.html#method.cover_mask
    ///
    fn copy_or_blend_pix_with_cover<C: Color>(&mut self, id: (usize,usize), color: C, cover: u64) {
        if ! color.is_transparent() {
            if color.is_opaque() && cover == Self::cover_mask() {
                self.set(id, color);
            } else {
                self.blend_pix(id, color, cover);
            }
        }
    }
    /// Copy or Blend a single `color` from (`x`,`y`) to (`x+len-1`,`y`)
    /// with `cover`
    ///
    /// NOTE(review): casts assume `x`, `y`, `len` are non-negative —
    /// negative values would wrap; confirm callers pre-clip.
    fn blend_hline<C: Color>(&mut self, x: i64, y: i64, len: i64, color: C, cover: u64) {
        if color.is_transparent() {
            return;
        }
        let (x,y,len) = (x as usize, y as usize, len as usize);
        if color.is_opaque() && cover == Self::cover_mask() {
            // fast path: straight copy of the whole run
            self.setn((x,y), len, color);
        } else {
            for i in 0 .. len {
                self.blend_pix((x+i,y),color,cover);
            }
        }
    }
    /// Blend a single `color` from (`x`,`y`) to (`x+len-1`,`y`) with collection
    /// of `covers`
    fn blend_solid_hspan<C: Color>(&mut self, x: i64, y: i64, len: i64, color: C, covers: &[u64]) {
        assert_eq!(len as usize, covers.len());
        for (i, &cover) in covers.iter().enumerate() {
            self.blend_hline(x+i as i64,y,1,color,cover);
        }
    }
    /// Copy or Blend a single `color` from (`x`,`y`) to (`x`,`y+len-1`)
    /// with `cover`
    ///
    fn blend_vline<C: Color>(&mut self, x: i64, y: i64, len: i64, c: C, cover: u64) {
        if c.is_transparent() {
            return;
        }
        let (x,y,len) = (x as usize, y as usize, len as usize);
        if c.is_opaque() && cover == Self::cover_mask() {
            for i in 0 .. len {
                self.set((x,y+i),c);
            }
        } else {
            for i in 0 .. len {
                self.blend_pix((x,y+i),c,cover);
            }
        }
    }
    /// Blend a single `color` from (`x`,`y`) to (`x`,`y+len-1`) with collection
    /// of `covers`
    fn blend_solid_vspan<C: Color>(&mut self, x: i64, y: i64, len: i64, c: C, covers: &[u64]){
        assert_eq!(len as usize, covers.len());
        for (i, &cover) in covers.iter().enumerate() {
            self.blend_vline(x,y+i as i64,1,c,cover);
        }
    }
    /// Blend a collection of `colors` from (`x`,`y`) to (`x+len-1`,`y`) with
    /// either a collection of `covers` or a single `cover`
    ///
    /// A collection of `covers` takes precedence over a single `cover`
    fn blend_color_hspan<C: Color>(&mut self, x: i64, y: i64, len: i64, colors: &[C], covers: &[u64], cover: u64) {
        assert_eq!(len as usize, colors.len());
        let (x,y) = (x as usize, y as usize);
        if ! covers.is_empty() {
            // per-pixel cover values
            assert_eq!(colors.len(), covers.len());
            for (i,(&color,&cover)) in colors.iter().zip(covers.iter()).enumerate() {
                self.copy_or_blend_pix_with_cover((x+i,y), color, cover);
            }
        } else if cover == 255 {
            // full cover: let per-pixel opacity decide copy vs blend
            for (i,&color) in colors.iter().enumerate() {
                self.copy_or_blend_pix((x+i,y), color);
            }
        } else {
            for (i,&color) in colors.iter().enumerate() {
                self.copy_or_blend_pix_with_cover((x+i,y), color, cover);
            }
        }
    }
    /// Blend a collection of `colors` from (`x`,`y`) to (`x`,`y+len-1`) with
    /// either a collection of `covers` or a single `cover`
    ///
    /// A collection of `covers` takes precedence over a single `cover`
    fn blend_color_vspan<C: Color>(&mut self, x: i64, y: i64, len: i64, colors: &[C], covers: &[u64], cover: u64) {
        assert_eq!(len as usize, colors.len());
        let (x,y) = (x as usize, y as usize);
        if ! covers.is_empty() {
            assert_eq!(colors.len(), covers.len());
            for (i,(&color,&cover)) in colors.iter().zip(covers.iter()).enumerate() {
                self.copy_or_blend_pix_with_cover((x,y+i), color, cover);
            }
        } else if cover == 255 {
            for (i,&color) in colors.iter().enumerate() {
                self.copy_or_blend_pix((x,y+i), color);
            }
        } else {
            for (i,&color) in colors.iter().enumerate() {
                self.copy_or_blend_pix_with_cover((x,y+i), color, cover);
            }
        }
    }
}
/// Stepping interface shared by the line interpolators.
pub(crate) trait LineInterp {
    /// Prepare internal state before the first step.
    fn init(&mut self);
    /// Advance one step along the horizontal axis.
    fn step_hor(&mut self);
    /// Advance one step along the vertical axis.
    fn step_ver(&mut self);
}
/// Rendering callbacks required by the outline rasterizer.
pub(crate) trait RenderOutline {
    /// Map a (subpixel) distance `d` to a coverage value.
    fn cover(&self, d: i64) -> u64;
    /// Blend `covers` into the horizontal run starting at (`x`,`y`).
    fn blend_solid_hspan(&mut self, x: i64, y: i64, len: i64, covers: &[u64]);
    /// Blend `covers` into the vertical run starting at (`x`,`y`).
    fn blend_solid_vspan(&mut self, x: i64, y: i64, len: i64, covers: &[u64]);
}
/// Functions for Drawing Outlines
//pub trait DrawOutline: Lines + AccurateJoins + SetColor {}
pub trait DrawOutline {
    /// Set the current Color
    fn color<C: Color>(&mut self, color: C);
    /// If Line Joins are Accurate
    fn accurate_join_only(&self) -> bool;
    /// Draw the line described by `lp`
    fn line0(&mut self, lp: &LineParameters);
    /// Draw the line described by `lp` using the start point (`sx`,`sy`)
    fn line1(&mut self, lp: &LineParameters, sx: i64, sy: i64);
    /// Draw the line described by `lp` using the end point (`ex`,`ey`)
    fn line2(&mut self, lp: &LineParameters, ex: i64, ey: i64);
    /// Draw the line described by `lp` using both the start (`sx`,`sy`)
    /// and end (`ex`,`ey`) points
    fn line3(&mut self, lp: &LineParameters, sx: i64, sy: i64, ex: i64, ey: i64);
    /// Draw a semidot between (`xc1`,`yc1`) and (`xc2`,`yc2`); `cmp`
    /// selects which distances are covered
    fn semidot<F>(&mut self, cmp: F, xc1: i64, yc1: i64, xc2: i64, yc2: i64) where F: Fn(i64) -> bool;
    /// Draw a pie slice centered at (`xc`,`yc`) spanning (`x1`,`y1`) to
    /// (`x2`,`y2`). (Second parameter renamed `y` -> `yc` for
    /// consistency with `xc`; Rust trait-method parameter names do not
    /// affect implementors or callers.)
    fn pie(&mut self, xc: i64, yc: i64, x1: i64, y1: i64, x2: i64, y2: i64);
}
/// Incrementally maintained distance from a line, updated as the
/// rasterizer steps one pixel at a time.
pub(crate) trait DistanceInterpolator {
    /// Current distance value.
    fn dist(&self) -> i64;
    /// Update the distance after stepping +1 in x.
    fn inc_x(&mut self, dy: i64);
    /// Update the distance after stepping +1 in y.
    fn inc_y(&mut self, dx: i64);
    /// Update the distance after stepping -1 in x.
    fn dec_x(&mut self, dy: i64);
    /// Update the distance after stepping -1 in y.
    fn dec_y(&mut self, dx: i64);
}
|
/*!
```rudra-poc
[target]
crate = "through"
version = "0.1.0"
[report]
issue_url = "https://github.com/gretchenfrage/through/issues/1"
issue_date = 2021-02-18
rustsec_url = "https://github.com/RustSec/advisory-db/pull/850"
rustsec_id = "RUSTSEC-2021-0049"
[[bugs]]
analyzer = "UnsafeDataflow"
bug_class = "PanicSafety"
bug_count = 2
rudra_report_locations = ["src/lib.rs:5:1: 12:2", "src/lib.rs:16:1: 24:2"]
```
!*/
#![forbid(unsafe_code)]
use through::through;
fn main() {
    // Proof of concept for RUSTSEC-2021-0049 (see the rudra-poc header
    // above): `through` moves the String out of `hello` by value, and the
    // deliberate panic fires before the value is written back, exercising
    // the panic-safety bug. The unreachable `s` after `panic!` is
    // intentional so the closure still type-checks as returning String.
    let mut hello = String::from("Hello");
    let object = through(&mut hello, |mut s| {
        s.push_str(" World!");
        panic!("Unexpected panic");
        s
    });
    dbg!(object);
}
|
use from_file::FromFile;
use presets::m2::module_meta_data::ModuleData;
use presets::m2::requirejs_config::RequireJsClientConfig;
/// Seed data loaded from disk via the blanket `FromFile` implementation.
#[derive(Serialize, Deserialize, Default)]
pub struct SeedData {
    // RequireJS client configuration (see `RequireJsClientConfig`).
    pub rjs_client_config: RequireJsClientConfig,
    // Modules captured from the request log.
    pub req_log: Vec<ModuleData>,
}
// No overrides needed: `FromFile`'s default behavior is sufficient.
impl FromFile for SeedData {}
|
use super::Sha256;
#[cfg(target_arch = "x86")]
use core::arch::x86::*;
#[cfg(target_arch = "x86_64")]
use core::arch::x86_64::*;
// Intel® Architecture Instruction Set Extensions ProgrammingReference
//
// CHAPTER 8 INTEL® SHA EXTENSIONS
// https://software.intel.com/sites/default/files/managed/07/b7/319433-023.pdf
/// Applies the SHA-256 compression function to `state` for one 64-byte
/// `block`, using the SHA-NI implementation below.
///
/// NOTE(review): this safe wrapper calls the `#[target_feature(enable =
/// "sha")]` function unconditionally — on a CPU without the SHA
/// extensions this is undefined behavior. Confirm callers gate on
/// runtime feature detection before reaching this function.
#[inline(always)]
pub fn transform(state: &mut [u32; 8], block: &[u8]) {
    unsafe { transform_simd(state, block) }
}
/// SHA-256 compression function built on the Intel SHA-NI intrinsics
/// (`sha256rnds2`, `sha256msg1`, `sha256msg2`).
///
/// # Safety
///
/// The CPU must support the `sha` instruction set extension (the routine
/// also uses SSE2/SSE4.1 intrinsics such as `_mm_blend_epi16`); calling
/// it on hardware without these features is undefined behavior.
#[target_feature(enable = "sha")]
unsafe fn transform_simd(state: &mut [u32; 8], block: &[u8]) {
    // https://docs.rs/sha2ni/0.8.5/src/sha2ni/sha256_intrinsics.rs.html
    //
    // Process a block with the SHA-256 algorithm.
    // Based on https://github.com/noloader/SHA-Intrinsics/blob/master/sha256-x86.c
    debug_assert_eq!(state.len(), 8);
    debug_assert_eq!(block.len(), Sha256::BLOCK_LEN);
    let mut state0: __m128i;
    let mut state1: __m128i;
    let mut msg: __m128i;
    let mut tmp: __m128i;
    let mut msg0: __m128i;
    let mut msg1: __m128i;
    let mut msg2: __m128i;
    let mut msg3: __m128i;
    let abef_save: __m128i;
    let cdgh_save: __m128i;
    // Byte-shuffle mask that byte-swaps each 32-bit lane: SHA-256
    // message words are big-endian in the input block.
    #[allow(non_snake_case)]
    let MASK: __m128i = _mm_set_epi64x(
        0x0c0d_0e0f_0809_0a0bu64 as i64,
        0x0405_0607_0001_0203u64 as i64,
    );
    // Load initial values
    tmp = _mm_loadu_si128(state.as_ptr().add(0) as *const __m128i);
    state1 = _mm_loadu_si128(state.as_ptr().add(4) as *const __m128i);
    tmp = _mm_shuffle_epi32(tmp, 0xB1); // CDAB
    state1 = _mm_shuffle_epi32(state1, 0x1B); // EFGH
    state0 = _mm_alignr_epi8(tmp, state1, 8); // ABEF
    state1 = _mm_blend_epi16(state1, tmp, 0xF0); // CDGH
    // Save current state
    abef_save = state0;
    cdgh_save = state1;
    // Rounds 0-3 (the i64 constants in each round are the round
    // constants K packed two words per lane)
    msg = _mm_loadu_si128(block.as_ptr() as *const __m128i);
    msg0 = _mm_shuffle_epi8(msg, MASK);
    msg = _mm_add_epi32(
        msg0,
        _mm_set_epi64x(0xE9B5DBA5B5C0FBCFu64 as i64, 0x71374491428A2F98u64 as i64),
    );
    state1 = _mm_sha256rnds2_epu32(state1, state0, msg);
    msg = _mm_shuffle_epi32(msg, 0x0E);
    state0 = _mm_sha256rnds2_epu32(state0, state1, msg);
    // Rounds 4-7
    msg1 = _mm_loadu_si128(block.as_ptr().add(16) as *const __m128i);
    msg1 = _mm_shuffle_epi8(msg1, MASK);
    msg = _mm_add_epi32(
        msg1,
        _mm_set_epi64x(0xAB1C5ED5923F82A4u64 as i64, 0x59F111F13956C25Bu64 as i64),
    );
    state1 = _mm_sha256rnds2_epu32(state1, state0, msg);
    msg = _mm_shuffle_epi32(msg, 0x0E);
    state0 = _mm_sha256rnds2_epu32(state0, state1, msg);
    msg0 = _mm_sha256msg1_epu32(msg0, msg1);
    // Rounds 8-11
    msg2 = _mm_loadu_si128(block.as_ptr().add(32) as *const __m128i);
    msg2 = _mm_shuffle_epi8(msg2, MASK);
    msg = _mm_add_epi32(
        msg2,
        _mm_set_epi64x(0x550C7DC3243185BEu64 as i64, 0x12835B01D807AA98u64 as i64),
    );
    state1 = _mm_sha256rnds2_epu32(state1, state0, msg);
    msg = _mm_shuffle_epi32(msg, 0x0E);
    state0 = _mm_sha256rnds2_epu32(state0, state1, msg);
    msg1 = _mm_sha256msg1_epu32(msg1, msg2);
    // Rounds 12-15
    msg3 = _mm_loadu_si128(block.as_ptr().add(48) as *const __m128i);
    msg3 = _mm_shuffle_epi8(msg3, MASK);
    msg = _mm_add_epi32(
        msg3,
        _mm_set_epi64x(0xC19BF1749BDC06A7u64 as i64, 0x80DEB1FE72BE5D74u64 as i64),
    );
    state1 = _mm_sha256rnds2_epu32(state1, state0, msg);
    tmp = _mm_alignr_epi8(msg3, msg2, 4);
    msg0 = _mm_add_epi32(msg0, tmp);
    msg0 = _mm_sha256msg2_epu32(msg0, msg3);
    msg = _mm_shuffle_epi32(msg, 0x0E);
    state0 = _mm_sha256rnds2_epu32(state0, state1, msg);
    msg2 = _mm_sha256msg1_epu32(msg2, msg3);
    // Rounds 16-19
    msg = _mm_add_epi32(
        msg0,
        _mm_set_epi64x(0x240CA1CC0FC19DC6u64 as i64, 0xEFBE4786E49B69C1u64 as i64),
    );
    state1 = _mm_sha256rnds2_epu32(state1, state0, msg);
    tmp = _mm_alignr_epi8(msg0, msg3, 4);
    msg1 = _mm_add_epi32(msg1, tmp);
    msg1 = _mm_sha256msg2_epu32(msg1, msg0);
    msg = _mm_shuffle_epi32(msg, 0x0E);
    state0 = _mm_sha256rnds2_epu32(state0, state1, msg);
    msg3 = _mm_sha256msg1_epu32(msg3, msg0);
    // Rounds 20-23
    msg = _mm_add_epi32(
        msg1,
        _mm_set_epi64x(0x76F988DA5CB0A9DCu64 as i64, 0x4A7484AA2DE92C6Fu64 as i64),
    );
    state1 = _mm_sha256rnds2_epu32(state1, state0, msg);
    tmp = _mm_alignr_epi8(msg1, msg0, 4);
    msg2 = _mm_add_epi32(msg2, tmp);
    msg2 = _mm_sha256msg2_epu32(msg2, msg1);
    msg = _mm_shuffle_epi32(msg, 0x0E);
    state0 = _mm_sha256rnds2_epu32(state0, state1, msg);
    msg0 = _mm_sha256msg1_epu32(msg0, msg1);
    // Rounds 24-27
    msg = _mm_add_epi32(
        msg2,
        _mm_set_epi64x(0xBF597FC7B00327C8u64 as i64, 0xA831C66D983E5152u64 as i64),
    );
    state1 = _mm_sha256rnds2_epu32(state1, state0, msg);
    tmp = _mm_alignr_epi8(msg2, msg1, 4);
    msg3 = _mm_add_epi32(msg3, tmp);
    msg3 = _mm_sha256msg2_epu32(msg3, msg2);
    msg = _mm_shuffle_epi32(msg, 0x0E);
    state0 = _mm_sha256rnds2_epu32(state0, state1, msg);
    msg1 = _mm_sha256msg1_epu32(msg1, msg2);
    // Rounds 28-31
    msg = _mm_add_epi32(
        msg3,
        _mm_set_epi64x(0x1429296706CA6351u64 as i64, 0xD5A79147C6E00BF3u64 as i64),
    );
    state1 = _mm_sha256rnds2_epu32(state1, state0, msg);
    tmp = _mm_alignr_epi8(msg3, msg2, 4);
    msg0 = _mm_add_epi32(msg0, tmp);
    msg0 = _mm_sha256msg2_epu32(msg0, msg3);
    msg = _mm_shuffle_epi32(msg, 0x0E);
    state0 = _mm_sha256rnds2_epu32(state0, state1, msg);
    msg2 = _mm_sha256msg1_epu32(msg2, msg3);
    // Rounds 32-35
    msg = _mm_add_epi32(
        msg0,
        _mm_set_epi64x(0x53380D134D2C6DFCu64 as i64, 0x2E1B213827B70A85u64 as i64),
    );
    state1 = _mm_sha256rnds2_epu32(state1, state0, msg);
    tmp = _mm_alignr_epi8(msg0, msg3, 4);
    msg1 = _mm_add_epi32(msg1, tmp);
    msg1 = _mm_sha256msg2_epu32(msg1, msg0);
    msg = _mm_shuffle_epi32(msg, 0x0E);
    state0 = _mm_sha256rnds2_epu32(state0, state1, msg);
    msg3 = _mm_sha256msg1_epu32(msg3, msg0);
    // Rounds 36-39
    msg = _mm_add_epi32(
        msg1,
        _mm_set_epi64x(0x92722C8581C2C92Eu64 as i64, 0x766A0ABB650A7354u64 as i64),
    );
    state1 = _mm_sha256rnds2_epu32(state1, state0, msg);
    tmp = _mm_alignr_epi8(msg1, msg0, 4);
    msg2 = _mm_add_epi32(msg2, tmp);
    msg2 = _mm_sha256msg2_epu32(msg2, msg1);
    msg = _mm_shuffle_epi32(msg, 0x0E);
    state0 = _mm_sha256rnds2_epu32(state0, state1, msg);
    msg0 = _mm_sha256msg1_epu32(msg0, msg1);
    // Rounds 40-43
    msg = _mm_add_epi32(
        msg2,
        _mm_set_epi64x(0xC76C51A3C24B8B70u64 as i64, 0xA81A664BA2BFE8A1u64 as i64),
    );
    state1 = _mm_sha256rnds2_epu32(state1, state0, msg);
    tmp = _mm_alignr_epi8(msg2, msg1, 4);
    msg3 = _mm_add_epi32(msg3, tmp);
    msg3 = _mm_sha256msg2_epu32(msg3, msg2);
    msg = _mm_shuffle_epi32(msg, 0x0E);
    state0 = _mm_sha256rnds2_epu32(state0, state1, msg);
    msg1 = _mm_sha256msg1_epu32(msg1, msg2);
    // Rounds 44-47
    msg = _mm_add_epi32(
        msg3,
        _mm_set_epi64x(0x106AA070F40E3585u64 as i64, 0xD6990624D192E819u64 as i64),
    );
    state1 = _mm_sha256rnds2_epu32(state1, state0, msg);
    tmp = _mm_alignr_epi8(msg3, msg2, 4);
    msg0 = _mm_add_epi32(msg0, tmp);
    msg0 = _mm_sha256msg2_epu32(msg0, msg3);
    msg = _mm_shuffle_epi32(msg, 0x0E);
    state0 = _mm_sha256rnds2_epu32(state0, state1, msg);
    msg2 = _mm_sha256msg1_epu32(msg2, msg3);
    // Rounds 48-51
    msg = _mm_add_epi32(
        msg0,
        _mm_set_epi64x(0x34B0BCB52748774Cu64 as i64, 0x1E376C0819A4C116u64 as i64),
    );
    state1 = _mm_sha256rnds2_epu32(state1, state0, msg);
    tmp = _mm_alignr_epi8(msg0, msg3, 4);
    msg1 = _mm_add_epi32(msg1, tmp);
    msg1 = _mm_sha256msg2_epu32(msg1, msg0);
    msg = _mm_shuffle_epi32(msg, 0x0E);
    state0 = _mm_sha256rnds2_epu32(state0, state1, msg);
    msg3 = _mm_sha256msg1_epu32(msg3, msg0);
    // Rounds 52-55
    msg = _mm_add_epi32(
        msg1,
        _mm_set_epi64x(0x682E6FF35B9CCA4Fu64 as i64, 0x4ED8AA4A391C0CB3u64 as i64),
    );
    state1 = _mm_sha256rnds2_epu32(state1, state0, msg);
    tmp = _mm_alignr_epi8(msg1, msg0, 4);
    msg2 = _mm_add_epi32(msg2, tmp);
    msg2 = _mm_sha256msg2_epu32(msg2, msg1);
    msg = _mm_shuffle_epi32(msg, 0x0E);
    state0 = _mm_sha256rnds2_epu32(state0, state1, msg);
    // Rounds 56-59
    msg = _mm_add_epi32(
        msg2,
        _mm_set_epi64x(0x8CC7020884C87814u64 as i64, 0x78A5636F748F82EEu64 as i64),
    );
    state1 = _mm_sha256rnds2_epu32(state1, state0, msg);
    tmp = _mm_alignr_epi8(msg2, msg1, 4);
    msg3 = _mm_add_epi32(msg3, tmp);
    msg3 = _mm_sha256msg2_epu32(msg3, msg2);
    msg = _mm_shuffle_epi32(msg, 0x0E);
    state0 = _mm_sha256rnds2_epu32(state0, state1, msg);
    // Rounds 60-63
    msg = _mm_add_epi32(
        msg3,
        _mm_set_epi64x(0xC67178F2BEF9A3F7u64 as i64, 0xA4506CEB90BEFFFAu64 as i64),
    );
    state1 = _mm_sha256rnds2_epu32(state1, state0, msg);
    msg = _mm_shuffle_epi32(msg, 0x0E);
    state0 = _mm_sha256rnds2_epu32(state0, state1, msg);
    // Combine state
    state0 = _mm_add_epi32(state0, abef_save);
    state1 = _mm_add_epi32(state1, cdgh_save);
    tmp = _mm_shuffle_epi32(state0, 0x1B); // FEBA
    state1 = _mm_shuffle_epi32(state1, 0xB1); // DCHG
    state0 = _mm_blend_epi16(tmp, state1, 0xF0); // DCBA
    state1 = _mm_alignr_epi8(state1, tmp, 8); // ABEF
    // Save state
    _mm_storeu_si128(state.as_mut_ptr().add(0) as *mut __m128i, state0);
    _mm_storeu_si128(state.as_mut_ptr().add(4) as *mut __m128i, state1);
}
|
use serde::{Deserialize, Serialize};
use syn;
/// Reduced form of a function signature: only its identifier is kept.
#[derive(Serialize, Deserialize, Debug, Hash, PartialEq, Eq)]
pub struct Signature {
    // Function name, e.g. `foo` for `fn foo(...)`.
    pub name: String,
}
/// Converts a full `syn` function signature into the reduced
/// [`Signature`] (name only).
pub fn syn_sig_to_reduced(sig: &syn::Signature) -> Signature {
    Signature {
        // `Ident` implements `Display`; `to_string()` is the idiomatic
        // equivalent of `format!("{}", ...)`.
        name: sig.ident.to_string(),
    }
}
|
use crate::bundle_producer_election_solver::BundleProducerElectionSolver;
use crate::domain_bundle_proposer::DomainBundleProposer;
use crate::parent_chain::ParentChainInterface;
use crate::utils::OperatorSlotInfo;
use crate::BundleSender;
use codec::Decode;
use domain_runtime_primitives::DomainCoreApi;
use sc_client_api::{AuxStore, BlockBackend};
use sp_api::{NumberFor, ProvideRuntimeApi};
use sp_block_builder::BlockBuilder;
use sp_blockchain::{HashAndNumber, HeaderBackend};
use sp_domains::{
Bundle, BundleProducerElectionApi, DomainId, DomainsApi, OperatorPublicKey, OperatorSignature,
SealedBundleHeader,
};
use sp_keystore::KeystorePtr;
use sp_runtime::traits::{Block as BlockT, One, Saturating, Zero};
use sp_runtime::RuntimeAppPublic;
use std::convert::{AsRef, Into};
use std::marker::PhantomData;
use std::sync::Arc;
use subspace_runtime_primitives::Balance;
use tracing::info;
/// Opaque bundle type produced here, parameterized by the domain block
/// (`Block`) and consensus block (`CBlock`) types.
type OpaqueBundle<Block, CBlock> = sp_domains::OpaqueBundle<
    NumberFor<CBlock>,
    <CBlock as BlockT>::Hash,
    NumberFor<Block>,
    <Block as BlockT>::Hash,
    Balance,
>;
/// Produces, signs and (potentially) broadcasts domain bundles when the
/// operator wins the bundle election for a slot.
pub(super) struct DomainBundleProducer<
    Block,
    CBlock,
    ParentChainBlock,
    Client,
    CClient,
    ParentChain,
    TransactionPool,
> where
    Block: BlockT,
    CBlock: BlockT,
{
    // Domain this producer creates bundles for.
    domain_id: DomainId,
    // Client for the consensus chain (runtime API + headers).
    consensus_client: Arc<CClient>,
    // Client for the domain chain itself.
    client: Arc<Client>,
    // Interface to the parent chain (receipts, best hash, ...).
    parent_chain: ParentChain,
    // Channel for gossiping produced bundles (currently unused, see
    // the commented-out send in `produce_bundle`).
    bundle_sender: Arc<BundleSender<Block, CBlock>>,
    // Keystore holding the operator signing key.
    keystore: KeystorePtr,
    // Solves the per-slot bundle-producer election challenge.
    bundle_producer_election_solver: BundleProducerElectionSolver<Block, CBlock, CClient>,
    // Builds the bundle header and selects extrinsics.
    domain_bundle_proposer: DomainBundleProposer<Block, Client, CBlock, CClient, TransactionPool>,
    _phantom_data: PhantomData<ParentChainBlock>,
}
// Manual `Clone` so that only `ParentChain: Clone` is required; a derive
// would also demand `Clone` bounds on every other type parameter.
impl<Block, CBlock, ParentChainBlock, Client, CClient, ParentChain, TransactionPool> Clone
    for DomainBundleProducer<
        Block,
        CBlock,
        ParentChainBlock,
        Client,
        CClient,
        ParentChain,
        TransactionPool,
    >
where
    Block: BlockT,
    CBlock: BlockT,
    ParentChain: Clone,
{
    fn clone(&self) -> Self {
        Self {
            domain_id: self.domain_id,
            consensus_client: self.consensus_client.clone(),
            client: self.client.clone(),
            parent_chain: self.parent_chain.clone(),
            bundle_sender: self.bundle_sender.clone(),
            keystore: self.keystore.clone(),
            bundle_producer_election_solver: self.bundle_producer_election_solver.clone(),
            domain_bundle_proposer: self.domain_bundle_proposer.clone(),
            _phantom_data: self._phantom_data,
        }
    }
}
impl<Block, CBlock, ParentChainBlock, Client, CClient, ParentChain, TransactionPool>
    DomainBundleProducer<
        Block,
        CBlock,
        ParentChainBlock,
        Client,
        CClient,
        ParentChain,
        TransactionPool,
    >
where
    Block: BlockT,
    CBlock: BlockT,
    ParentChainBlock: BlockT,
    NumberFor<Block>: Into<NumberFor<CBlock>>,
    NumberFor<ParentChainBlock>: Into<NumberFor<Block>>,
    Client: HeaderBackend<Block> + BlockBackend<Block> + AuxStore + ProvideRuntimeApi<Block>,
    Client::Api: BlockBuilder<Block> + DomainCoreApi<Block>,
    CClient: HeaderBackend<CBlock> + ProvideRuntimeApi<CBlock>,
    CClient::Api: DomainsApi<CBlock, NumberFor<Block>, Block::Hash>
        + BundleProducerElectionApi<CBlock, Balance>,
    ParentChain: ParentChainInterface<Block, ParentChainBlock> + Clone,
    TransactionPool: sc_transaction_pool_api::TransactionPool<Block = Block>,
{
    /// Builds a new bundle producer; the election solver is constructed
    /// from the given keystore and consensus client.
    pub(super) fn new(
        domain_id: DomainId,
        consensus_client: Arc<CClient>,
        client: Arc<Client>,
        parent_chain: ParentChain,
        domain_bundle_proposer: DomainBundleProposer<
            Block,
            Client,
            CBlock,
            CClient,
            TransactionPool,
        >,
        bundle_sender: Arc<BundleSender<Block, CBlock>>,
        keystore: KeystorePtr,
    ) -> Self {
        let bundle_producer_election_solver = BundleProducerElectionSolver::<Block, CBlock, _>::new(
            keystore.clone(),
            consensus_client.clone(),
        );
        Self {
            domain_id,
            consensus_client,
            client,
            parent_chain,
            bundle_sender,
            keystore,
            bundle_producer_election_solver,
            domain_bundle_proposer,
        }
    }
    /// Attempts to produce a signed bundle for `slot_info`.
    ///
    /// Returns `Ok(None)` when the slot is skipped (operator lagging
    /// behind the receipt chain), when the bundle election challenge is
    /// not won, or when there is nothing to include (no extrinsics and
    /// no non-empty execution receipt to confirm).
    pub(super) async fn produce_bundle(
        self,
        consensus_block_info: HashAndNumber<CBlock>,
        slot_info: OperatorSlotInfo,
    ) -> sp_blockchain::Result<Option<OpaqueBundle<Block, CBlock>>> {
        let OperatorSlotInfo {
            slot,
            global_randomness,
        } = slot_info;
        // Whether a receipt is already recorded for the local best block;
        // if not, treat the parent of best as the effective best number.
        let best_receipt_is_written = crate::aux_schema::consensus_block_hash_for::<
            _,
            _,
            CBlock::Hash,
        >(&*self.client, self.client.info().best_hash)?
        .is_some();
        // TODO: remove once the receipt generation can be done before the domain block is
        // committed to the database, in other words, only when the receipt of block N+1 has
        // been generated can the `client.info().best_number` be updated from N to N+1.
        //
        // This requires:
        // 1. Reimplement `runtime_api.intermediate_roots()` on the client side.
        // 2. Add a hook before the upstream `client.commit_operation(op)`.
        let domain_best_number = if best_receipt_is_written {
            self.client.info().best_number
        } else {
            self.client.info().best_number.saturating_sub(One::one())
        };
        let parent_chain_best_hash = self.parent_chain.best_hash();
        let should_skip_slot = {
            let head_receipt_number = self
                .parent_chain
                .head_receipt_number(parent_chain_best_hash)?;
            // Operator is lagging behind the receipt chain on its parent chain as another operator
            // already processed a block higher than the local best and submitted the receipt to
            // the parent chain, we ought to catch up with the consensus block processing before
            // producing new bundle.
            !domain_best_number.is_zero() && domain_best_number <= head_receipt_number
        };
        if should_skip_slot {
            tracing::warn!(
                ?domain_best_number,
                "Skipping bundle production on slot {slot}"
            );
            return Ok(None);
        }
        // Only proceed if this operator wins the election for the slot.
        if let Some((proof_of_election, operator_signing_key)) =
            self.bundle_producer_election_solver.solve_challenge(
                slot,
                consensus_block_info.hash,
                self.domain_id,
                global_randomness,
            )?
        {
            tracing::info!("📦 Claimed bundle at slot {slot}");
            let tx_range = self
                .consensus_client
                .runtime_api()
                .domain_tx_range(consensus_block_info.hash, self.domain_id)
                .map_err(|error| {
                    sp_blockchain::Error::Application(Box::from(format!(
                        "Error getting tx range: {error}"
                    )))
                })?;
            let (bundle_header, extrinsics) = self
                .domain_bundle_proposer
                .propose_bundle_at(proof_of_election, self.parent_chain.clone(), tx_range)
                .await?;
            // if there are no extrinsics and no receipts to confirm, skip the bundle
            if extrinsics.is_empty()
                && !self
                    .parent_chain
                    .non_empty_er_exists(parent_chain_best_hash, self.domain_id)?
            {
                tracing::warn!(
                    ?domain_best_number,
                    "Skipping bundle production on slot {slot}"
                );
                return Ok(None);
            }
            info!("🔖 Producing bundle at slot {:?}", slot_info.slot);
            // Sign the bundle header hash with the operator key that won
            // the election.
            let to_sign = bundle_header.hash();
            let signature = self
                .keystore
                .sr25519_sign(
                    OperatorPublicKey::ID,
                    operator_signing_key.as_ref(),
                    to_sign.as_ref(),
                )
                .map_err(|error| {
                    sp_blockchain::Error::Application(Box::from(format!(
                        "Error occurred when signing the bundle: {error}"
                    )))
                })?
                .ok_or_else(|| {
                    sp_blockchain::Error::Application(Box::from(
                        "This should not happen as the existence of key was just checked",
                    ))
                })?;
            let signature = OperatorSignature::decode(&mut signature.as_ref()).map_err(|err| {
                sp_blockchain::Error::Application(Box::from(format!(
                    "Failed to decode the signature of bundle: {err}"
                )))
            })?;
            let bundle = Bundle {
                sealed_header: SealedBundleHeader::new(bundle_header, signature),
                extrinsics,
            };
            // TODO: Re-enable the bundle gossip over X-Net when the compact bundle is supported.
            // if let Err(e) = self.bundle_sender.unbounded_send(signed_bundle.clone()) {
            //     tracing::error!(error = ?e, "Failed to send transaction bundle");
            // }
            Ok(Some(bundle.into_opaque_bundle()))
        } else {
            Ok(None)
        }
    }
}
|
// Copyright 2018 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
extern crate cgmath;
extern crate futures;
extern crate grpcio;
extern crate point_viewer;
extern crate protobuf;
include!(concat!(env!("OUT_DIR"), "/proto.rs"));
include!(concat!(env!("OUT_DIR"), "/proto_grpc.rs"));
use cgmath::{Matrix4, Point3};
use grpcio::{ChannelBuilder, EnvBuilder};
use point_viewer::errors::*;
use point_viewer::math::Cube;
use point_viewer::octree::{NodeData, NodeId, NodeMeta, Octree, PositionEncoding, UseLod,
VisibleNode};
use proto_grpc::OctreeClient;
use std::sync::Arc;
/// `Octree` implementation backed by a remote octree gRPC service.
pub struct GrpcOctree {
    // gRPC client for the octree service.
    client: OctreeClient,
    // Bounding cube of the octree root, fetched once at construction.
    root_bounding_cube: Cube,
}
impl GrpcOctree {
    /// Connects to the octree service at `addr` and fetches the root
    /// bounding cube.
    ///
    /// Panics (via `expect`/`unwrap`) if the RPC fails or the reply is
    /// missing the bounding cube.
    pub fn new(addr: &str) -> Self {
        let env = Arc::new(EnvBuilder::new().build());
        let ch = ChannelBuilder::new(env).connect(addr);
        let client = OctreeClient::new(ch);
        let reply = client
            .get_root_bounding_cube(&proto::GetRootBoundingCubeRequest::new())
            .expect("rpc");
        let root_bounding_cube = {
            let proto = reply.bounding_cube.as_ref().unwrap();
            let min = proto.min.as_ref().unwrap();
            Cube::new(Point3::new(min.x, min.y, min.z), proto.edge_length)
        };
        GrpcOctree {
            client,
            root_bounding_cube,
        }
    }
}
impl Octree for GrpcOctree {
    /// Asks the remote service which nodes are visible for the given
    /// projection and viewport; panics if the RPC fails.
    fn get_visible_nodes(
        &self,
        projection_matrix: &Matrix4<f32>,
        width: i32,
        height: i32,
        _: UseLod,
    ) -> Vec<VisibleNode> {
        // TODO(sirver): remove UseLod from the interface and leave this to the client.
        let mut req = proto::GetVisibleNodesRequest::new();
        // Flatten the 4x4 matrix column by column into the request.
        req.mut_projection_matrix().extend_from_slice(&[
            projection_matrix.x[0],
            projection_matrix.x[1],
            projection_matrix.x[2],
            projection_matrix.x[3],
            projection_matrix.y[0],
            projection_matrix.y[1],
            projection_matrix.y[2],
            projection_matrix.y[3],
            projection_matrix.z[0],
            projection_matrix.z[1],
            projection_matrix.z[2],
            projection_matrix.z[3],
            projection_matrix.w[0],
            projection_matrix.w[1],
            projection_matrix.w[2],
            projection_matrix.w[3],
        ]);
        req.set_width(width);
        req.set_height(height);
        // TODO(sirver): This should most definitively not crash, but instead return an error.
        // Needs changes to the trait though.
        let reply = self.client.get_visible_nodes(&req).expect("rpc");
        let mut result = Vec::new();
        for node in &reply.node_ids {
            result.push(VisibleNode::new(
                NodeId::from_str(&node),
                1, /* level_of_detail */
            ));
        }
        result
    }
    /// Fetches point data for `node_id`; the node's bounding cube is
    /// reconstructed locally from the root cube.
    fn get_node_data(&self, node_id: &NodeId, level_of_detail: i32) -> Result<NodeData> {
        assert_eq!(level_of_detail, 1);
        // TODO(sirver): We ignore 'level_of_detail'. Hoist out of the interface and let the client
        // deal with it.
        let mut req = proto::GetNodeDataRequest::new();
        req.set_id(node_id.to_string());
        // TODO(sirver): This should most definitively not crash, but instead return an error.
        // Needs changes to the trait though.
        let reply = self.client.get_node_data(&req).expect("rpc");
        let node = reply.node.unwrap();
        let result = NodeData {
            position: reply.position,
            color: reply.color,
            meta: NodeMeta {
                num_points: node.num_points,
                position_encoding: PositionEncoding::from_proto(node.position_encoding).unwrap(),
                bounding_cube: node_id.find_bounding_cube(&self.root_bounding_cube),
            },
        };
        Ok(result)
    }
}
|
use crate::components::wiring;
use crate::components::NANDGate;
/// XOR gate built from four NAND gates (the classic 4-NAND construction).
pub struct XORGate {
    pub input1: wiring::Wire,
    pub input2: wiring::Wire,
    pub output: wiring::Wire,
    // Layer 1: NAND(input1, input2).
    nand1_layer1: NANDGate,
    // Layer 2: NAND(input1, layer1) and NAND(layer1, input2).
    nand1_layer2: NANDGate,
    nand2_layer2: NANDGate,
    // Layer 3: NAND of the two layer-2 outputs; drives `output`.
    nand1_layer3: NANDGate,
}
impl Default for XORGate {
fn default() -> Self {
XORGate {
input1: wiring::Wire::default(),
input2: wiring::Wire::default(),
output: wiring::Wire::default(),
nand1_layer1: NANDGate::default(),
nand1_layer2: NANDGate::default(),
nand2_layer2: NANDGate::default(),
nand1_layer3: NANDGate::default(),
}
}
}
impl XORGate {
    /// Re-wires the internal NAND network from the current input wires
    /// and propagates the signals through all four gates.
    ///
    /// Wiring (XOR from four NANDs):
    ///   layer1  = NAND(input1, input2)
    ///   layer2a = NAND(input1, layer1)
    ///   layer2b = NAND(layer1, input2)
    ///   output  = NAND(layer2a, layer2b)
    pub fn settle(&mut self) {
        wiring::connect(&mut self.nand1_layer1.input1, self.input1.clone());
        wiring::connect(&mut self.nand1_layer1.input2, self.input2.clone());
        wiring::connect(&mut self.nand1_layer2.input1, self.input1.clone());
        wiring::connect(&mut self.nand1_layer2.input2, self.nand1_layer1.output.clone());
        wiring::connect(&mut self.nand2_layer2.input1, self.nand1_layer1.output.clone());
        wiring::connect(&mut self.nand2_layer2.input2, self.input2.clone());
        wiring::connect(&mut self.nand1_layer3.input1, self.nand1_layer2.output.clone());
        wiring::connect(&mut self.nand1_layer3.input2, self.nand2_layer2.output.clone());
        wiring::connect(&mut self.nand1_layer3.output, self.output.clone());
        // Settle in dependency order: layer 1, both layer-2 gates, layer 3.
        self.nand1_layer1.settle();
        self.nand1_layer2.settle();
        self.nand2_layer2.settle();
        self.nand1_layer3.settle();
    }
}
/// XOR truth-table tests. `#[cfg(test)]` added so the module (and its
/// `use super::*;`) is only compiled for test builds.
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn false_and_false() {
        let mut xor_gate = XORGate::default();
        wiring::set_low(&mut xor_gate.input1);
        wiring::set_low(&mut xor_gate.input2);
        xor_gate.settle();
        let output_lock = xor_gate.output.lock().unwrap();
        assert_eq!(*output_lock, false);
    }
    #[test]
    fn true_and_false() {
        let mut xor_gate = XORGate::default();
        wiring::set_high(&mut xor_gate.input1);
        wiring::set_low(&mut xor_gate.input2);
        xor_gate.settle();
        let output_lock = xor_gate.output.lock().unwrap();
        assert_eq!(*output_lock, true);
    }
    #[test]
    fn false_and_true() {
        let mut xor_gate = XORGate::default();
        wiring::set_low(&mut xor_gate.input1);
        wiring::set_high(&mut xor_gate.input2);
        xor_gate.settle();
        let output_lock = xor_gate.output.lock().unwrap();
        assert_eq!(*output_lock, true);
    }
    #[test]
    fn true_and_true() {
        let mut xor_gate = XORGate::default();
        wiring::set_high(&mut xor_gate.input1);
        wiring::set_high(&mut xor_gate.input2);
        xor_gate.settle();
        let output_lock = xor_gate.output.lock().unwrap();
        assert_eq!(*output_lock, false);
    }
}
|
use core::{mem, ptr};
use winapi::ctypes::c_void;
use error_code::SystemError;
use crate::SysResult;
// GHND = GMEM_MOVEABLE | GMEM_ZEROINIT (0x40 | 0x02), passed to `GlobalAlloc`.
const GHND: winapi::ctypes::c_uint = 0x42;
// Layout passed to `dealloc` in `free_rust_mem`.
// NOTE(review): this is the layout of a single `u8`; see the FIXME on
// `free_rust_mem` about the mismatch with `new_rust_mem`'s array layout.
const BYTES_LAYOUT: alloc::alloc::Layout = alloc::alloc::Layout::new::<u8>();
/// Cold-path helper returning the default value of `T`; kept out of line
/// so callers' hot paths stay small.
#[cold]
#[inline(never)]
pub fn unlikely_empty_size_result<T: Default>() -> T {
    T::default()
}
/// Cold-path helper fetching the last OS error; kept out of line so
/// callers' hot paths stay small.
#[cold]
#[inline(never)]
pub fn unlikely_last_error() -> SystemError {
    SystemError::last()
}
// Release callback for borrowed memory: intentionally does nothing.
#[inline]
fn noop(_: *mut c_void) {
}
// Release callback for memory obtained from `RawMem::new_rust_mem`.
//
// FIXME(review): deallocates with `BYTES_LAYOUT` (layout of a single
// `u8`), but `new_rust_mem` allocates with `Layout::array::<u8>(size)`.
// `dealloc` requires the same layout that was used for allocation, so
// this is undefined behavior whenever `size != 1`. A proper fix needs
// the allocation size to be carried alongside the pointer.
#[inline]
fn free_rust_mem(data: *mut c_void) {
    unsafe {
        alloc::alloc::dealloc(data as _, BYTES_LAYOUT)
    }
}
// Release callback that unlocks a `GlobalLock`ed handle (used by `RawMem::lock`).
#[inline]
fn unlock_data(data: *mut c_void) {
    unsafe {
        winapi::um::winbase::GlobalUnlock(data);
    }
}
// Release callback for memory obtained from `RawMem::new_global_mem`.
#[inline]
fn free_global_mem(data: *mut c_void) {
    unsafe {
        winapi::um::winbase::GlobalFree(data);
    }
}
/// Minimal scope guard: invokes the stored callback (`self.1`) with the
/// stored value (`self.0`) when dropped.
pub struct Scope<T: Copy>(pub T, pub fn(T));
impl<T: Copy> Drop for Scope<T> {
    #[inline(always)]
    fn drop(&mut self) {
        (self.1)(self.0)
    }
}
/// Pointer-owning wrapper around Rust-allocated, Win32 global, or merely
/// borrowed memory; the inner `Scope` callback decides how (or whether)
/// to release it on drop.
pub struct RawMem(Scope<*mut c_void>);
impl RawMem {
    /// Allocates `size` zeroed bytes via the Rust allocator.
    #[inline(always)]
    pub fn new_rust_mem(size: usize) -> SysResult<Self> {
        let mem = unsafe {
            alloc::alloc::alloc_zeroed(alloc::alloc::Layout::array::<u8>(size).expect("To create layout for bytes"))
        };
        if mem.is_null() {
            Err(unlikely_last_error())
        } else {
            Ok(Self(Scope(mem as _, free_rust_mem)))
        }
    }
    /// Allocates `size` bytes of movable, zero-initialized Win32 global
    /// memory (`GlobalAlloc(GHND, ...)`).
    #[inline(always)]
    pub fn new_global_mem(size: usize) -> SysResult<Self> {
        unsafe {
            let mem = winapi::um::winbase::GlobalAlloc(GHND, size as _);
            if mem.is_null() {
                Err(unlikely_last_error())
            } else {
                Ok(Self(Scope(mem, free_global_mem)))
            }
        }
    }
    /// Wraps a pointer that is owned elsewhere; dropping the result does
    /// not free it (`noop` release callback).
    #[inline(always)]
    pub fn from_borrowed(ptr: ptr::NonNull<c_void>) -> Self {
        Self(Scope(ptr.as_ptr(), noop))
    }
    /// Returns the raw pointer without affecting ownership.
    #[inline(always)]
    pub fn get(&self) -> *mut c_void {
        (self.0).0
    }
    /// Gives up ownership without running the release callback.
    #[inline(always)]
    pub fn release(self) {
        mem::forget(self)
    }
    /// Locks the global-memory handle, returning the locked pointer and a
    /// guard that calls `GlobalUnlock` when dropped.
    pub fn lock(&self) -> SysResult<(ptr::NonNull<c_void>, Scope<*mut c_void>)> {
        let ptr = unsafe {
            winapi::um::winbase::GlobalLock(self.get())
        };
        match ptr::NonNull::new(ptr) {
            Some(ptr) => Ok((ptr, Scope(self.get(), unlock_data))),
            // Use the shared #[cold] helper, consistent with the error
            // paths of `new_rust_mem`/`new_global_mem` (was a direct
            // `error_code::SystemError::last()` call).
            None => Err(unlikely_last_error()),
        }
    }
}
|
/// Builds the full lyrics of "The Twelve Days of Christmas".
///
/// One verse per day, verses separated by a blank line. Each verse lists
/// the day's gifts in descending order; from the second day onward the
/// partridge line is prefixed with "And a". (Previously a stub that
/// returned an empty string.)
fn format_song() -> String {
    const GIFTS: [&str; 12] = [
        "A partridge in a pear tree",
        "Two turtle doves",
        "Three French hens",
        "Four calling birds",
        "Five gold rings",
        "Six geese a laying",
        "Seven swans a swimming",
        "Eight maids a milking",
        "Nine ladies dancing",
        "Ten lords a leaping",
        "Eleven pipers piping",
        "Twelve drummers drumming",
    ];
    const ORDINALS: [&str; 12] = [
        "first", "second", "third", "fourth", "fifth", "sixth", "seventh",
        "eighth", "ninth", "tenth", "eleventh", "twelfth",
    ];
    let mut song = String::new();
    for day in 0..GIFTS.len() {
        if day > 0 {
            // Blank line between verses.
            song.push('\n');
        }
        song.push_str("On the ");
        song.push_str(ORDINALS[day]);
        song.push_str(" day of Christmas my true love sent to me\n");
        for gift in (0..=day).rev() {
            if gift == 0 && day > 0 {
                song.push_str("And a partridge in a pear tree\n");
            } else {
                song.push_str(GIFTS[gift]);
                song.push('\n');
            }
        }
    }
    song
}
fn main() {
let gift_1 = "A partridge in a pear tree";
let gift_2 = "Two turtle doves";
let gift_3 = "Three French hens";
let gift_4 = "Four calling birds";
let gift_5 = "Five gold rings";
let gift_6 = "Six geese a laying";
let gift_7 = "Seven swans a swimming";
let gift_8 = "Eight maids a milking";
let gift_9 = "Nine ladies dancing";
let gift_10 = "Ten lords a leaping";
let gift_11 = "Eleven pipers piping";
let gift_12 = "12 drummers drumming";
println!("The 12 Days Of Christmas");
}
|
/// Cargo build script: when the build target is iOS, link the UIKit and
/// WebKit system frameworks.
fn main() {
    let target = std::env::var("TARGET").unwrap();
    if target.contains("-ios") {
        for framework in &["UIKit", "WebKit"] {
            println!("cargo:rustc-link-lib=framework={}", framework);
        }
    }
}
|
use crate::{
components::player::{ActionType, PlayerType},
config,
engines::{Engine, EngineData, EngineTransition},
utils,
};
use amethyst::core::math::Vector2;
use rand::{thread_rng, Rng};
use rand_distr::StandardNormal;
/// Baseline engine: defensive roles hold near the net, attacking roles
/// chase the ball (see the `Engine` impl below).
pub struct Basic;
impl Basic {
    /// Creates a new `Basic` engine.
    pub fn new() -> Self {
        Self
    }
}
// Divisor applied to player speed for the sideways "reposition" component
// in `forward_basic`.
const REPOSITION_SIZE: f32 = 5.0;
// Offset used when computing the perfect kick x position — presumably the
// ball's collision radius; confirm against the physics config.
const COLLISION_DISTANCE_TO_BALL: f32 = 4.0;
// Horizontal tolerance for deciding the kick angle is good enough.
const CLOSE_ENOUGH_KICK: f32 = 5.0;
/// Positions the player on the weighted average between its own net and
/// the ball; larger `net_weight` keeps the player closer to the net.
fn defend_with_net_weight(engine_data: EngineData, net_weight: f32) -> EngineTransition {
    // Weighted average of the net and ball positions.
    let intended_position = (engine_data.own_net_position * net_weight + engine_data.ball_position)
        / (net_weight + 1.0);
    let velocity = intended_position - engine_data.own_position;
    // Move at full speed unless already (essentially) at the spot.
    let clamped_velocity = if velocity.norm() > 1.0 {
        (velocity / velocity.norm()) * engine_data.own.speed
    } else {
        Vector2::<f32>::new(0.0, 0.0)
    };
    EngineTransition {
        velocity: clamped_velocity,
        action: None,
    }
}
/// Basic forward behaviour: circle behind the ball relative to the
/// opponent net, adjust the approach angle, and kick once the x offset
/// from the ideal kick position is within `close_enough`.
fn forward_basic(engine_data: EngineData, close_enough: f32) -> EngineTransition {
    let ball_diff_net = engine_data.ball_position - engine_data.opponent_net_position;
    let own_diff_net = engine_data.own_position - engine_data.opponent_net_position;
    let ball_direction = engine_data.ball_position - engine_data.own_position;
    // Perpendicular to the ball direction: sideways drift used to swing
    // around the ball instead of pushing straight into it.
    let reposition_unsized = Vector2::new(ball_direction.y, -ball_direction.x);
    let reposition = (reposition_unsized / reposition_unsized.norm())
        * (engine_data.own.speed / REPOSITION_SIZE);
    let (action, velocity) = {
        if ball_diff_net.norm() >= own_diff_net.norm() {
            // We are between the ball and the net: circle around without kicking.
            (None, ball_direction + reposition)
        } else {
            // Move towards the ball and kick it!
            // Try to have the right angle to put the ball in the net.
            let ball_angle = ball_diff_net.x / ball_diff_net.y;
            let kick_x_diff_net =
                ball_angle * ball_diff_net.y + COLLISION_DISTANCE_TO_BALL.copysign(ball_diff_net.x);
            let net_x = engine_data.opponent_net_position.x;
            let perfect_kick_x = net_x + kick_x_diff_net;
            let adjust_kick_angle = perfect_kick_x - engine_data.own_position.x;
            // Compute whether we're close to where we'd need to be to get a good kick out. Used to
            // choose whether to have a Kick action.
            let x_difference_kick =
                net_x + (ball_angle * own_diff_net.y) - engine_data.own_position.x;
            (
                if x_difference_kick.abs() < close_enough {
                    Some(ActionType::Kick)
                } else {
                    None
                },
                ball_direction + Vector2::new(adjust_kick_angle, 0.0) + reposition,
            )
        }
    };
    // Add a little randomness to it, mostly to avoid deadlocks with
    // players bumping against each other. (Single `thread_rng()` handle
    // instead of two separate calls; `action,` replaces the redundant
    // `action: action` field initializer.)
    let mut rng = thread_rng();
    let noise_x: f32 = rng.sample(StandardNormal);
    let noise_y: f32 = rng.sample(StandardNormal);
    EngineTransition {
        velocity: (velocity / velocity.norm()) * engine_data.own.speed
            + Vector2::new(noise_x, noise_y),
        action,
    }
}
impl Engine for Basic {
    /// Goalie hugs its own net (heavy net weighting).
    fn goalie(&mut self, engine_data: EngineData) -> EngineTransition {
        defend_with_net_weight(engine_data, 2.0)
    }
    /// Defender plays further off the net than the goalie.
    fn defender(&mut self, engine_data: EngineData) -> EngineTransition {
        defend_with_net_weight(engine_data, 0.7)
    }
    /// Wingers behave exactly like the forward in this basic engine.
    fn left(&mut self, engine_data: EngineData) -> EngineTransition {
        self.forward(engine_data)
    }
    fn right(&mut self, engine_data: EngineData) -> EngineTransition {
        self.forward(engine_data)
    }
    /// Forward chases and kicks with the standard tolerance.
    fn forward(&mut self, engine_data: EngineData) -> EngineTransition {
        forward_basic(engine_data, CLOSE_ENOUGH_KICK)
    }
}
/// Stands still: zero velocity and no action. Kept for debugging engines.
#[allow(dead_code)]
fn do_nothing(_engine_data: EngineData) -> EngineTransition {
    let stay_put = Vector2::new(0.0, 0.0);
    EngineTransition {
        velocity: stay_put,
        action: None,
    }
}
/// Go to a spot relative to your own net.
///
/// Teams on the upper side use a mirrored y coordinate so `(x, y)` means
/// the same spot relative to either team's own net.
fn go_to_spot(engine_data: EngineData, x: f32, y: f32) -> EngineTransition {
    let target_y = if engine_data.own.side == utils::Side::UpperSide {
        config::SCREEN_HEIGHT - y
    } else {
        y
    };
    EngineTransition {
        action: None,
        velocity: Vector2::new(x, target_y) - engine_data.own_position,
    }
}
// Maximum vertical distance from the opponent's net at which a waiting
// winger switches to kicking on every transition (see `wing_wait`).
const WING_CENTER_MAX_DIFFERENCE: f32 = config::SCREEN_HEIGHT / 6.0;
/// Hang back until this player is the attacker closest to the ball.
///
/// While any non-goalie, non-defender teammate is strictly nearer to
/// the ball, fall back to `otherwise`. Once this player is the closest
/// attacker, play like a forward with a doubled kick window — and when
/// already near the opponent's net, kick on every transition.
fn wing_wait(
    engine_data: EngineData,
    otherwise: fn(EngineData) -> EngineTransition,
) -> EngineTransition {
    let my_ball_dist = (engine_data.ball_position - engine_data.own_position).norm();
    let teammate_is_closer = engine_data.teammates_position.iter().any(|(kind, pos)| {
        let is_attacker = *kind != PlayerType::Goalie && *kind != PlayerType::Defender;
        is_attacker && (engine_data.ball_position - pos).norm() < my_ball_dist
    });
    if teammate_is_closer {
        return otherwise(engine_data);
    }
    // Be ready to kick the ball forward more often.
    let forward_transition = forward_basic(engine_data, CLOSE_ENOUGH_KICK * 2.0);
    let net_y_dist = (engine_data.own_position.y - engine_data.opponent_net_position.y).abs();
    if net_y_dist < WING_CENTER_MAX_DIFFERENCE {
        EngineTransition {
            action: Some(ActionType::Kick),
            velocity: forward_transition.velocity,
        }
    } else {
        forward_transition
    }
}
/// Engine where wingers hold their position until they are the closest
/// attacker to the ball (see `wing_wait`).
pub struct BasicWingWait;
impl BasicWingWait {
    /// Creates the engine; the type carries no state.
    pub fn new() -> Self {
        BasicWingWait
    }
}
/// Role behaviors for `BasicWingWait`: the goalie always attempts a
/// kick, while every other role defers to `wing_wait` with a
/// role-specific fallback position/behavior.
impl Engine for BasicWingWait {
    fn goalie(&mut self, engine_data: EngineData) -> EngineTransition {
        // Keep the normal defensive positioning but always attempt a
        // kick so the ball is cleared as soon as it comes within range.
        let normal_defense = defend_with_net_weight(engine_data, 2.0);
        EngineTransition {
            action: Some(ActionType::Kick),
            velocity: normal_defense.velocity,
        }
    }
    fn defender(&mut self, engine_data: EngineData) -> EngineTransition {
        // Fallback: ordinary defensive positioning.
        wing_wait(engine_data, |x| defend_with_net_weight(x, 0.7))
    }
    fn left(&mut self, engine_data: EngineData) -> EngineTransition {
        // Fallback: wait at the left third of the field, mid-height.
        wing_wait(engine_data, |x| {
            go_to_spot(x, config::SCREEN_WIDTH / 3.0, config::SCREEN_HEIGHT / 2.0)
        })
    }
    fn right(&mut self, engine_data: EngineData) -> EngineTransition {
        // Fallback: wait at the right third of the field, mid-height.
        wing_wait(engine_data, |x| {
            go_to_spot(
                x,
                2.0 * config::SCREEN_WIDTH / 3.0,
                config::SCREEN_HEIGHT / 2.0,
            )
        })
    }
    fn forward(&mut self, engine_data: EngineData) -> EngineTransition {
        // Fallback: wait centered, two thirds down the field.
        wing_wait(engine_data, |x| {
            go_to_spot(
                x,
                config::SCREEN_WIDTH / 2.0,
                2.0 * config::SCREEN_HEIGHT / 3.0,
            )
        })
    }
}
|
use std::fs::File;
use std::io::Write;
/// Emits an indented `mov des, src` instruction to the output file.
pub fn mov(des: &str, src: &str, f: &mut File) {
    // Fix: indent and instruction were two separate writes with
    // inconsistent panic labels ("asm mov" vs bare "asm"); merged into
    // one writeln! with a single, correctly-labeled message.
    writeln!(f, "    mov {}, {}", des, src).expect("asm mov: Unable to write to the file.");
}
/// Emits an indented `ret` instruction to the output file.
pub fn ret(f: &mut File) {
    // Fix: merged the two writes and dropped the stale "stat_return"
    // prefix left over in the panic messages.
    writeln!(f, "    ret").expect("asm ret: Unable to write to the file.");
}
/// Emits an indented `neg des` (two's-complement negate) instruction.
pub fn neg(des: &str, f: &mut File) {
    // Fix: indent + instruction merged into a single write.
    writeln!(f, "    neg {}", des).expect("asm neg: Unable to write to the file.");
}
/// Emits an indented `not des` (bitwise complement) instruction.
pub fn not(des: &str, f: &mut File) {
    // Fix: panic messages said "asm neg" (copy-paste leftover) and the
    // indent was a separate write; merged and relabeled.
    writeln!(f, "    not {}", des).expect("asm not: Unable to write to the file.");
}
/// Emits an indented `cmp des, src` instruction.
pub fn cmp(des: &str, src: &str, f: &mut File) {
    // Fix: panic messages said "asm neg" (copy-paste leftover) and the
    // indent was a separate write; merged and relabeled.
    writeln!(f, "    cmp {}, {}", des, src).expect("asm cmp: Unable to write to the file.");
}
/// Emits an indented `sete des` (set byte if equal) instruction.
pub fn sete(des: &str, f: &mut File) {
    // Fix: panic messages said "asm neg" (copy-paste leftover) and the
    // indent was a separate write; merged and relabeled.
    writeln!(f, "    sete {}", des).expect("asm sete: Unable to write to the file.");
}
#![allow(unused)]
use std::{
error::Error,
io::{self, BufRead, Read, Write},
};
/// Reads one line into a `String` and a second line as raw bytes,
/// writes the raw bytes to stdout twice, then prints the first line.
fn main() -> Result<(), Box<dyn Error>> {
    let mut line = String::new();
    let mut raw = Vec::new();
    let mut stdin = io::stdin();
    stdin.read_line(&mut line)?;
    io::stdin().lock().read_until(b'\n', &mut raw)?;
    io::stdout().write_all(&raw)?;
    io::stdout().write_all(&raw)?;
    println!("{}", line);
    // Fix: the flush Result was silently discarded (masked by the
    // file-level #![allow(unused)]); propagate it like the other I/O.
    io::stdout().flush()?;
    Ok(())
}
|
extern crate tch;
use std::f64::consts::PI;
use tch::{kind, Tensor};
use crate::{Distribution, TensorUtil};
/// Batched matrix-vector product: `bvec` is promoted to a column
/// matrix, multiplied by `bmat`, and the extra dimension is squeezed
/// back off (port of PyTorch's `_batch_mv`).
fn _batch_mv(bmat: &Tensor, bvec: &Tensor) -> Tensor {
    bmat.matmul(&bvec.unsqueeze(-1)).squeeze1(-1)
}
/// Computes the squared Mahalanobis distance in batch, where `bl` is a
/// (batched) lower-triangular Cholesky factor and `bx` holds the
/// (batched) difference vectors. Port of PyTorch's
/// `_batch_mahalanobis`: the reshapes/permutes below align the batch
/// dimensions of `bl` and `bx` so one batched triangular solve covers
/// everything, then the result is mapped back to `bx`'s batch shape.
fn _batch_mahalanobis(bl: &Tensor, bx: &Tensor) -> Tensor {
    // n = event size (length of the last dimension of bx).
    let n = bx.size().last().unwrap().clone();
    let bx_batch_shape = &bx.size()[..(bx.size().len() - 1)];
    // Assume that bL.shape = (i, 1, n, n), bx.shape = (..., i, j, n),
    // we are going to make bx have shape (..., 1, j, i, 1, n) to apply batched tri.solve
    let bx_batch_dims = bx_batch_shape.len();
    let bl_batch_dims = bl.dim() - 2;
    let outer_batch_dims = bx_batch_dims - bl_batch_dims;
    let old_batch_dims = outer_batch_dims + bl_batch_dims;
    let new_batch_dims = outer_batch_dims + 2 * bl_batch_dims;
    // Reshape bx with the shape (..., 1, i, j, 1, n)
    let mut bx_new_shape = bx.size()[..outer_batch_dims].to_owned();
    for (sl, sx) in bl.size()[..bl_batch_dims].iter()
        .zip(&bx.size()[outer_batch_dims..bx_batch_dims]) {
        bx_new_shape.extend_from_slice(&[sx / sl, sl.to_owned()]);
    }
    bx_new_shape.push(n);
    let bx = bx.reshape(bx_new_shape.as_slice());
    // Permute bx to make it have shape (..., 1, j, i, 1, n)
    let mut permute_dims = Vec::new();
    permute_dims.extend((0..outer_batch_dims).collect::<Vec<_>>());
    permute_dims.extend((outer_batch_dims..new_batch_dims).step_by(2)
        .collect::<Vec<_>>());
    permute_dims.extend((outer_batch_dims + 1..new_batch_dims).step_by(2)
        .collect::<Vec<_>>());
    permute_dims.push(new_batch_dims);
    let permute_dims =
        permute_dims.iter().map(|x| *x as i64).collect::<Vec<_>>();
    let bx = bx.permute(permute_dims.as_slice());
    let flat_l = bl.reshape(&[-1, n, n]); // shape = b x n x n
    let flat_x = bx.reshape(&[-1, flat_l.size()[0], n]); // shape = c x b x n
    let flat_x_swap = flat_x.permute(&[1, 2, 0]); // shape = b x n x c
    // Solve L z = x; the sum of squared z entries is the squared
    // Mahalanobis distance for each batch element.
    let m_swap = flat_x_swap.triangular_solve(&flat_l, false, false, false);
    let m_swap = m_swap.0.pow(2).sum2(&[-2], false); // shape = b x c
    let m = m_swap.transpose(0, 1);
    // Now we revert the above reshape and permute operators.
    let permuted_m_shape = &bx.size()[0..(bx.size().len() - 1)];
    let permuted_m = m.reshape(permuted_m_shape); // shape = (..., 1, j, i, 1)
    let mut permute_inv_dims = (0..outer_batch_dims).collect::<Vec<_>>();
    for i in 0..bl_batch_dims {
        permute_inv_dims.extend_from_slice(&[outer_batch_dims + i, old_batch_dims + i]);
    }
    let permute_inv_dims =
        permute_inv_dims.iter().map(|x| *x as i64).collect::<Vec<_>>();
    let reshaped_m = permuted_m.permute(permute_inv_dims.as_slice()); // shape = (..., 1, i, j, 1)
    let reshaped_m = reshaped_m.reshape(bx_batch_shape);
    // println!("n = {}", n);
    // println!("bx_batch_shape = {:?}", bx_batch_shape);
    // println!("bL_batch_dims = {}", bl_batch_dims);
    // println!("outer_batch_dims = {}", outer_batch_dims);
    // println!("old_batch_dims = {}", old_batch_dims);
    // println!("new_batch_dims = {}", new_batch_dims);
    // println!("bx_new_shape = {:?}", bx_new_shape);
    // println!("permute_dims = {:?}", permute_dims);
    // println!("flat_L.size() = {:?}", flat_l.size());
    // println!("flat_x.size() = {:?}", flat_x.size());
    // println!("flat_x_swap.size() = {:?}", flat_x_swap.size());
    // println!("M_swap.size() = {:?}", m_swap.size());
    // println!("M.size() = {:?}", m.size());
    // println!("reshaped_M.size() = {:?}", reshaped_m.size());
    // return reshaped_M.reshape(bx_batch_shape)
    reshaped_m
}
/// The alternative ways a multivariate normal's spread can be given.
/// Only `ScaleTril` (a lower-triangular Cholesky factor) is handled by
/// `MultivariateNormal::new` today; the other variants hit
/// `unimplemented!`.
pub enum Scale {
    ScaleTril(Tensor),
    Covariance(Tensor),
    Precision(Tensor)
}
/// Drops the rightmost dimension of `t` by narrowing it to length 1
/// and reshaping it away.
/// NOTE(review): relies on `TensorUtil::ld` — presumably the index of
/// the last dimension (`dim() - 1`); confirm against the trait impl.
fn _drop_rightmost_dim(t: Tensor) -> Tensor {
    let t = t.narrow(t.ld() as i64, 0, 1);
    t.reshape(&t.size().as_slice()[..t.ld()])
}
/// Multivariate normal (Gaussian) distribution parameterized by a mean
/// and a lower-triangular scale factor, mirroring
/// `torch.distributions.MultivariateNormal`.
pub struct MultivariateNormal {
    // Shape of the batch of distributions.
    _batch_shape: Vec<i64>,
    // Shape of a single event; always one-dimensional here ([n]).
    _event_shape: Vec<i64>,
    // Mean, broadcast against the scale factor in `new`.
    loc: Tensor,
    // The broadcast parameterization supplied by the caller.
    scale: Scale,
    // Cholesky factor as given (pre-broadcast); used by `rsample`
    // and `log_prob`.
    _unbroadcasted_scale_tril: Tensor
}
impl Distribution for MultivariateNormal {
    /// Expands the distribution to a new batch shape by expanding the
    /// mean and the scale factor. Only `Scale::ScaleTril` is supported.
    fn expand(&self, batch_shape: &[i64], _instance: bool) -> Self {
        let mut loc_shape = batch_shape.to_vec();
        loc_shape.extend(self._event_shape.clone());
        let mut cov_shape = batch_shape.to_vec();
        cov_shape.extend(self._event_shape.clone());
        cov_shape.extend(self._event_shape.clone());
        let loc = self.loc.expand(loc_shape.as_slice(), false); // TODO: check meaning of implicit
        let _unbroadcasted_scale_tril = 1 * &self._unbroadcasted_scale_tril;
        let scale = match &self.scale {
            Scale::ScaleTril(s) => Scale::ScaleTril(s.expand(cov_shape.as_slice(), false)), // TODO: check meaning of implicit
            _ => unimplemented!()
        };
        MultivariateNormal::new(&loc, &scale)
    }
    // below code not ported from pytorch:
    //
    // if 'covariance_matrix' in self.__dict__:
    //     new.covariance_matrix = self.covariance_matrix.expand(cov_shape)
    // if 'scale_tril' in self.__dict__:
    //     new.scale_tril = self.scale_tril.expand(cov_shape)
    // if 'precision_matrix' in self.__dict__:
    //     new.precision_matrix = self.precision_matrix.expand(cov_shape)
    // super(MultivariateNormal, new).__init__(batch_shape,
    //                                         self.event_shape,
    //                                         validate_args=False)
    // new._validate_args = self._validate_args
    // return new
    /// Reparameterized sample: `loc + L @ eps` with standard-normal
    /// `eps`, so gradients flow through `loc` and the scale factor.
    fn rsample(&self, sample_shape: &[i64]) -> Tensor {
        let shape = self._extended_shape(sample_shape);
        let eps = Tensor::randn(shape.as_slice(), kind::FLOAT_CPU);
        // eps.print(); // for debug
        &self.loc + _batch_mv(&self._unbroadcasted_scale_tril, &eps)
    }
    /// Log density: `-0.5 * (n * ln(2π) + Mahalanobis²) - log|L|`,
    /// where `log|L|` is the sum of the log-diagonal of the factor.
    fn log_prob(&self, value: &Tensor) -> Tensor {
        let diff = value - &self.loc;
        let m = _batch_mahalanobis(&self._unbroadcasted_scale_tril, &diff);
        let half_log_det = self._unbroadcasted_scale_tril
            .diagonal(0, -2, -1).log().sum2(&[-1], false);
        -0.5 * (self._event_shape[0] as f64 * f64::ln(2.0 * PI) + m)
            - half_log_det
    }
    /// Not yet ported from PyTorch.
    fn entropy(&self) -> Tensor {
        unimplemented!();
    }
}
impl MultivariateNormal {
    /// Builds the distribution from a mean and a `Scale`.
    ///
    /// Only `Scale::ScaleTril` is implemented. `loc` is temporarily
    /// unsqueezed into a column so it broadcasts against the matrix
    /// factor; the extra dimension is dropped again afterwards.
    pub fn new(loc_: &Tensor, scale_: &Scale) -> Self {
        let mut loc = loc_.unsqueeze(-1);
        let scale;
        let _unbroadcasted_scale_tril;
        if let Scale::ScaleTril(s) = scale_ {
            // broadcast_tensors returns [s', loc']; pop yields loc'
            // first, then s'.
            let mut tmp = Tensor::broadcast_tensors(&[s, &loc]);
            loc = _drop_rightmost_dim(tmp.pop().unwrap());
            scale = Scale::ScaleTril(tmp.pop().unwrap());
            _unbroadcasted_scale_tril = s.shallow_clone();
        }
        else {
            unimplemented!();
        }
        MultivariateNormal {
            // All dims of loc except the last form the batch shape;
            // the last dim is the (single-dimensional) event shape.
            _batch_shape: loc.size()[..loc.ld()].to_vec(),
            _event_shape: [loc.size()[loc.ld()]].to_vec(),
            loc,
            scale,
            _unbroadcasted_scale_tril
        }
    }
    /// Returns the size of the sample returned by the distribution, given
    /// a `sample_shape`. Note, that the batch and event shapes of a
    /// distribution instance are fixed at the time of construction.
    /// If this is empty, the returned shape is upcast to (1,).
    ///
    /// # Arguments
    ///
    /// * `sample_shape` - the size of the sample to be drawn.
    fn _extended_shape(&self, sample_shape: &[i64]) -> Vec<i64> {
        let mut sample_shape = sample_shape.to_vec();
        sample_shape.extend(&self._batch_shape);
        sample_shape.extend(&self._event_shape);
        sample_shape
    }
}
// fn test_multivariate_normal_sample() {
// manual_seed(0);
// let mean = Tensor::randn(&[3], kind::FLOAT_CPU);
// mean.requires_grad();
// let tmp = Tensor::randn(&[3, 10], kind::FLOAT_CPU);
// let cov = tmp.matmul(&tmp.transpose(0, 1)) / 10;
// cov.requires_grad();
// let prec = cov.inverse();
// prec.requires_grad();
// let scale_tril = cov.cholesky(false);
// scale_tril.requires_grad();
// }
// def test_multivariate_normal_sample(self):
// set_rng_seed(0) # see Note [Randomized statistical tests]
// mean = torch.randn(3, requires_grad=True)
// tmp = torch.randn(3, 10)
// cov = (torch.matmul(tmp, tmp.t()) / tmp.size(-1)).requires_grad_()
// prec = cov.inverse().requires_grad_()
// scale_tril = torch.cholesky(cov, upper=False).requires_grad_()
// self._check_sampler_sampler(MultivariateNormal(mean, cov),
// scipy.stats.multivariate_normal(mean.detach().numpy(), cov.detach().numpy()),
// 'MultivariateNormal(loc={}, cov={})'.format(mean, cov),
// multivariate=True)
// self._check_sampler_sampler(MultivariateNormal(mean, precision_matrix=prec),
// scipy.stats.multivariate_normal(mean.detach().numpy(), cov.detach().numpy()),
// 'MultivariateNormal(loc={}, prec={})'.format(mean, prec),
// multivariate=True)
// self._check_sampler_sampler(MultivariateNormal(mean, scale_tril=scale_tril),
// scipy.stats.multivariate_normal(mean.detach().numpy(), cov.detach().numpy()),
// 'MultivariateNormal(loc={}, scale_tril={})'.format(mean, scale_tril),
// multivariate=True)
// class LowerCholeskyTransform(Transform):
// """
// Transform from unconstrained matrices to lower-triangular matrices with
// nonnegative diagonal entries.
// This is useful for parameterizing positive definite matrices in terms of
// their Cholesky factorization.
// """
// domain = constraints.real
// codomain = constraints.lower_cholesky
// event_dim = 2
// def __eq__(self, other):
// return isinstance(other, LowerCholeskyTransform)
// def _call_on_event(self, x):
// return x.tril(-1) + x.diag().exp().diag()
// def _inverse_on_event(self, y):
// return y.tril(-1) + y.diag().log().diag()
// def _call(self, x):
// flat_x = x.contiguous().view((-1,) + x.shape[-2:])
// return torch.stack([self._call_on_event(flat_x[i]) for i in range(flat_x.size(0))]).view(x.shape)
// def _inverse(self, y):
// flat_y = y.contiguous().view((-1,) + y.shape[-2:])
// return torch.stack([self._inverse_on_event(flat_y[i]) for i in range(flat_y.size(0))]).view(y.shape)
/// Transform from unconstrained square matrices to lower-triangular
/// matrices with positive diagonal entries (Rust port of PyTorch's
/// `LowerCholeskyTransform._call`; see the reference above).
pub mod lower_cholesky_transform {
    use tch::{Tensor};
    use crate::{TensorUtil};
    // Per-event transform: keep the strict lower triangle, replace the
    // diagonal with its elementwise exponential.
    fn _call_on_event(x: &Tensor) -> Tensor {
        x.tril(-1) + x.diag(0).exp().diagflat(0)
    }
    /// Applies the transform to each trailing (n x n) matrix of `x`,
    /// preserving the leading batch dimensions.
    /// NOTE(review): `c![...]` is a list-comprehension macro and
    /// `eshape` comes from `TensorUtil` — confirm both against their
    /// definitions.
    pub fn transform(x: &Tensor) -> Tensor {
        let flat_shape = [vec![-1], x.eshape(2)].concat();
        let flat_x = x.contiguous().view(flat_shape.as_slice());
        let n = flat_x.size()[0];
        Tensor::stack(
            &c![_call_on_event(&flat_x.slice(0, i, i + 1, 1).squeeze()),
                for i in 0..n], 0
        ).view(&x.size())
    }
}
|
use std::fs::File;
use std::io::Read;
use std::path::Path;
/// Reads the day-12 puzzle input, builds the connectivity groups, and
/// prints both answers.
fn main() {
    let mut f = File::open(Path::new("input/day12.txt")).unwrap();
    let mut s = String::new();
    // Read errors are deliberately ignored; an unreadable file leaves
    // `s` empty and `solve` will panic on the missing group instead.
    f.read_to_string(&mut s).ok();
    let input = parse_input(&s.trim());
    let (p1, p2) = solve(input);
    println!("Part 1: {}, Part 2: {}", p1, p2)
}
/// Feeds every pipe `x <-> y1, y2, ...` into the group bookkeeping and
/// returns (size of the group at index 0, total number of groups).
fn solve(input: Vec<Vec<usize>>) -> (usize, usize) {
    let mut groups: Vec<Vec<usize>> = Vec::new();
    for pipe in input {
        let source = pipe[0];
        for &target in pipe[1..].iter() {
            add_link(source, target, &mut groups);
        }
    }
    (groups[0].len(), groups.len())
}
fn parse_input(input: &str) -> Vec<Vec<usize>> {
input
.lines()
.map(|line| {
line.split_whitespace()
.filter_map(|n| n.trim_matches(',').parse::<usize>().ok())
.collect::<Vec<usize>>()
})
.collect::<Vec<Vec<usize>>>()
}
/// Returns the index of the group containing `k`, or `None`.
///
/// Fix: the original scanned every group even after a match was found
/// (and kept the *last* match). `rposition` searches from the back and
/// short-circuits, preserving the last-match result while exiting
/// early; the inner membership test now uses `slice::contains`.
fn find_item(k: usize, v: &Vec<Vec<usize>>) -> Option<usize> {
    v.iter().rposition(|group| group.contains(&k))
}
/// Returns true when group `v` contains the program id `k`.
///
/// Fix: the hand-rolled loop OR-ed over every element with no early
/// exit; `slice::contains` short-circuits on the first match.
fn find_item_2(k: usize, v: &Vec<usize>) -> bool {
    v.contains(&k)
}
/// Records a link between programs `x` and `y`, merging, extending, or
/// creating groups as needed (a self-link creates a singleton group).
fn add_link(x: usize, y: usize, groups: &mut Vec<Vec<usize>>) {
    let group_of_x = find_item(x, groups);
    let group_of_y = find_item(y, groups);
    match (group_of_x, group_of_y) {
        (Some(i), Some(j)) => {
            // Already in the same group? Nothing to do.
            if i != j {
                merge(i, j, groups);
            }
        }
        (Some(i), None) => groups[i].push(y),
        (None, Some(j)) => groups[j].push(x),
        (None, None) => {
            if x == y {
                groups.push(vec![x]);
            } else {
                groups.push(vec![x, y]);
            }
        }
    }
}
/// Merges the group at the higher index into the one at the lower
/// index (`swap_remove` keeps this O(1) on the vector of groups).
/// Callers guarantee distinct indices — see `add_link`'s `i == j` arm.
fn merge(i: usize, j: usize, groups: &mut Vec<Vec<usize>>) {
    let (lo, hi) = if i < j { (i, j) } else { (j, i) };
    let mut absorbed = groups.swap_remove(hi);
    groups[lo].append(&mut absorbed);
}
#[cfg(test)]
mod test {
    use super::*;
    // Exercises the group bookkeeping (extend + create) directly.
    #[test]
    fn link_test() {
        let mut groups = vec![vec![0], vec![1], vec![2, 3], vec![4]];
        add_link(1, 4, &mut groups);
        assert_eq!(groups, vec![vec![0], vec![1, 4], vec![2, 3]]);
        add_link(5, 6, &mut groups);
        assert_eq!(groups, [vec![0], vec![1, 4], vec![2, 3], vec![5, 6]]);
    }
    // End-to-end check against the worked example from the puzzle text.
    #[test]
    fn solve_test() {
        let input = "0 <-> 2
1 <-> 1
2 <-> 0, 3, 4
3 <-> 2, 4
4 <-> 2, 3, 6
5 <-> 6
6 <-> 4, 5";
        assert_eq!(solve(parse_input(input)), (6, 2))
    }
}
|
use bson::RawDocumentBuf;
use crate::{
cmap::{conn::PinnedConnectionHandle, Command, RawCommandResponse, StreamDescription},
concern::WriteConcern,
cursor::CursorSpecification,
error::{Error, Result},
operation::{CursorBody, Operation, RunCommand},
options::RunCursorCommandOptions,
selection_criteria::SelectionCriteria,
};
/// Operation for running an arbitrary command that is expected to
/// return a cursor: wraps a generic `RunCommand` and layers cursor
/// options on top of it.
#[derive(Debug, Clone)]
pub(crate) struct RunCursorCommand<'conn> {
    // The underlying generic command execution.
    run_command: RunCommand<'conn>,
    // Optional cursor tuning (batch size, max time, comment).
    options: Option<RunCursorCommandOptions>,
}
impl<'conn> RunCursorCommand<'conn> {
    /// Wraps an already-built `RunCommand` with cursor options.
    ///
    /// Currently always succeeds; the `Result` return leaves room for
    /// future validation without breaking the API.
    pub(crate) fn new(
        run_command: RunCommand<'conn>,
        options: Option<RunCursorCommandOptions>,
    ) -> Result<Self> {
        Ok(Self {
            run_command,
            options,
        })
    }
}
/// Every `Operation` hook except `handle_response` delegates straight
/// to the wrapped `RunCommand`; `handle_response` differs by parsing
/// the server reply as a cursor and building a `CursorSpecification`.
impl<'conn> Operation for RunCursorCommand<'conn> {
    type O = CursorSpecification;
    type Command = RawDocumentBuf;
    const NAME: &'static str = "run_cursor_command";
    fn build(&mut self, description: &StreamDescription) -> Result<Command<Self::Command>> {
        self.run_command.build(description)
    }
    fn serialize_command(&mut self, cmd: Command<Self::Command>) -> Result<Vec<u8>> {
        self.run_command.serialize_command(cmd)
    }
    fn extract_at_cluster_time(
        &self,
        response: &bson::RawDocument,
    ) -> Result<Option<bson::Timestamp>> {
        self.run_command.extract_at_cluster_time(response)
    }
    // Errors are passed through unchanged; there is no retry-side
    // recovery at this level.
    fn handle_error(&self, error: Error) -> Result<Self::O> {
        Err(error)
    }
    fn selection_criteria(&self) -> Option<&SelectionCriteria> {
        self.run_command.selection_criteria()
    }
    fn is_acknowledged(&self) -> bool {
        self.run_command.is_acknowledged()
    }
    fn write_concern(&self) -> Option<&WriteConcern> {
        self.run_command.write_concern()
    }
    fn supports_read_concern(&self, description: &StreamDescription) -> bool {
        self.run_command.supports_read_concern(description)
    }
    fn supports_sessions(&self) -> bool {
        self.run_command.supports_sessions()
    }
    fn retryability(&self) -> crate::operation::Retryability {
        self.run_command.retryability()
    }
    fn update_for_retry(&mut self) {
        self.run_command.update_for_retry()
    }
    fn pinned_connection(&self) -> Option<&PinnedConnectionHandle> {
        self.run_command.pinned_connection()
    }
    fn name(&self) -> &str {
        self.run_command.name()
    }
    /// Parses the reply as a cursor and combines it with the caller's
    /// cursor options into a `CursorSpecification`.
    fn handle_response(
        &self,
        response: RawCommandResponse,
        description: &StreamDescription,
    ) -> Result<Self::O> {
        let cursor_response: CursorBody = response.body()?;
        let comment = match &self.options {
            Some(options) => options.comment.clone(),
            None => None,
        };
        Ok(CursorSpecification::new(
            cursor_response.cursor,
            description.server_address.clone(),
            self.options.as_ref().and_then(|opts| opts.batch_size),
            self.options.as_ref().and_then(|opts| opts.max_time),
            comment,
        ))
    }
}
|
/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree.
*/
use darling::FromMeta;
use heck::CamelCase;
use quote::format_ident;
use syn::parse::Parse;
use syn::parse::ParseStream;
use syn::spanned::Spanned;
use crate::Method;
use crate::MethodAttrs;
use crate::Service;
/// Converts one trait method into the internal `Method` description.
///
/// Rejects generic methods, strips `#[rpc(...)]` attributes out of the
/// pass-through attribute list, and sets `has_ref` when any argument
/// type is a reference (which forces a lifetime on the generated
/// request enum — see the `Parse` impl for `Service`).
fn parse_method(method: syn::TraitItemMethod, has_ref: &mut bool) -> syn::Result<Method> {
    if !method.sig.generics.params.is_empty() {
        return Err(syn::Error::new(
            method.sig.generics.span(),
            "RPC methods cannot have generic parameters",
        ));
    }
    let ident = method.sig.ident;
    let camel_ident = syn::Ident::new(&ident.to_string().to_camel_case(), ident.span());
    // Search through the attributes and find any with the `rpc`
    // path. We need to exclude these from getting passed through
    // and expanded.
    let mut custom_attrs = None;
    #[allow(clippy::unnecessary_filter_map)]
    let attrs: Vec<_> = method
        .attrs
        .into_iter()
        .filter_map(|attrs| {
            // Clippy complains about this `filter_map` being
            // equivalent to `filter`, but it's not because `attrs`
            // needs to be passed by value to the closure so we can
            // move it out.
            if attrs.path.is_ident("rpc") {
                custom_attrs = Some(attrs);
                None
            } else {
                Some(attrs)
            }
        })
        .collect();
    // Parse `#[rpc(...)]` contents via darling; absent => defaults.
    let method_attrs = match custom_attrs {
        Some(custom_attrs) => {
            let meta = custom_attrs.parse_meta()?;
            MethodAttrs::from_meta(&meta)?
        }
        None => MethodAttrs::default(),
    };
    // A fire-and-forget method cannot carry a return type.
    if method_attrs.no_response {
        if method.sig.output != syn::ReturnType::Default {
            return Err(syn::Error::new(
                method.sig.output.span(),
                "#[rpc(no_response)] methods cannot have a return type",
            ));
        }
    }
    // Collect the typed arguments, skipping the receiver (`&self`).
    let args = method
        .sig
        .inputs
        .iter()
        .filter_map(|arg| match arg {
            syn::FnArg::Receiver(_) => None,
            syn::FnArg::Typed(t) => {
                if let syn::Type::Reference(_) = t.ty.as_ref() {
                    *has_ref = true;
                }
                Some(t.clone())
            }
        })
        .collect();
    Ok(Method {
        attrs,
        method_attrs,
        ident,
        camel_ident,
        args,
        output: method.sig.output,
    })
}
/// Parses the annotated trait into a `Service` description: collects
/// the RPC methods, derives the identifiers of the generated
/// request/response/server/client types, and adds a `'req` lifetime to
/// the request enum when any method argument borrows.
impl Parse for Service {
    fn parse(input: ParseStream) -> syn::Result<Self> {
        let t: syn::ItemTrait = input.parse()?;
        let attrs = t.attrs;
        let vis = t.vis;
        let ident = t.ident;
        let mut has_ref = false;
        let mut methods = Vec::new();
        for inner in t.items {
            if let syn::TraitItem::Method(method) = inner {
                // `serve` is emitted by the macro itself; a user-defined
                // method of that name would collide with it.
                if method.sig.ident == "serve" {
                    return Err(syn::Error::new(
                        ident.span(),
                        format!("method conflicts with generated fn {}::serve", ident),
                    ));
                }
                methods.push(parse_method(method, &mut has_ref)?);
            }
        }
        // Names of the companion items the macro will generate.
        let request_ident = format_ident!("{}Request", ident, span = ident.span());
        let response_ident = format_ident!("{}Response", ident, span = ident.span());
        let server_ident = format_ident!("Serve{}", ident, span = ident.span());
        let client_ident = format_ident!("{}Client", ident, span = ident.span());
        // Borrowed arguments require a lifetime on the request enum.
        let request_generics = if has_ref {
            Some(syn::parse_quote!(<'req>))
        } else {
            None
        };
        Ok(Self {
            attrs,
            vis,
            ident,
            methods,
            request_ident,
            request_generics,
            response_ident,
            server_ident,
            client_ident,
        })
    }
}
|
/*
* Datadog API V1 Collection
*
* Collection of all Datadog Public endpoints.
*
* The version of the OpenAPI document: 1.0
* Contact: support@datadoghq.com
* Generated by: https://openapi-generator.tech
*/
/// WidgetTimeWindows : Define a time window.
///
/// Each variant maps to its wire value through the serde `rename`
/// attribute (e.g. `SEVEN_DAYS` serializes as `"7d"`).
/// NOTE: generated code — the SCREAMING_SNAKE variant names mirror the
/// OpenAPI spec rather than Rust's CamelCase convention.
#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum WidgetTimeWindows {
    #[serde(rename = "7d")]
    SEVEN_DAYS,
    #[serde(rename = "30d")]
    THIRTY_DAYS,
    #[serde(rename = "90d")]
    NINETY_DAYS,
    #[serde(rename = "week_to_date")]
    WEEK_TO_DATE,
    #[serde(rename = "previous_week")]
    PREVIOUS_WEEK,
    #[serde(rename = "month_to_date")]
    MONTH_TO_DATE,
    #[serde(rename = "previous_month")]
    PREVIOUS_MONTH,
    #[serde(rename = "global_time")]
    GLOBAL_TIME,
}
/// Formats the enum as its wire value (e.g. `"7d"`), matching the
/// serde `rename` attributes on the variants.
///
/// Implemented as `Display` instead of a direct `ToString` impl
/// (clippy: `to_string_trait_impl`): the blanket
/// `impl<T: Display> ToString for T` keeps every existing
/// `.to_string()` call working, and the type additionally gains
/// `format!`/`{}` support. Writing a `&'static str` also avoids the
/// per-call `String::from` allocations of the old impl.
impl std::fmt::Display for WidgetTimeWindows {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let value = match self {
            Self::SEVEN_DAYS => "7d",
            Self::THIRTY_DAYS => "30d",
            Self::NINETY_DAYS => "90d",
            Self::WEEK_TO_DATE => "week_to_date",
            Self::PREVIOUS_WEEK => "previous_week",
            Self::MONTH_TO_DATE => "month_to_date",
            Self::PREVIOUS_MONTH => "previous_month",
            Self::GLOBAL_TIME => "global_time",
        };
        f.write_str(value)
    }
}
|
use crate::address_bus::PpuAddressBus;
use crate::cpu;
#[macro_use]
use derive_serialize::Serialize;
use std::cell::Cell;
mod bg_state;
mod palette;
mod sprite_state;
#[cfg(test)]
mod test;
/// State of the emulated NES picture processing unit: raster position,
/// sprite memory, draw state, and the memory-mapped registers exposed
/// to the cpu at 0x2000-0x2007.
#[derive(Serialize, Debug)]
pub struct Ppu {
    // Running ppu cycle counter (can be rewound via 'sub_cycle_count').
    cycle_count: i32,
    // Current raster position; scanlines 0-239 are the visible region
    // (see 'read_oamdata'), and the scanline can be negative (i16).
    current_scanline: i16,
    current_scanline_dot: u16,
    primary_oam: PrimaryOam,
    secondary_oam: SecondaryOam,
    bg_state: bg_state::BgDrawState,
    sprite_state: sprite_state::SpriteDrawState,
    // registers
    ppuctrl: u8,
    ppumask: u8,
    ppustatus: u8,
    // NOTE: on some ppu chips, there are bugs relating to
    // writing to oamaddr. not sure if these need to be
    // emulated, but it may be worth keeping in mind
    oamaddr: u8,
    // Internal buffer that delays non-palette ppudata reads by one
    // access (see 'read_ppudata').
    ppudata_read_buffer: u8,
    // bitfield w/ misc ppu flags
    bits: PpuBits::BitField,
    // address of the current tile to be fetched and drawn. points
    // to a byte in one of the nametables in vram. referred to
    // as 'v' in the nesdev.com 'ppu scrolling' article
    current_vram_addr: VramAddrRegister,
    // temporary address, same as above but doesn't get
    // incremented while drawing. this register is shared
    // by 'ppuscroll' and 'ppuaddr' (so writes to these
    // registers go into this). referred to as 't' in the
    // nesdev.com 'ppu scrolling' article
    temp_vram_addr: VramAddrRegister,
}
// Packed single-byte storage for misc ppu flags (the 'bits' field on
// 'Ppu'). The ranges appear to be inclusive bit indices, so e.g.
// 'fine_x_scroll: 4..6' is a 3-bit field — confirm against the
// 'bitfield!' macro definition.
bitfield!(PpuBits<u8>(
    frame_done: 0..0,
    even_frame: 1..1,
    low_bits_toggle: 2..2,
    suppress_vblank_flag: 3..3,
    fine_x_scroll: 4..6,
));
// Sprite height mode; the discriminant doubles as the height in
// pixels (sprites are always 8 pixels wide).
#[derive(Copy, Clone)]
enum SpriteSize {
    S8x8 = 8,
    S8x16 = 16,
}
// Object attribute memory holding all 64 sprite entries (256 bytes).
#[derive(Serialize, Debug)]
pub struct PrimaryOam {
    pub entries: [OamEntry; 64],
}
// The smaller 8-entry oam used while drawing a scanline.
#[derive(Serialize, Debug)]
pub struct SecondaryOam {
    pub entries: [OamEntry; 8],
}
// One 4-byte sprite record. '#[repr(C)]' guarantees the field order
// and packing that the transmute-based byte views in 'oam_impl!'
// depend on.
#[derive(Serialize, Copy, Clone, Default, Debug)]
#[repr(C)]
pub struct OamEntry {
    pub y: u8,
    pub tile_index: u8,
    pub attributes: u8,
    pub x: u8,
}
// convenience methods for the 'SecondaryOam' and 'PrimaryOam' structs:
// raw byte views (via transmute over the #[repr(C)] entries array),
// indexed byte access, and a default (zeroed) constructor.
macro_rules! oam_impl {
    ($oam:ty, $n_entries:literal) => {
        impl $oam {
            // SAFETY of both views: the struct is a single #[repr(C)]
            // array of 4-byte #[repr(C)] entries, so it is layout-
            // compatible with [u8; $n_entries * 4].
            pub fn as_bytes<'a>(&'a self) -> &'a [u8; $n_entries * 4] {
                unsafe { std::mem::transmute(self) }
            }
            pub fn as_bytes_mut<'a>(&'a mut self) -> &'a mut [u8; $n_entries * 4] {
                unsafe { std::mem::transmute(self) }
            }
            // NOTE: this function is only used by 'PrimaryOam'
            #[allow(dead_code)]
            pub fn get_byte(&self, index: u8) -> u8 {
                assert_eq!($n_entries, 64);
                // SAFETY: 'index' is a u8 and cannot be larger than
                // the size of oam, making unchecked indexing safe
                unsafe { *self.as_bytes().get_unchecked(index as usize) }
            }
            pub fn set_byte(&mut self, index: u8, val: u8) {
                assert!((index as u16) < $n_entries as u16 * 4);
                unsafe { *self.as_bytes_mut().get_unchecked_mut(index as usize) = val };
            }
            // gets the 'OamEntry' given by 'index' (a byte index into oam)
            // without doing bounds checking. for safety, this requires
            // that 'index' doesn't point past the last 4 bytes of oam.
            // for correctness, 'index' should also be a multiple of 4,
            // so as to fetch the right sprite data from oam.
            // NOTE: this function is only used by 'SecondaryOam'
            #[allow(dead_code)]
            pub unsafe fn get_sprite_unchecked(&self, index: u8) -> OamEntry {
                *(self.as_bytes().get_unchecked(index as usize) as *const _ as *const _)
            }
        }
        impl Default for $oam {
            fn default() -> Self {
                Self {
                    entries: [OamEntry::default(); $n_entries],
                }
            }
        }
    };
}
// Instantiate the shared oam helpers for both oam types.
oam_impl!(PrimaryOam, 64);
oam_impl!(SecondaryOam, 8);
// A 15-bit ppu vram address ('v'/'t' in the nesdev 'ppu scrolling'
// article) stored in the low bits of a u16.
#[derive(Serialize, Copy, Clone, Debug)]
struct VramAddrRegister {
    inner: u16,
}
// Field accessors for the packed vram address. Bit layout (low to
// high, as established by the masks below): coarse x = bits 0-4,
// coarse y = bits 5-9, nametable select = bits 10-11, fine y = bits
// 12-14.
impl VramAddrRegister {
    // The address put on the bus only uses the low 14 bits.
    fn get_addr(self) -> u16 {
        self.inner & 0x3fff
    }
    fn get_coarse_x(self) -> u8 {
        (self.inner & 0b11111) as u8
    }
    fn set_coarse_x(&mut self, coarse_x: u8) {
        self.inner = (self.inner & !0b11111) | coarse_x as u16;
    }
    fn get_coarse_y(self) -> u8 {
        ((self.inner >> 5) & 0b11111) as u8
    }
    fn set_coarse_y(&mut self, coarse_y: u8) {
        self.inner = (self.inner & !0b1111100000) | ((coarse_y as u16) << 5);
    }
    fn get_fine_y(self) -> u8 {
        ((self.inner & 0b111_00_00000_00000) >> 12) as u8
    }
    fn set_fine_y(&mut self, fine_y: u8) {
        self.inner = (self.inner & !0b111_00_00000_00000) | ((fine_y as u16) << 12);
    }
    fn get_nametable_select(self) -> u8 {
        ((self.inner & 0b11_00000_00000) >> 10) as u8
    }
    fn set_nametable_select(&mut self, select: u8) {
        self.inner = (self.inner & !0b11_00000_00000) | ((select as u16) << 10);
    }
}
impl Ppu {
    /// Creates a ppu in its power-on state: all registers zeroed, the
    /// scanline counter starting just past the visible 0-239 range,
    /// and only the 'even_frame' flag set in the packed bits.
    pub fn new() -> Self {
        Self {
            bg_state: bg_state::BgDrawState::default(),
            sprite_state: sprite_state::SpriteDrawState::default(),
            secondary_oam: SecondaryOam::default(),
            primary_oam: PrimaryOam::default(),
            ppuctrl: 0,
            ppumask: 0,
            ppustatus: 0,
            oamaddr: 0,
            ppudata_read_buffer: 0,
            current_vram_addr: VramAddrRegister { inner: 0 },
            temp_vram_addr: VramAddrRegister { inner: 0 },
            current_scanline: 240,
            current_scanline_dot: 0,
            // Arguments appear positional in declaration order, i.e.
            // even_frame = 1, everything else 0 — confirm against the
            // 'bitfield!' macro.
            bits: PpuBits::BitField::new(0, 1, 0, 0, 0),
            cycle_count: 0,
        }
    }
#[cfg(test)]
pub fn reset_state(&mut self) {
self.secondary_oam = SecondaryOam::default();
self.primary_oam = PrimaryOam::default();
self.ppuctrl = 0;
self.ppumask = 0;
self.ppustatus = 0;
self.oamaddr = 0;
self.ppudata_read_buffer = 0;
self.current_vram_addr = VramAddrRegister { inner: 0 };
self.temp_vram_addr = VramAddrRegister { inner: 0 };
self.current_scanline = 240;
self.current_scanline_dot = 0;
self.bits = PpuBits::BitField::new(0, 1, 0, 0, 0);
self.cycle_count = 0;
}
    /// Rewinds the ppu cycle counter by 'sub' cycles.
    pub fn sub_cycle_count(&mut self, sub: i32) {
        self.cycle_count -= sub;
    }
    // sets the low 5 bits of 'ppustatus' equal to the low 5 bits of 'val'
    // (these bits are open-bus-like: register writes deposit their low
    // bits here — see the write_* helpers)
    pub fn set_ppustatus_low_bits(&mut self, val: u8) {
        self.ppustatus &= !0b11111;
        self.ppustatus |= val & 0b11111;
    }
    /// Whether the current frame has finished rendering.
    pub fn is_frame_done(&self) -> bool {
        self.bits.frame_done.is_true()
    }
    /// Sets or clears the frame-done flag.
    pub fn set_frame_done(&mut self, done: bool) {
        self.bits.frame_done.set(done as u8);
    }
    // NOTE: this is also used by 'write_oamdma()' in 'address_bus'
    // Writes one byte at 'oamaddr' and advances the address, wrapping
    // from 0xff back to 0 (oam is exactly 256 bytes).
    pub fn write_to_oam_and_increment_addr(&mut self, val: u8) {
        self.primary_oam.set_byte(self.oamaddr, val);
        self.oamaddr = self.oamaddr.wrapping_add(1);
    }
    // used for reading the registers located in the cpu memory map at 0x2000-0x2007
    // ('index' is the register offset 0-7; note that several reads have
    // side effects — clearing the vblank flag, advancing the vram
    // address — so this takes '&mut self')
    pub fn read_register_by_index(
        &mut self,
        index: u8,
        bus: &mut dyn PpuAddressBus,
        cpu: &mut cpu::Cpu,
    ) -> u8 {
        // NOTE: to help readability, this function and a few others on 'Ppu' are
        // split into smaller subfunctions. instead of factoring these subfunctions
        // out into the outer 'Ppu' impl block, i decided to keep them nested, so as
        // to not give the impression that they are needed anywhere else
        {
            return match index {
                // ppuctrl
                0 => 0,
                // ppumask
                1 => self.ppumask,
                // ppustatus
                2 => read_ppustatus(self),
                // oamaddr
                3 => 0,
                // oamdata
                4 => read_oamdata(self),
                // ppuscroll | ppuaddr
                5 | 6 => 0, // FIXME: should these reset the low bits toggle as well??
                // ppudata
                7 => read_ppudata(self, bus, cpu),
                _ => 0,
            };
        }
        // Reading ppustatus clears the write toggle and the vblank
        // flag, and may suppress an imminent vblank (race condition).
        fn read_ppustatus(ppu: &mut Ppu) -> u8 {
            // clear low bits toggle
            ppu.bits.low_bits_toggle.set(0);
            let status = ppu.ppustatus;
            // clear vblank flag
            ppu.set_vblank(false);
            if ppu.current_scanline == 241 && ppu.current_scanline_dot == 1 {
                // vblank flag should not have been set yet
                debug_assert!(status & 0b10000000 == 0);
                // if there is one cycle left before the vblank flag will be set
                // (when scanline = 241 and dot = 1, the flag will be set on the
                // next call to 'step()'), prevent the vblank flag from being set
                ppu.bits.suppress_vblank_flag.set(1);
            }
            status
        }
        fn read_oamdata(ppu: &mut Ppu) -> u8 {
            if let (0..=239, 1..=64) = (ppu.current_scanline, ppu.current_scanline_dot) {
                // if on dots/cycles 1-64 of a visible scanline, return 0xff
                return 0xff;
            }
            let mut byte = ppu.primary_oam.get_byte(ppu.oamaddr);
            if ppu.oamaddr % 4 == 2 {
                // if 'byte' is a sprite attribute byte, clear bits 2-4
                byte &= 0b11100011;
            }
            byte
        }
        // ppudata reads are buffered: non-palette reads return the
        // previous read's value, palette reads return directly.
        fn read_ppudata(ppu: &mut Ppu, bus: &mut dyn PpuAddressBus, cpu: &mut cpu::Cpu) -> u8 {
            let val = if (ppu.current_vram_addr.inner >> 8) == 0b111111 {
                // read directly from vram if address is in range
                // 0x3f00-0x3fff (palette ram)
                let val = bus.read(ppu.current_vram_addr.get_addr(), ppu.cycle_count, cpu);
                // store value at mirrored address (down to 0x2f00-0x2fff)
                // in read buffer
                ppu.ppudata_read_buffer = bus.read(
                    ppu.current_vram_addr.get_addr() & !0x1000,
                    ppu.cycle_count,
                    cpu,
                );
                val
            } else {
                // read from read buffer if address is in range 0-0x3eff
                let val = ppu.ppudata_read_buffer;
                ppu.ppudata_read_buffer =
                    bus.read(ppu.current_vram_addr.get_addr(), ppu.cycle_count, cpu);
                val
            };
            if !ppu.is_currently_rendering() {
                // if not currently rendering, increment normally
                ppu.increment_vram_addr();
                // set address on address bus equal to 'current_vram_addr'
                bus.set_address(ppu.current_vram_addr.inner, ppu.cycle_count, cpu);
            } else {
                // if currently rendering, increment the bits of the address
                // corresponding to the y position and coarse x position (this
                // is afaik unintended behavior)
                ppu.increment_vram_addr_coarse_x();
                ppu.increment_vram_addr_y();
            }
            val
        }
    }
/// Writes to one of the ppu registers mapped into the cpu memory map at
/// 0x2000-0x2007 ('index' is the register offset, i.e. address - 0x2000).
///
/// Writes to ppustatus (index 2) are ignored and indices above 7 are
/// no-ops. The per-register logic lives in the nested helper functions
/// below.
pub fn write_register_by_index(
    &mut self,
    index: u8,
    val: u8,
    cpu: &mut cpu::Cpu,
    bus: &mut dyn PpuAddressBus,
) {
    {
        match index {
            // ppuctrl
            0 => write_ppuctrl(self, val, cpu),
            // ppumask
            1 => write_ppumask(self, val, bus, cpu),
            // ppustatus, ignore attemps to write
            2 => return,
            // oamaddr
            3 => self.oamaddr = val,
            // oamdata
            4 => write_oamdata(self, val),
            // ppuscroll
            5 => write_ppuscroll(self, val),
            // ppuaddr
            6 => write_ppuaddr(self, val, bus, cpu),
            // ppudata
            7 => write_ppudata(self, val, bus, cpu),
            _ => (),
        }
    }
    fn write_ppuctrl(ppu: &mut Ppu, val: u8, cpu: &mut cpu::Cpu) {
        // set bits 10-11 of 'temp_vram_addr' equal to the low 2 bits of 'val'
        ppu.temp_vram_addr.set_nametable_select(val & 0b11);
        // true if nmi_enable bit went from 1 to 0 or 0 to 1
        let nmi_toggled = ((ppu.ppuctrl ^ val) >> 7) != 0;
        ppu.ppuctrl = val;
        // every register write also refreshes the low ppustatus bits
        ppu.set_ppustatus_low_bits(val);
        // enabling nmi while the vblank flag is already set raises nmi now
        if nmi_toggled && ppu.is_vblank_nmi_enabled() && ppu.is_vblank() {
            cpu.bits.nmi.set(1);
            // HACK: delay nmi by one instruction
            cpu.bits.delay_nmi.set(1);
        }
    }
    fn write_ppumask(ppu: &mut Ppu, val: u8, bus: &mut dyn PpuAddressBus, cpu: &mut cpu::Cpu) {
        ppu.ppumask = val;
        // keep the ppu address bus in sync while not rendering
        if !ppu.is_currently_rendering() {
            bus.set_address(ppu.current_vram_addr.inner, ppu.cycle_count, cpu);
        }
    }
    fn write_oamdata(ppu: &mut Ppu, val: u8) {
        if ppu.is_currently_rendering() {
            // ignore attemps to write when rendering
            return;
        }
        ppu.write_to_oam_and_increment_addr(val);
        ppu.set_ppustatus_low_bits(val);
    }
    fn write_ppuscroll(ppu: &mut Ppu, val: u8) {
        // low bits toggle = 0 => x coordinate is being written
        if !ppu.bits.low_bits_toggle.is_true() {
            // write low 3 bits (fine x) to 'ppu.fine_x_scroll'
            ppu.bits.fine_x_scroll.set(val & 0b111);
            // write high 5 bits (coarse x) to low 5 bits
            // of temporary vram address register
            ppu.temp_vram_addr.set_coarse_x(val >> 3);
        }
        // low bits toggle = 1 => y coordinate is being written
        else {
            // write high 5 bits (coarse y) to
            // bits 5-10 of 'temp_vram_addr'
            ppu.temp_vram_addr.set_coarse_y(val >> 3);
            // write low 3 bits (fine y) to bits
            // 12-14 of 'temp_vram_addr'
            ppu.temp_vram_addr.set_fine_y(val & 0b111);
        }
        ppu.set_ppustatus_low_bits(val);
        // ppuscroll and ppuaddr share this write latch
        ppu.toggle_low_bits_toggle();
    }
    fn write_ppuaddr(ppu: &mut Ppu, val: u8, bus: &mut dyn PpuAddressBus, cpu: &mut cpu::Cpu) {
        let mut temp_vram_addr_bytes = ppu.temp_vram_addr.inner.to_le_bytes();
        if !ppu.bits.low_bits_toggle.is_true() {
            // write low 6 bits into bits 8-13 of temporary
            // vram address register while clearing bit 14
            temp_vram_addr_bytes[1] = val & 0b0111111;
            // store back
            ppu.temp_vram_addr.inner = u16::from_le_bytes(temp_vram_addr_bytes);
        } else {
            // set all low bits of temporary vram register equal to 'val'
            temp_vram_addr_bytes[0] = val;
            ppu.temp_vram_addr.inner = u16::from_le_bytes(temp_vram_addr_bytes);
            // set 'current_vram_addr' equal to 'temp_vram_addr'
            ppu.current_vram_addr = ppu.temp_vram_addr;
            if !ppu.is_currently_rendering() {
                // if not currently rendering, the address bus
                // should be set to 'current_vram_addr'
                bus.set_address(ppu.current_vram_addr.inner, ppu.cycle_count, cpu);
            }
        }
        ppu.set_ppustatus_low_bits(val);
        ppu.toggle_low_bits_toggle();
    }
    fn write_ppudata(ppu: &mut Ppu, val: u8, bus: &mut dyn PpuAddressBus, cpu: &mut cpu::Cpu) {
        bus.write(ppu.current_vram_addr.get_addr(), val, ppu.cycle_count, cpu);
        ppu.set_ppustatus_low_bits(val);
        // increment 'current_vram_addr' (same as when reading ppudata)
        if !ppu.is_currently_rendering() {
            ppu.increment_vram_addr();
            bus.set_address(ppu.current_vram_addr.inner, ppu.cycle_count, cpu);
        } else {
            ppu.increment_vram_addr_coarse_x();
            ppu.increment_vram_addr_y();
        }
    }
}
/// Runs the ppu until it has (approximately) caught up with the cpu.
/// The ppu clock runs at three times the cpu clock, so the target is
/// three ppu cycles per elapsed cpu cycle.
pub fn catch_up(
    &mut self,
    cpu: &mut cpu::Cpu,
    bus: &mut dyn PpuAddressBus,
    framebuffer: &[Cell<u32>; 256 * 240],
) {
    let target = cpu.cycle_count as i32 * 3;
    loop {
        if self.cycle_count >= target {
            break;
        }
        self.step(cpu, bus, framebuffer);
    }
}
// steps the ppu for one tile worth of cycles or less (1-8 cycles).
// only used internally by the ppu, in 'Ppu::catch_up()'
//
// dispatches on the current scanline: pre-render (-1) and visible
// (0-239) lines do sprite evaluation, tile fetching and pixel output;
// line 240 is idle; lines 241-260 handle the vblank flag and nmi
fn step(
    &mut self,
    cpu: &mut cpu::Cpu,
    bus: &mut dyn PpuAddressBus,
    framebuffer: &[Cell<u32>; 256 * 240],
) {
    // NOTE: this function is split into multiple subfunctions
    {
        match self.current_scanline {
            // pre-render and visible scanlines
            -1..=239 => step_pre_render_or_visible_line(self, bus, framebuffer, cpu),
            // idle scanline
            240 => step_idle_line(self),
            // vblank 'scanlines'
            241..=260 => step_vblank_line(self, cpu),
            _ => (),
        };
    }
    fn step_pre_render_or_visible_line(
        ppu: &mut Ppu,
        bus: &mut dyn PpuAddressBus,
        framebuffer: &[Cell<u32>; 256 * 240],
        cpu: &mut cpu::Cpu,
    ) {
        match (ppu.current_scanline_dot, ppu.current_scanline) {
            (0, sl) => {
                ppu.current_scanline_dot += 1;
                // if rendering is enabled and we're on the first visible scanline,
                // only increment cycle count if current frame is even-numbered (idle
                // cycle is skipped on odd frames)
                if sl == 0 && (ppu.is_background_enable() || ppu.is_sprites_enable()) {
                    ppu.cycle_count += ppu.bits.even_frame.get() as i32;
                } else {
                    ppu.cycle_count += 1;
                }
                // reset 'sprites_found', 'eval_done' and 'current_sprite_idx' before
                // use (in dots 65-256)
                ppu.sprite_state.sprites_found = 0;
                ppu.sprite_state.eval_done = false;
                // TODO: obscure behavior where 'current_sprite_idx' is set equal to
                // the current value of oamaddr (which may not be zero) at the start
                // of sprite evaluation, meaning a litle later than this
                ppu.sprite_state.current_sprite_idx = 0;
            }
            // NOTE: though we often match on contiguous ranges of dots (x..=y) while
            // stepping, the ppu is usually only advanced 8 cycles/dots at a time. this
            // means that most of the dots in the range are never actually hit.
            (1..=256, sl) => {
                match sl {
                    // pre-render line
                    -1 => {
                        // OPTIMIZE: ideally, we would find some way of moving this
                        // block out into the outer match and have it fallthrough
                        // into here instead
                        if ppu.current_scanline_dot == 1 {
                            // clear vblank, sprite zero hit and sprite overflow flags
                            ppu.set_vblank(false);
                            ppu.set_sprite_zero_hit(false);
                            ppu.set_sprite_overflow(false);
                        }
                    }
                    // visible lines
                    _ => {
                        if ppu.current_scanline_dot >= 65
                            && (ppu.is_sprites_enable() || ppu.is_background_enable())
                        {
                            // evaluate sprite on next scanline
                            for _ in 0..4 {
                                let sprite_overflow =
                                    ppu.sprite_state.eval_next_scanline_sprite(
                                        ppu.is_sprite_overflow(),
                                        ppu.get_sprite_size(),
                                        &ppu.primary_oam,
                                        &mut ppu.secondary_oam,
                                        ppu.current_scanline,
                                        ppu.current_scanline_dot,
                                    );
                                if sprite_overflow {
                                    ppu.set_sprite_overflow(true);
                                }
                            }
                        }
                        // draw a row of 8 pixels horizontally
                        let sprite_zero_hit = ppu.draw_8_pixels(framebuffer, bus);
                        if sprite_zero_hit {
                            ppu.set_sprite_zero_hit(true);
                        }
                    }
                }
                if ppu.is_background_enable() || ppu.is_sprites_enable() {
                    // if last pixel drawn was 256th (end of scanline)
                    if ppu.current_scanline_dot + 8 == 257 {
                        // increment fine y
                        // NOTE: may wish to increment x here as well (what the ppu does irl)
                        ppu.increment_vram_addr_y();
                    } else {
                        // shift previously drawn tile data leftwards
                        // in 'bg_state' shift registers
                        ppu.bg_state.shift_tile_data_by_8();
                        // fill rightmost 8 bits of 'bg_state' shift registers
                        // with tile data for the next 8 pixels to draw
                        ppu.bg_state.fetch_current_tile_data(
                            ppu.cycle_count,
                            ppu.get_background_pattern_table_addr(),
                            ppu.current_vram_addr,
                            bus,
                            cpu,
                        );
                        // increment 'current_vram_addr' by one tile horizontally
                        ppu.increment_vram_addr_coarse_x();
                    }
                }
                ppu.cycle_count += 8;
                ppu.current_scanline_dot += 8;
            }
            (257, _) => {
                ppu.oamaddr = 0;
                // set current sprite to zero so it can be re-used
                // in 'fetch_next_scanline_sprite_data()'
                ppu.sprite_state.current_sprite_idx = 0;
                debug_assert!(ppu.sprite_state.sprites_found <= 8);
                // fetch sprite data for the sprites found previously (during dots 65-256)
                if ppu.is_sprites_enable() || ppu.is_background_enable() {
                    ppu.sprite_state.fetch_next_scanline_sprite_data(
                        &ppu.secondary_oam,
                        ppu.get_sprite_size(),
                        ppu.current_scanline,
                        ppu.current_scanline_dot,
                        ppu.get_8x8_sprite_pattern_table_addr(),
                        ppu.cycle_count,
                        bus,
                        cpu,
                    );
                }
                // copy the horizontal scroll bits back from 'temp_vram_addr'
                if ppu.is_sprites_enable() || ppu.is_background_enable() {
                    ppu.transfer_temp_horizontal_bits();
                }
                ppu.cycle_count += 8;
                ppu.current_scanline_dot += 8;
            }
            (258..=320, sl) => {
                ppu.oamaddr = 0;
                // continue fetching sprite data
                if ppu.is_sprites_enable() || ppu.is_background_enable() {
                    ppu.sprite_state.fetch_next_scanline_sprite_data(
                        &ppu.secondary_oam,
                        ppu.get_sprite_size(),
                        ppu.current_scanline,
                        ppu.current_scanline_dot,
                        ppu.get_8x8_sprite_pattern_table_addr(),
                        ppu.cycle_count,
                        bus,
                        cpu,
                    );
                }
                if sl == -1
                    // NOTE: this should actually happen at dot 280
                    && ppu.current_scanline_dot == 281
                    && (ppu.is_sprites_enable() || ppu.is_background_enable())
                {
                    ppu.transfer_temp_vert_bits();
                }
                ppu.cycle_count += 8;
                ppu.current_scanline_dot += 8;
            }
            (321..=327, _) => {
                ppu.cycle_count += 7;
                ppu.current_scanline_dot += 7;
            }
            (328..=336, _) => {
                // pre-fetch tile data for the first two tiles of the next line
                if ppu.is_background_enable() || ppu.is_sprites_enable() {
                    ppu.bg_state.shift_tile_data_by_8();
                    ppu.bg_state.fetch_current_tile_data(
                        ppu.cycle_count,
                        ppu.get_background_pattern_table_addr(),
                        ppu.current_vram_addr,
                        bus,
                        cpu,
                    );
                    ppu.increment_vram_addr_coarse_x();
                }
                match ppu.current_scanline_dot {
                    328 => {
                        ppu.cycle_count += 8;
                        ppu.current_scanline_dot += 8;
                    }
                    336 => {
                        ppu.cycle_count += 5;
                        ppu.current_scanline_dot = 0;
                        ppu.current_scanline += 1;
                        if ppu.current_scanline == 240 {
                            ppu.bits.frame_done.set(1);
                        }
                    }
                    _ => (),
                }
            }
            _ => (),
        }
    }
    fn step_idle_line(ppu: &mut Ppu) {
        // nothing happens on line 240; advance in two large strides
        match ppu.current_scanline_dot {
            336 => {
                ppu.cycle_count += 5;
                ppu.current_scanline_dot = 0;
                ppu.current_scanline = 241;
            }
            0 | 168 => {
                ppu.cycle_count += 168;
                ppu.current_scanline_dot += 168;
            }
            _ => (),
        }
    }
    fn step_vblank_line(ppu: &mut Ppu, cpu: &mut cpu::Cpu) {
        match ppu.current_scanline_dot {
            0 => {
                ppu.cycle_count += 1;
                ppu.current_scanline_dot += 1;
            }
            1 => {
                // NOTE: setting of vblank flag may be suppressed by reads to ppustatus
                if (ppu.current_scanline == 241) && !ppu.bits.suppress_vblank_flag.is_true() {
                    ppu.set_vblank(true);
                }
                ppu.cycle_count += 1;
                ppu.current_scanline_dot += 1;
            }
            2 => {
                ppu.cycle_count += 1;
                ppu.current_scanline_dot += 1;
            }
            3 => {
                // NOTE: we wait until dot 3 to assert nmi (even though the vblank flag
                // is set on dot 1), in order to more accurately emulate nmi/vblank flag
                // suppression. this way, any reads to ppustatus during dot 2 or 3 (before
                // this chunk of code is executed) will prevent nmi from being asserted
                if ppu.current_scanline == 241 {
                    if ppu.is_vblank_nmi_enabled() && ppu.is_vblank() {
                        cpu.bits.nmi.set(1);
                    }
                }
                ppu.cycle_count += 5;
                ppu.current_scanline_dot += 5;
            }
            336 => {
                ppu.cycle_count += 5;
                ppu.current_scanline_dot = 0;
                if ppu.current_scanline == 260 {
                    ppu.toggle_even_frame();
                    // reset scanline count
                    ppu.current_scanline = -1;
                    // ensure next frame's vblank flag will not be suppressed
                    ppu.bits.suppress_vblank_flag.set(0);
                } else {
                    ppu.current_scanline += 1;
                }
            }
            _ => {
                ppu.cycle_count += 8;
                ppu.current_scanline_dot += 8;
            }
        }
    }
}
// draws 8 pixels, without making any state changes to 'self'.
// returns whether sprite zero was hit
//
// when both background and sprites are disabled, the 8 pixels are filled
// with the backdrop color instead (or, if 'current_vram_addr' points into
// palette ram, with that palette entry - the "background palette hack")
fn draw_8_pixels(
    &self,
    framebuffer: &[Cell<u32>; 256 * 240],
    bus: &mut dyn PpuAddressBus,
) -> bool {
    // NOTE: this function is also split into subfunctions
    {
        if self.is_background_enable() || self.is_sprites_enable() {
            return draw_8_pixels_bg_and_sprites(self, framebuffer, bus);
        } else {
            draw_8_pixels_backdrop_color(self, framebuffer, bus);
            return false;
        }
    }
    fn draw_8_pixels_bg_and_sprites(
        ppu: &Ppu,
        framebuffer: &[Cell<u32>; 256 * 240],
        bus: &dyn PpuAddressBus,
    ) -> bool {
        let mut sprite_zero_hit = false;
        for i in 0..8 {
            let tile_offset = i + ppu.bits.fine_x_scroll.get();
            let bg_color_idx = match (
                ppu.is_background_enable(),
                ppu.current_scanline_dot + i as u16,
                ppu.is_background_left_column_enable(),
            ) {
                // set index to zero if bg is disabled
                (false, _, _) => 0,
                // or if on dots 1-8 and bg is disabled for this area
                (_, 1..=8, false) => 0,
                _ => {
                    // combine the two bitplane shift registers into a 2-bit index
                    let lo = ((ppu.bg_state.tile_bitplanes_lo.to_u16() >> (15 - tile_offset))
                        & 1) as u8;
                    let hi = (((ppu.bg_state.tile_bitplanes_hi.to_u16() >> (15 - tile_offset))
                        << 1)
                        & 2) as u8;
                    lo | hi
                }
            };
            // pick the palette bits belonging to whichever of the two
            // buffered tiles this pixel falls in
            let bg_palette_idx = if tile_offset > 7 {
                ppu.bg_state.tile_palette_indices & 0b11
            } else {
                (ppu.bg_state.tile_palette_indices & 0b1100) >> 2
            };
            let mut sprite_zero = false;
            let pixel_color = match (
                ppu.is_sprites_enable(),
                ppu.current_scanline_dot + i as u16,
                ppu.is_sprites_left_column_enable(),
            ) {
                // draw bg color if sprites are disabled
                (false, _, _) => calc_pixel_color(ppu, bg_palette_idx, bg_color_idx, bus),
                // draw bg color if on dots 1-8 and sprite
                // drawing is disabled for the first 8 pixels
                (_, 1..=8, false) => calc_pixel_color(ppu, bg_palette_idx, bg_color_idx, bus),
                // otherwise, search for an active, non-transparent sprite at the current dot
                _ => match ppu
                    .sprite_state
                    .get_sprite_at_dot_info(ppu.current_scanline_dot + i as u16)
                {
                    // draw bg color if no sprite was found
                    None => calc_pixel_color(ppu, bg_palette_idx, bg_color_idx, bus),
                    Some(info) => {
                        sprite_zero = info.is_sprite_zero;
                        // the sprite wins if it has front priority or the
                        // background pixel is transparent
                        if info.is_in_front || bg_color_idx == 0 {
                            calc_pixel_color(ppu, info.palette_index, info.color_index, bus)
                        } else {
                            calc_pixel_color(ppu, bg_palette_idx, bg_color_idx, bus)
                        }
                    }
                },
            };
            // sprite zero hit needs an opaque bg pixel and screen x != 255
            if sprite_zero
                && bg_color_idx != 0
                && (ppu.current_scanline_dot + i as u16 - 1) != 0xff
            {
                sprite_zero_hit = true;
            }
            let screen_x = (ppu.current_scanline_dot - 1) as usize + i as usize;
            let screen_y = ppu.current_scanline as usize;
            // OPTIMIZE: unchecked indexing
            framebuffer[screen_y * 256 + screen_x].set(pixel_color);
        }
        sprite_zero_hit
    }
    // draws 8 pixels of backdrop color (or if 'current_vram_addr'
    // >= 0x3f00, draws the color 'current_vram_addr' points to)
    fn draw_8_pixels_backdrop_color(
        ppu: &Ppu,
        framebuffer: &[Cell<u32>; 256 * 240],
        bus: &dyn PpuAddressBus,
    ) {
        for i in 0..8 {
            let pixel_color = {
                let bg_color_idx = if ppu.current_vram_addr.get_addr() >= 0x3f00 {
                    logln!("background palette hack triggered");
                    (ppu.current_vram_addr.get_addr() & 0b11111) as u8
                } else {
                    0
                };
                let bg_color_byte = bus.read_palette_memory(bg_color_idx);
                palette::COLOR_LUT.get(
                    bg_color_byte,
                    ppu.is_greyscale_enabled(),
                    ppu.ppumask >> 5,
                )
            };
            let screen_x = (ppu.current_scanline_dot - 1 + i as u16) as usize;
            let screen_y = ppu.current_scanline as usize;
            framebuffer[screen_y * 256 + screen_x].set(pixel_color);
        }
    }
    // looks up the final output color for a (palette, color) index pair;
    // color index 0 always maps to palette entry 0 (the shared backdrop)
    fn calc_pixel_color(
        ppu: &Ppu,
        palette_idx: u8,
        color_idx: u8,
        bus: &dyn PpuAddressBus,
    ) -> u32 {
        let final_idx = if color_idx == 0 {
            0
        } else {
            (((palette_idx << 2) as u16) | (color_idx as u16)) as u8
        };
        let color_byte = bus.read_palette_memory(final_idx);
        palette::COLOR_LUT.get(color_byte, ppu.is_greyscale_enabled(), ppu.ppumask >> 5)
    }
}
/// Steps 'current_vram_addr' forward by 1 or 32 (masked to 14 bits),
/// depending on the vram address increment mode bit in ppuctrl.
fn increment_vram_addr(&mut self) {
    let step: u16 = match self.get_vram_addr_increment() {
        // increment-by-32 mode: move down one tile row
        true => 32,
        // otherwise move one tile to the right
        false => 1,
    };
    self.current_vram_addr.inner = (self.current_vram_addr.inner + step) & 0x3fff;
}
// increments the fine y scroll bits in 'current_vram_addr',
// potentially overflowing into the coarse y scroll bits
//
// NOTE: the order of the checks matters - wrapping at coarse y = 29
// switches nametables, while wrapping at coarse y = 31 does not
fn increment_vram_addr_y(&mut self) {
    if self.current_vram_addr.get_fine_y() == 0b111 {
        // clear fine y bits if fine y bits = max
        self.current_vram_addr.set_fine_y(0);
        if self.current_vram_addr.get_coarse_y() == 29 {
            // if carry from fine y bits = 1 and coarse y bits
            // = 29 (there are 29 rows of tiles in a frame),
            // clear all coarse y bits and overflow into bit
            // 11 to move to next nametable vertically
            self.current_vram_addr.set_coarse_y(0);
            self.current_vram_addr.inner ^= 0b100000000000;
        } else if self.current_vram_addr.get_coarse_y() == 0b11111 {
            // if coarse y = maximum, wrap the value without overflowing into
            // bit 11 and switching nametables (unintended behavior afaik)
            self.current_vram_addr.set_coarse_y(0);
        } else {
            // increment coarse y bits of 'current_vram_addr'
            self.current_vram_addr.inner += 1 << 5;
        }
    } else {
        // increment fine y bits normally
        self.current_vram_addr.inner += 1 << 12;
    }
}
/// Moves 'current_vram_addr' one tile to the right (coarse x). When the
/// right edge of the nametable is reached, coarse x wraps to zero and
/// the low nametable select bit flips to the horizontally adjacent one.
fn increment_vram_addr_coarse_x(&mut self) {
    let coarse_x = self.current_vram_addr.get_coarse_x();
    if coarse_x == 31 {
        // at the last tile column: reset coarse x and flip bit 10
        // (low nametable select)
        self.current_vram_addr.set_coarse_x(0);
        self.current_vram_addr.inner ^= 0b10000000000;
    } else {
        self.current_vram_addr.inner += 1;
    }
}
/// Copies the vertical scroll components - coarse y, fine y and the
/// high nametable select bit (bit 11) - from 'temp_vram_addr' into
/// 'current_vram_addr', leaving all other bits untouched.
fn transfer_temp_vert_bits(&mut self) {
    // bit 11 = high nametable select
    let high_nt = self.temp_vram_addr.inner & 0x800;
    self.current_vram_addr.inner = (self.current_vram_addr.inner & !0x800) | high_nt;
    let coarse_y = self.temp_vram_addr.get_coarse_y();
    let fine_y = self.temp_vram_addr.get_fine_y();
    self.current_vram_addr.set_coarse_y(coarse_y);
    self.current_vram_addr.set_fine_y(fine_y);
}
/// Copies the horizontal scroll components - coarse x and the low
/// nametable select bit (bit 10) - from 'temp_vram_addr' into
/// 'current_vram_addr', leaving all other bits untouched.
fn transfer_temp_horizontal_bits(&mut self) {
    // bit 10 = low nametable select
    let low_nt = self.temp_vram_addr.inner & 0x400;
    self.current_vram_addr.inner = (self.current_vram_addr.inner & !0x400) | low_nt;
    let coarse_x = self.temp_vram_addr.get_coarse_x();
    self.current_vram_addr.set_coarse_x(coarse_x);
}
}
// second impl block to separate private getter/setter/convenience functions from the rest
impl Ppu {
    /// Flips the even/odd frame flag.
    fn toggle_even_frame(&mut self) {
        let flipped = !self.bits.even_frame.is_true();
        self.bits.even_frame.set(flipped as u8);
    }
    /// Flips the write latch shared by ppuscroll and ppuaddr.
    fn toggle_low_bits_toggle(&mut self) {
        let flipped = !self.bits.low_bits_toggle.is_true();
        self.bits.low_bits_toggle.set(flipped as u8);
    }
    #[cfg(test)]
    fn get_base_nametable_addr(&self) -> u16 {
        // nametable select = low 2 bits of ppuctrl; each nametable spans 0x400 bytes
        0x2000 + ((self.ppuctrl & 0x03) as u16) * 0x400
    }
    // TODO: enum for clarity
    fn get_vram_addr_increment(&self) -> bool {
        self.ppuctrl & 0x04 != 0
    }
    /// Base pattern table address for 8x8 sprites (0x0000 or 0x1000).
    fn get_8x8_sprite_pattern_table_addr(&self) -> u16 {
        if self.ppuctrl & 0x08 == 0 {
            0x0000
        } else {
            0x1000
        }
    }
    /// Base pattern table address for background tiles (0x0000 or 0x1000).
    fn get_background_pattern_table_addr(&self) -> u16 {
        if self.ppuctrl & 0x10 == 0 {
            0x0000
        } else {
            0x1000
        }
    }
    fn get_sprite_size(&self) -> SpriteSize {
        match self.ppuctrl & 0x20 {
            0 => SpriteSize::S8x8,
            _ => SpriteSize::S8x16,
        }
    }
    fn is_vblank_nmi_enabled(&self) -> bool {
        self.ppuctrl & 0x80 != 0
    }
    fn is_greyscale_enabled(&self) -> bool {
        self.ppumask & 0x01 != 0
    }
    // whether background will be displayed in the leftmost 8 pixel columns
    fn is_background_left_column_enable(&self) -> bool {
        self.ppumask & 0x02 != 0
    }
    // whether sprites will be displayed in the leftmost 8 pixel columns
    fn is_sprites_left_column_enable(&self) -> bool {
        self.ppumask & 0x04 != 0
    }
    // whether background will be displayed
    fn is_background_enable(&self) -> bool {
        self.ppumask & 0x08 != 0
    }
    // whether sprites will be displayed
    fn is_sprites_enable(&self) -> bool {
        self.ppumask & 0x10 != 0
    }
    fn is_sprite_overflow(&self) -> bool {
        self.ppustatus & 0x20 != 0
    }
    fn set_sprite_overflow(&mut self, overflow: bool) {
        if overflow {
            self.ppustatus |= 0x20;
        } else {
            self.ppustatus &= !0x20;
        }
    }
    fn set_sprite_zero_hit(&mut self, hit: bool) {
        if hit {
            self.ppustatus |= 0x40;
        } else {
            self.ppustatus &= !0x40;
        }
    }
    fn is_vblank(&self) -> bool {
        self.ppustatus & 0x80 != 0
    }
    fn set_vblank(&mut self, vblank: bool) {
        if vblank {
            self.ppustatus |= 0x80;
        } else {
            self.ppustatus &= !0x80;
        }
    }
    // NOTE(review): this includes the idle scanline (240) in "rendering" -
    // everything below 241 counts, matching the original logic
    fn is_currently_rendering(&self) -> bool {
        self.current_scanline < 241 && (self.is_sprites_enable() || self.is_background_enable())
    }
}
|
use ruma_events_macros::event_content_enum;
// NOTE(review): both invocations below pass event names that are not valid
// dotted paths for the macro ("m.not.a.path" / "not.a.path"), so this file
// appears to be a compile-fail test exercising the macro's name validation.
// The invocation contents are intentionally left untouched.
event_content_enum! {
    name: InvalidEvent,
    events: [
        "m.not.a.path",
    ]
}
// same check, but without the leading "m." prefix
event_content_enum! {
    name: InvalidEvent,
    events: [
        "not.a.path",
    ]
}
fn main() {}
|
pub mod config {
    use r2d2_sqlite::SqliteConnectionManager;
    /// Application configuration: HTTP bind address/port plus the SQLite
    /// connection manager built from the configured database path.
    pub struct MyConfig {
        // address the HTTP server binds to
        pub server_address: String,
        // port the HTTP server listens on
        pub server_port: u16,
        // r2d2 connection manager pointing at the configured SQLite file
        pub sqlite_manager: SqliteConnectionManager,
    }
    impl MyConfig {
        /// Loads the configuration from a `Settings` file (via the `config`
        /// crate) in the working directory.
        ///
        /// Panics if the file cannot be read or any of the keys
        /// `server.address`, `server.port`, `db.sqlite_path` is missing or
        /// has the wrong type.
        pub fn new() -> Self {
            // NOTE: `config::` here resolves to the external `config` crate,
            // not this module
            let mut settings = config::Config::default();
            settings.merge(config::File::with_name("Settings")).unwrap();
            let server_address: String = settings.get("server.address").unwrap();
            let server_port: u16 = settings.get("server.port").unwrap();
            let sqlite_path: String = settings.get("db.sqlite_path").unwrap();
            let sqlite_manager = SqliteConnectionManager::file(sqlite_path);
            Self {
                server_address,
                server_port,
                sqlite_manager,
            }
        }
    }
}
pub mod db {
    use r2d2::PooledConnection;
    use r2d2_sqlite::SqliteConnectionManager;
    use r2d2_sqlite::rusqlite::Error;
    /// Inserts a row into the `users` table.
    ///
    /// Takes ownership of the pooled connection; it returns to the pool when
    /// dropped at the end of the call. `$1`/`$2` are SQLite parameter names
    /// bound positionally by the array argument.
    ///
    /// Returns the number of rows inserted, or the underlying rusqlite error
    /// (e.g. on a duplicate primary key).
    pub fn useradd(conn: PooledConnection<SqliteConnectionManager>, id: u32, name: String) -> Result<usize,Error> {
        conn.execute(
            "INSERT INTO users (id, name) VALUES ($1, $2)",
            [id.to_string(), name],
        )
    }
}
pub mod headers {
    use actix_web::{web, HttpResponse};
    use r2d2::Pool;
    use r2d2_sqlite::SqliteConnectionManager;
    use crate::db;
    /// Simple liveness handler; always responds 200 with a greeting body.
    pub async fn index() -> HttpResponse {
        HttpResponse::Ok().body("Hello world!")
    }
    /// Inserts a hard-coded test user (id 1, name "hello").
    ///
    /// NOTE(review): both `pool.get()` and the insert result are unwrapped,
    /// so pool exhaustion or a duplicate id panics the handler - consider
    /// mapping these to error responses instead.
    pub async fn testdb(pool: web::Data<Pool<SqliteConnectionManager>>) -> HttpResponse {
        let conn = pool.get().unwrap();
        // run the blocking DB call off the async worker thread
        web::block(|| {
            db::useradd(conn, 1, "hello".to_string())
        }).await.unwrap();
        HttpResponse::Ok().into()
    }
}
|
use std::{error::Error as Err, fmt};
/// All errors this crate can produce while authenticating and fetching
/// oauth tokens. Variants gated on the "jwt" feature only exist when RSA
/// signing support is compiled in.
#[derive(Debug)]
pub enum Error {
    /// The private_key field in the [Service Account Key](https://cloud.google.com/iam/docs/creating-managing-service-account-keys)
    /// is invalid and cannot be parsed
    #[cfg(feature = "jwt")]
    InvalidKeyFormat,
    /// Unable to deserialize the base64 encoded RSA key
    Base64Decode(base64::DecodeError),
    /// An error occurred trying to create an HTTP request
    Http(http::Error),
    /// Failed to authenticate and retrieve an oauth token, and were unable to
    /// deserialize a more exact reason from the error response
    HttpStatus(http::StatusCode),
    /// Failed to de/serialize JSON
    Json(serde_json::Error),
    /// Failed to authenticate and retrieve an oauth token
    Auth(AuthError),
    /// The RSA key seems valid, but is unable to sign a payload
    #[cfg(feature = "jwt")]
    InvalidRsaKey(ring::error::Unspecified),
    /// The RSA key is invalid and cannot be used to sign
    #[cfg(feature = "jwt")]
    InvalidRsaKeyRejected(ring::error::KeyRejected),
    /// A mutex has been poisoned due to a panic while a lock was held
    Poisoned,
}
impl fmt::Display for Error {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
#![allow(clippy::enum_glob_use)]
use Error::*;
match self {
#[cfg(feature = "jwt")]
InvalidKeyFormat => f.write_str("The key format is invalid or unknown"),
Base64Decode(err) => write!(f, "{}", err),
Http(err) => write!(f, "{}", err),
HttpStatus(sc) => write!(f, "HTTP error status: {}", sc),
Json(err) => write!(f, "{}", err),
Auth(err) => write!(f, "{}", err),
#[cfg(feature = "jwt")]
InvalidRsaKey(_err) => f.write_str("RSA key is invalid"),
#[cfg(feature = "jwt")]
InvalidRsaKeyRejected(err) => write!(f, "RSA key is invalid: {}", err),
Poisoned => f.write_str("A mutex is poisoned"),
}
}
}
impl std::error::Error for Error {
    // NOTE: the deprecated `cause` method is intentionally not overridden.
    // Its default implementation forwards to `source`, so the previous
    // duplicate match body was redundant.
    /// Returns the wrapped underlying error for the variants that carry one.
    fn source(&self) -> Option<&(dyn Err + 'static)> {
        use Error::{Auth, Base64Decode, Http, Json};
        match self {
            Base64Decode(err) => Some(err as &dyn Err),
            Http(err) => Some(err as &dyn Err),
            Json(err) => Some(err as &dyn Err),
            Auth(err) => Some(err as &dyn Err),
            _ => None,
        }
    }
}
// allows `?` on base64 decoding results
impl From<base64::DecodeError> for Error {
    fn from(err: base64::DecodeError) -> Self {
        Self::Base64Decode(err)
    }
}
impl From<http::Error> for Error {
fn from(e: http::Error) -> Self {
Error::Http(e)
}
}
impl From<serde_json::Error> for Error {
fn from(e: serde_json::Error) -> Self {
Error::Json(e)
}
}
/// Error payload deserialized from an oauth token endpoint's error
/// response; both fields are optional since servers may omit them.
#[derive(serde::Deserialize, Debug)]
pub struct AuthError {
    /// Top level error type
    pub error: Option<String>,
    /// More specific details on the error
    pub error_description: Option<String>,
}
impl fmt::Display for AuthError {
    /// Renders the error code followed by the optional description.
    ///
    /// Fix: a separating space is inserted before "desc:" - previously the
    /// code and description ran together (e.g. "invalid_grantdesc: ...").
    /// Writes nothing when `error` is absent, even if a description exists.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        if let Some(ref err) = self.error {
            write!(f, "{}", err)?;
            if let Some(ref desc) = self.error_description {
                write!(f, " desc: {}", desc)?;
            }
        }
        Ok(())
    }
}
impl std::error::Error for AuthError {}
|
/// An enum to represent all characters in the Tifinagh block.
///
/// Covers the assigned code points U+2D30..=U+2D67 plus U+2D6F and U+2D70;
/// the unassigned range U+2D68..=U+2D6E has no variants.
#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq)]
pub enum Tifinagh {
    /// \u{2d30}: 'ⴰ'
    LetterYa,
    /// \u{2d31}: 'ⴱ'
    LetterYab,
    /// \u{2d32}: 'ⴲ'
    LetterYabh,
    /// \u{2d33}: 'ⴳ'
    LetterYag,
    /// \u{2d34}: 'ⴴ'
    LetterYaghh,
    /// \u{2d35}: 'ⴵ'
    LetterBerberAcademyYaj,
    /// \u{2d36}: 'ⴶ'
    LetterYaj,
    /// \u{2d37}: 'ⴷ'
    LetterYad,
    /// \u{2d38}: 'ⴸ'
    LetterYadh,
    /// \u{2d39}: 'ⴹ'
    LetterYadd,
    /// \u{2d3a}: 'ⴺ'
    LetterYaddh,
    /// \u{2d3b}: 'ⴻ'
    LetterYey,
    /// \u{2d3c}: 'ⴼ'
    LetterYaf,
    /// \u{2d3d}: 'ⴽ'
    LetterYak,
    /// \u{2d3e}: 'ⴾ'
    LetterTuaregYak,
    /// \u{2d3f}: 'ⴿ'
    LetterYakhh,
    /// \u{2d40}: 'ⵀ'
    LetterYah,
    /// \u{2d41}: 'ⵁ'
    LetterBerberAcademyYah,
    /// \u{2d42}: 'ⵂ'
    LetterTuaregYah,
    /// \u{2d43}: 'ⵃ'
    LetterYahh,
    /// \u{2d44}: 'ⵄ'
    LetterYaa,
    /// \u{2d45}: 'ⵅ'
    LetterYakh,
    /// \u{2d46}: 'ⵆ'
    LetterTuaregYakh,
    /// \u{2d47}: 'ⵇ'
    LetterYaq,
    /// \u{2d48}: 'ⵈ'
    LetterTuaregYaq,
    /// \u{2d49}: 'ⵉ'
    LetterYi,
    /// \u{2d4a}: 'ⵊ'
    LetterYazh,
    /// \u{2d4b}: 'ⵋ'
    LetterAhaggarYazh,
    /// \u{2d4c}: 'ⵌ'
    LetterTuaregYazh,
    /// \u{2d4d}: 'ⵍ'
    LetterYal,
    /// \u{2d4e}: 'ⵎ'
    LetterYam,
    /// \u{2d4f}: 'ⵏ'
    LetterYan,
    /// \u{2d50}: 'ⵐ'
    LetterTuaregYagn,
    /// \u{2d51}: 'ⵑ'
    LetterTuaregYang,
    /// \u{2d52}: 'ⵒ'
    LetterYap,
    /// \u{2d53}: 'ⵓ'
    LetterYu,
    /// \u{2d54}: 'ⵔ'
    LetterYar,
    /// \u{2d55}: 'ⵕ'
    LetterYarr,
    /// \u{2d56}: 'ⵖ'
    LetterYagh,
    /// \u{2d57}: 'ⵗ'
    LetterTuaregYagh,
    /// \u{2d58}: 'ⵘ'
    LetterAyerYagh,
    /// \u{2d59}: 'ⵙ'
    LetterYas,
    /// \u{2d5a}: 'ⵚ'
    LetterYass,
    /// \u{2d5b}: 'ⵛ'
    LetterYash,
    /// \u{2d5c}: 'ⵜ'
    LetterYat,
    /// \u{2d5d}: 'ⵝ'
    LetterYath,
    /// \u{2d5e}: 'ⵞ'
    LetterYach,
    /// \u{2d5f}: 'ⵟ'
    LetterYatt,
    /// \u{2d60}: 'ⵠ'
    LetterYav,
    /// \u{2d61}: 'ⵡ'
    LetterYaw,
    /// \u{2d62}: 'ⵢ'
    LetterYay,
    /// \u{2d63}: 'ⵣ'
    LetterYaz,
    /// \u{2d64}: 'ⵤ'
    LetterTawellemetYaz,
    /// \u{2d65}: 'ⵥ'
    LetterYazz,
    /// \u{2d66}: 'ⵦ'
    LetterYe,
    /// \u{2d67}: 'ⵧ'
    LetterYo,
    /// \u{2d6f}: 'ⵯ'
    ModifierLetterLabializationMark,
    /// \u{2d70}: '⵰'
    SeparatorMark,
}
impl Into<char> for Tifinagh {
fn into(self) -> char {
match self {
Tifinagh::LetterYa => 'ⴰ',
Tifinagh::LetterYab => 'ⴱ',
Tifinagh::LetterYabh => 'ⴲ',
Tifinagh::LetterYag => 'ⴳ',
Tifinagh::LetterYaghh => 'ⴴ',
Tifinagh::LetterBerberAcademyYaj => 'ⴵ',
Tifinagh::LetterYaj => 'ⴶ',
Tifinagh::LetterYad => 'ⴷ',
Tifinagh::LetterYadh => 'ⴸ',
Tifinagh::LetterYadd => 'ⴹ',
Tifinagh::LetterYaddh => 'ⴺ',
Tifinagh::LetterYey => 'ⴻ',
Tifinagh::LetterYaf => 'ⴼ',
Tifinagh::LetterYak => 'ⴽ',
Tifinagh::LetterTuaregYak => 'ⴾ',
Tifinagh::LetterYakhh => 'ⴿ',
Tifinagh::LetterYah => 'ⵀ',
Tifinagh::LetterBerberAcademyYah => 'ⵁ',
Tifinagh::LetterTuaregYah => 'ⵂ',
Tifinagh::LetterYahh => 'ⵃ',
Tifinagh::LetterYaa => 'ⵄ',
Tifinagh::LetterYakh => 'ⵅ',
Tifinagh::LetterTuaregYakh => 'ⵆ',
Tifinagh::LetterYaq => 'ⵇ',
Tifinagh::LetterTuaregYaq => 'ⵈ',
Tifinagh::LetterYi => 'ⵉ',
Tifinagh::LetterYazh => 'ⵊ',
Tifinagh::LetterAhaggarYazh => 'ⵋ',
Tifinagh::LetterTuaregYazh => 'ⵌ',
Tifinagh::LetterYal => 'ⵍ',
Tifinagh::LetterYam => 'ⵎ',
Tifinagh::LetterYan => 'ⵏ',
Tifinagh::LetterTuaregYagn => 'ⵐ',
Tifinagh::LetterTuaregYang => 'ⵑ',
Tifinagh::LetterYap => 'ⵒ',
Tifinagh::LetterYu => 'ⵓ',
Tifinagh::LetterYar => 'ⵔ',
Tifinagh::LetterYarr => 'ⵕ',
Tifinagh::LetterYagh => 'ⵖ',
Tifinagh::LetterTuaregYagh => 'ⵗ',
Tifinagh::LetterAyerYagh => 'ⵘ',
Tifinagh::LetterYas => 'ⵙ',
Tifinagh::LetterYass => 'ⵚ',
Tifinagh::LetterYash => 'ⵛ',
Tifinagh::LetterYat => 'ⵜ',
Tifinagh::LetterYath => 'ⵝ',
Tifinagh::LetterYach => 'ⵞ',
Tifinagh::LetterYatt => 'ⵟ',
Tifinagh::LetterYav => 'ⵠ',
Tifinagh::LetterYaw => 'ⵡ',
Tifinagh::LetterYay => 'ⵢ',
Tifinagh::LetterYaz => 'ⵣ',
Tifinagh::LetterTawellemetYaz => 'ⵤ',
Tifinagh::LetterYazz => 'ⵥ',
Tifinagh::LetterYe => 'ⵦ',
Tifinagh::LetterYo => 'ⵧ',
Tifinagh::ModifierLetterLabializationMark => 'ⵯ',
Tifinagh::SeparatorMark => '⵰',
}
}
}
impl std::convert::TryFrom<char> for Tifinagh {
    type Error = ();
    /// Maps a `char` back to its `Tifinagh` variant.
    ///
    /// Any character outside the block (including the unassigned gap
    /// U+2D68..=U+2D6E) yields `Err(())`.
    fn try_from(c: char) -> Result<Self, Self::Error> {
        match c {
            'ⴰ' => Ok(Tifinagh::LetterYa),
            'ⴱ' => Ok(Tifinagh::LetterYab),
            'ⴲ' => Ok(Tifinagh::LetterYabh),
            'ⴳ' => Ok(Tifinagh::LetterYag),
            'ⴴ' => Ok(Tifinagh::LetterYaghh),
            'ⴵ' => Ok(Tifinagh::LetterBerberAcademyYaj),
            'ⴶ' => Ok(Tifinagh::LetterYaj),
            'ⴷ' => Ok(Tifinagh::LetterYad),
            'ⴸ' => Ok(Tifinagh::LetterYadh),
            'ⴹ' => Ok(Tifinagh::LetterYadd),
            'ⴺ' => Ok(Tifinagh::LetterYaddh),
            'ⴻ' => Ok(Tifinagh::LetterYey),
            'ⴼ' => Ok(Tifinagh::LetterYaf),
            'ⴽ' => Ok(Tifinagh::LetterYak),
            'ⴾ' => Ok(Tifinagh::LetterTuaregYak),
            'ⴿ' => Ok(Tifinagh::LetterYakhh),
            'ⵀ' => Ok(Tifinagh::LetterYah),
            'ⵁ' => Ok(Tifinagh::LetterBerberAcademyYah),
            'ⵂ' => Ok(Tifinagh::LetterTuaregYah),
            'ⵃ' => Ok(Tifinagh::LetterYahh),
            'ⵄ' => Ok(Tifinagh::LetterYaa),
            'ⵅ' => Ok(Tifinagh::LetterYakh),
            'ⵆ' => Ok(Tifinagh::LetterTuaregYakh),
            'ⵇ' => Ok(Tifinagh::LetterYaq),
            'ⵈ' => Ok(Tifinagh::LetterTuaregYaq),
            'ⵉ' => Ok(Tifinagh::LetterYi),
            'ⵊ' => Ok(Tifinagh::LetterYazh),
            'ⵋ' => Ok(Tifinagh::LetterAhaggarYazh),
            'ⵌ' => Ok(Tifinagh::LetterTuaregYazh),
            'ⵍ' => Ok(Tifinagh::LetterYal),
            'ⵎ' => Ok(Tifinagh::LetterYam),
            'ⵏ' => Ok(Tifinagh::LetterYan),
            'ⵐ' => Ok(Tifinagh::LetterTuaregYagn),
            'ⵑ' => Ok(Tifinagh::LetterTuaregYang),
            'ⵒ' => Ok(Tifinagh::LetterYap),
            'ⵓ' => Ok(Tifinagh::LetterYu),
            'ⵔ' => Ok(Tifinagh::LetterYar),
            'ⵕ' => Ok(Tifinagh::LetterYarr),
            'ⵖ' => Ok(Tifinagh::LetterYagh),
            'ⵗ' => Ok(Tifinagh::LetterTuaregYagh),
            'ⵘ' => Ok(Tifinagh::LetterAyerYagh),
            'ⵙ' => Ok(Tifinagh::LetterYas),
            'ⵚ' => Ok(Tifinagh::LetterYass),
            'ⵛ' => Ok(Tifinagh::LetterYash),
            'ⵜ' => Ok(Tifinagh::LetterYat),
            'ⵝ' => Ok(Tifinagh::LetterYath),
            'ⵞ' => Ok(Tifinagh::LetterYach),
            'ⵟ' => Ok(Tifinagh::LetterYatt),
            'ⵠ' => Ok(Tifinagh::LetterYav),
            'ⵡ' => Ok(Tifinagh::LetterYaw),
            'ⵢ' => Ok(Tifinagh::LetterYay),
            'ⵣ' => Ok(Tifinagh::LetterYaz),
            'ⵤ' => Ok(Tifinagh::LetterTawellemetYaz),
            'ⵥ' => Ok(Tifinagh::LetterYazz),
            'ⵦ' => Ok(Tifinagh::LetterYe),
            'ⵧ' => Ok(Tifinagh::LetterYo),
            'ⵯ' => Ok(Tifinagh::ModifierLetterLabializationMark),
            '⵰' => Ok(Tifinagh::SeparatorMark),
            _ => Err(()),
        }
    }
}
impl Into<u32> for Tifinagh {
fn into(self) -> u32 {
let c: char = self.into();
let hex = c
.escape_unicode()
.to_string()
.replace("\\u{", "")
.replace("}", "");
u32::from_str_radix(&hex, 16).unwrap()
}
}
impl std::convert::TryFrom<u32> for Tifinagh {
    type Error = ();
    /// Converts a code point to its `Tifinagh` variant, failing for values
    /// that are not valid `char`s or fall outside the Tifinagh block.
    fn try_from(u: u32) -> Result<Self, Self::Error> {
        char::try_from(u).map_err(|_| ()).and_then(Self::try_from)
    }
}
impl Iterator for Tifinagh {
    type Item = Self;
    /// Advances to - and yields - the next character in the block.
    ///
    /// Fix: the previous implementation never assigned `*self`, so the
    /// iterator returned the same successor on every call (an infinite,
    /// stuck iterator). Iteration now walks forward and ends at the first
    /// code point with no variant (the gap after U+2D67).
    fn next(&mut self) -> Option<Self> {
        use std::convert::TryFrom;
        let index: u32 = (*self).into();
        let succ = Self::try_from(index + 1).ok()?;
        *self = succ;
        Some(succ)
    }
}
impl Tifinagh {
    /// The character with the lowest index in this unicode block
    pub fn new() -> Self {
        Tifinagh::LetterYa
    }
    /// The character's name, in sentence case
    ///
    /// Builds the name from the variant's `Debug` representation prefixed
    /// with "Tifinagh", then converts it with the `string_morph` crate.
    pub fn name(&self) -> String {
        let s = std::format!("Tifinagh{:#?}", self);
        string_morph::to_sentence_case(&s)
    }
}
|
pub mod config;
pub mod environment;
pub mod files;
|
use crate::import::*;
use crate::process::registry::{ProcessRegistry, Register, Unregister};
use crate::node::NodeController;
use crate::util::RpcMethod;
use actix::dev::{ContextParts, Mailbox, ContextFut, AsyncContextParts, ToEnvelope, Envelope, RecipientRequest};
use actix::Handler;
use std::pin::Pin;
use crate::{MethodCall};
use prost::{DecodeError, EncodeError};
pub mod registry;
/// Errors that can occur while dispatching a method call to a process.
///
/// The first three variants carry explicit discriminants matching their
/// wire codes (see `code`); the remaining variants rely on `code` /
/// `from_code` for their numeric mapping.
#[derive(Debug, Clone, Copy)]
pub enum DispatchError {
    ProcessNotFound = 1,
    MethodNotFound = 2,
    NodeNotFound = 3,
    // message could not be decoded/encoded (see the From impls below)
    MessageFormat,
    Timeout,
    MailboxRemote,
    MailboxLocal,
    Protocol,
    Other,
}
impl DispatchError {
pub fn code(&self) -> i32 {
use DispatchError::*;
match self {
ProcessNotFound => 1,
MethodNotFound => 2,
NodeNotFound => 3,
MessageFormat => 4,
Timeout => 5,
_ => 99
}
}
pub fn from_code(v: i32) -> Self {
use DispatchError::*;
match v {
1 => ProcessNotFound,
2 => MethodNotFound,
3 => NodeNotFound,
4 => MessageFormat,
5 => Timeout,
_ => Other
}
}
}
// DispatchError is Copy, so conversion from a reference is a plain dereference
impl From<&DispatchError> for DispatchError {
    fn from(v: &DispatchError) -> Self {
        *v
    }
}
// Protobuf decode failures surface as MessageFormat errors.
// Fix: the error value is unused, so the binding is `_` (removes the
// unused-variable warning and matches the EncodeError impl below).
impl From<DecodeError> for DispatchError {
    fn from(_: DecodeError) -> Self {
        DispatchError::MessageFormat
    }
}
// Protobuf encode failures also surface as MessageFormat errors
impl From<EncodeError> for DispatchError {
    fn from(_: EncodeError) -> Self {
        DispatchError::MessageFormat
    }
}
/// Trait used to get generic dispatchers for different actors
pub trait Dispatcher: Send + 'static {
    /// Lookup the method, deserialize to proper type, execute, serialize and return
    ///
    /// `method` identifies the RPC method; `data` is its serialized payload.
    /// The returned future resolves to the serialized response or a
    /// `DispatchError`.
    fn dispatch(&self, method: u32, data: Bytes) -> BoxFuture<'static, Result<Bytes, DispatchError>>;
}
/// Trait which must be implemented for all processes.
///
/// The implementation of this trait is responsible for serializing/deserializing messages into proper structures,
/// and should be implemented by a proc macro
pub trait DynHandler: Actor<Context=Process<Self>> {
    /// Builds a type-erased dispatcher forwarding decoded calls to `addr`.
    /// The weak address avoids keeping the process alive just for dispatch.
    fn make_dispatcher(addr: WeakAddr<Self>) -> Box<dyn Dispatcher>;
}
/// A special execution context. In this context the actor has a stable identity,
/// which should not change. It can also receive messages from remote nodes.
pub struct Process<A: Actor<Context=Self>>
{
    // stable identity of this process; also used as its registry key
    id: Uuid,
    // the usual actix context internals (address, state, spawned futures)
    parts: ContextParts<A>,
    // held until the context future is built, then taken (see `into_fut`)
    mb: Option<Mailbox<A>>,
}
impl<A: Actor<Context = Self>> Process<A>
where
    A: DynHandler,
{
    /// Start a new process running the given actor.
    pub fn start(a: A) -> Pid<A> {
        Self::start_with(|_| a)
    }

    /// Start a new process, with the ability to manipulate its context before
    /// actual startup (e.g. to capture the pid or spawn futures).
    pub fn start_with(f: impl FnOnce(&mut Self) -> A) -> Pid<A> {
        // `_tx`: the sender half is unused here — addresses are produced via
        // `rx.sender_producer()` — but keep it bound so drop timing is unchanged.
        let (_tx, rx) = actix::dev::channel::channel(8);
        // Identity of this process in the global process registry.
        let id = Uuid::new_v4();
        let parts = ContextParts::new(rx.sender_producer());
        let mut proc = Process {
            id,
            parts,
            mb: Some(actix::dev::Mailbox::new(rx)),
        };
        let act = f(&mut proc);
        proc.run(act)
    }

    /// Get [Pid] of current process.
    pub fn pid(&self) -> Pid<A> {
        Pid::Local {
            // Uuid is `Copy`; no clone needed.
            id: self.id,
            addr: self.parts.address(),
        }
    }

    /// Drive the actor on the actix runtime, keeping it registered in the
    /// [ProcessRegistry] for as long as it is alive.
    fn run(self, act: A) -> Pid<A> {
        let id = self.id;
        let pid = self.pid();
        let fut = self.into_fut(act);
        // Register this process with registry when starting...
        ProcessRegistry::from_registry().do_send(Register::new(pid.clone()));
        // ...and unregister once the context future completes (actor stopped).
        let fut = fut.map(move |_| {
            ProcessRegistry::from_registry().do_send(Unregister { id });
        });
        actix_rt::spawn(fut);
        pid
    }

    /// Consume the context and build the future driving the actor's lifecycle.
    fn into_fut(mut self, act: A) -> ContextFut<A, Self> {
        // The mailbox is always present until this point; `run` calls us once.
        let mb = self.mb.take().unwrap();
        ContextFut::new(self, act, mb)
    }
}
// Expose the raw context parts so actix's context machinery can drive this context.
impl<A: Actor<Context = Self>> AsyncContextParts<A> for Process<A> {
    fn parts(&mut self) -> &mut ContextParts<A> {
        &mut self.parts
    }
}
// Actor lifecycle control, delegated verbatim to the underlying `ContextParts`.
impl<A: Actor<Context = Self>> ActorContext for Process<A> {
    /// Request the actor to stop.
    fn stop(&mut self) {
        self.parts.stop()
    }
    /// Terminate the actor immediately.
    fn terminate(&mut self) {
        self.parts.terminate()
    }
    /// Current lifecycle state of the actor.
    fn state(&self) -> ActorState {
        self.parts.state()
    }
}
// Async context capabilities (spawning actor futures, address lookup),
// delegated verbatim to the underlying `ContextParts`.
impl<A: Actor<Context = Self>> AsyncContext<A> for Process<A> {
    /// Address of the actor running in this context.
    fn address(&self) -> Addr<A> {
        self.parts.address()
    }
    /// Spawn a future into this context; it runs concurrently with message handling.
    fn spawn<F>(&mut self, fut: F) -> SpawnHandle
    where
        F: ActorFuture<Output = (), Actor = A> + 'static,
    {
        self.parts.spawn(fut)
    }
    /// Spawn a future and pause message processing until it completes.
    fn wait<F>(&mut self, fut: F)
    where
        F: ActorFuture<Output = (), Actor = A> + 'static,
    {
        self.parts.wait(fut)
    }
    /// Whether the context is currently paused on a `wait`ed future.
    fn waiting(&self) -> bool {
        self.parts.waiting()
    }
    /// Cancel a previously spawned future; returns whether it was found.
    fn cancel_future(&mut self, handle: SpawnHandle) -> bool {
        self.parts.cancel_future(handle)
    }
}
// Allow regular actix `Addr<A>` messaging for actors running in a `Process`
// context by packing messages into standard envelopes.
impl<A: Actor<Context = Self>, M: Message> ToEnvelope<A, M> for Process<A>
where
    A: Handler<M>,
    M: Send + 'static,
    M::Result: Send,
{
    fn pack(msg: M, tx: Option<Sender<<M as Message>::Result>>) -> Envelope<A> {
        Envelope::new(msg, tx)
    }
}
/// Global process identifier. This can be used to send messages to actors on different nodes.
pub enum Pid<A: Actor + DynHandler> {
    /// Process living in this node's actor system; carries a direct address.
    Local {
        id: Uuid,
        addr: Addr<A>,
    },
    /// Process known only by id; messages are routed through the [ProcessRegistry].
    Remote(Uuid),
}
// Manual impl: `#[derive(Clone)]` would require `A: Clone`, but only the
// id/address are cloned, so no bound on the actor type is needed.
impl<A: Actor + DynHandler> Clone for Pid<A> {
    fn clone(&self) -> Self {
        match self {
            // Uuid is `Copy` — dereference instead of `clone()` (clippy: clone_on_copy).
            Pid::Local { id, addr } => Pid::Local {
                id: *id,
                addr: addr.clone(),
            },
            Pid::Remote(r) => Pid::Remote(*r),
        }
    }
}
impl<A: Actor + DynHandler> Pid<A> {
    /// Build a pid from a bare id; such a pid is always treated as remote.
    pub fn from(uuid: Uuid) -> Self {
        Self::Remote(uuid)
    }

    /// Forget the local address, keeping only the id.
    pub fn into_remote(self) -> Self {
        Self::Remote(self.id())
    }

    /// Local address, if this pid refers to a process on this node.
    pub fn local_addr(&self) -> Option<Addr<A>> {
        match self {
            Pid::Local { addr, .. } => Some(addr.clone()),
            _ => None,
        }
    }

    /// Globally unique id of the process.
    pub fn id(&self) -> Uuid {
        // Uuid is `Copy`; dereference instead of `clone()`.
        match self {
            Pid::Local { id, .. } => *id,
            Pid::Remote(id) => *id,
        }
    }

    /// Send `m` and obtain a future resolving to its result.
    ///
    /// Local pids go straight through the actor's address; remote pids are
    /// encoded via [RpcMethod::make_call] and routed through the [ProcessRegistry].
    pub fn send<M>(&self, m: M) -> PidRequest<A, M>
    where
        A: Handler<M>,
        A::Context: ToEnvelope<A, M>,
        M: Message + RpcMethod + Send,
        M::Result: Send,
    {
        match self {
            Pid::Local { addr, .. } => PidRequest::Local(addr.send(m)),
            Pid::Remote(id) => {
                let dispatch = m.make_call(Some(*id));
                PidRequest::Remote(ProcessRegistry::from_registry().send(dispatch))
            }
        }
    }

    /// Fire-and-forget send; remote messages are dispatched as broadcasts.
    pub fn do_send<M>(&self, m: M)
    where
        A: Handler<M>,
        A::Context: ToEnvelope<A, M>,
        M: Message + RpcMethod + Send,
        M::Result: Send,
    {
        match self {
            Self::Local { addr, .. } => addr.do_send(m),
            Self::Remote(id) => {
                let dispatch = m.make_broadcast(Some(*id));
                ProcessRegistry::from_registry().do_send(dispatch)
            }
        }
    }

    /// Type-erased recipient for message type `M`, usable without knowing `A`.
    pub fn recipient<M>(&self) -> PidRecipient<M>
    where
        A: Handler<M>,
        A::Context: ToEnvelope<A, M>,
        M: Message + RpcMethod + Send + 'static,
        M::Result: Send,
    {
        match self {
            Self::Local { addr, id } => PidRecipient {
                id: *id,
                local: Some(addr.clone().recipient()),
            },
            Self::Remote(id) => PidRecipient {
                id: *id,
                local: None,
            },
        }
    }
}
/// Request to send message to remote process.
/// This can only be used to send addressed messages.
pub enum PidRequest<A, M>
where
    A: Actor + Handler<M>,
    A::Context: ToEnvelope<A, M>,
    M: Message,
{
    /// Request pending on a local actor's mailbox.
    Local(Request<A, M>),
    /// Request pending on the [ProcessRegistry], which forwards it to the remote node.
    Remote(Request<ProcessRegistry, MethodCall>),
}
impl<A: Actor, M: Message> Future for PidRequest<A, M>
where
    A: Actor + Handler<M>,
    A::Context: ToEnvelope<A, M>,
    M: Message + RpcMethod + Unpin + Send,
    M::Result: Send,
{
    type Output = Result<M::Result, DispatchError>;

    fn poll(self: Pin<&mut Self>, cx: &mut task::Context<'_>) -> Poll<Self::Output> {
        match self.get_mut() {
            PidRequest::Local(r) => {
                // A failure here comes from the *local* actor's mailbox, so it
                // is reported as `MailboxLocal` (previously `MailboxRemote`,
                // which was inconsistent with `PidRecipientRequest::Local`).
                r.poll_unpin(cx).map_err(|_| DispatchError::MailboxLocal)
            }
            PidRequest::Remote(r) => {
                match futures::ready!(r.poll_unpin(cx)) {
                    // Registry returned a reply payload: decode the typed result.
                    Ok(Ok(res)) => Poll::Ready(Ok(<M as RpcMethod>::read_result(res))),
                    // The dispatch itself failed (remote error propagated back).
                    Ok(Err(err)) => Poll::Ready(Err(err)),
                    // The local registry's mailbox failed before dispatching.
                    Err(_) => Poll::Ready(Err(DispatchError::MailboxLocal)),
                }
            }
        }
    }
}
/// Can be used to send messages across network without knowing the type of the actor receiving them.
pub struct PidRecipient<M>
where
    M: Message + Send,
    M::Result: Send,
{
    // Global id of the target process (used for remote routing).
    pub(crate) id: Uuid,
    // Direct recipient when the process lives on this node; `None` forces
    // routing through the `ProcessRegistry`.
    pub(crate) local: Option<Recipient<M>>,
}
impl<M> PidRecipient<M>
where
    M: Message + Send,
    M::Result: Send,
{
    /// Create a recipient from just an id.
    ///
    /// The resulting recipient has no local address, so every message is
    /// routed through the registry.
    pub fn from_id(id: Uuid) -> Self {
        Self {
            id,
            local: None,
        }
    }
}
// Manual impl: `#[derive(Clone)]` would demand `M: Clone`, which messages
// need not implement; only the id and the recipient handle are cloned.
impl<M> Clone for PidRecipient<M>
where
    M: Message + Send,
    M::Result: Send,
{
    fn clone(&self) -> Self {
        Self {
            id: self.id,
            local: self.local.clone(),
        }
    }
}
impl<M> PidRecipient<M>
where
    M: Message + RpcMethod + Send,
    M::Result: Send,
{
    /// Send `m` and obtain a future resolving to its result; remote targets
    /// are reached through the [ProcessRegistry].
    pub fn send(&self, m: M) -> PidRecipientRequest<M> {
        match self.local {
            Some(ref local) => PidRecipientRequest::Local(local.send(m)),
            None => {
                let dispatch = m.make_call(Some(self.id));
                PidRecipientRequest::Remote(ProcessRegistry::from_registry().send(dispatch))
            }
        }
    }

    /// Fire-and-forget send.
    pub fn do_send(&self, m: M) -> Result<(), SendError<M>> {
        match self.local {
            Some(ref local) => local.do_send(m),
            None => {
                // Consistent with `Pid::do_send`: fire-and-forget messages are
                // encoded as broadcasts, not as calls awaiting a reply
                // (previously `make_call`, which looked like a copy-paste from
                // `send` above).
                let dispatch = m.make_broadcast(Some(self.id));
                Ok(ProcessRegistry::from_registry().do_send(dispatch))
            }
        }
    }
}
/// In-flight request produced by [PidRecipient::send].
pub enum PidRecipientRequest<M>
where
    M: Message + Send + 'static,
    M::Result: Send,
{
    /// Pending on a local recipient's mailbox.
    Local(RecipientRequest<M>),
    /// Pending on the [ProcessRegistry], which forwards it to the remote node.
    Remote(Request<ProcessRegistry, MethodCall>),
}
impl<M: Message> Future for PidRecipientRequest<M>
where M: Message + RpcMethod + Unpin + Send,
M::Result: Send
{
type Output = Result<M::Result, DispatchError>;
fn poll(self: Pin<&mut Self>, cx: &mut task::Context<'_>) -> Poll<Self::Output> {
match self.get_mut() {
Self::Local(r) => {
r.poll_unpin(cx).map_err(|e| DispatchError::MailboxLocal)
}
Self::Remote(r) => {
match futures::ready!(r.poll_unpin(cx)) {
Ok(Ok(res)) => {
Poll::Ready(Ok(<M as RpcMethod>::read_result(res)))
}
Ok(Err(err)) => Poll::Ready(Err(err)),
Err(mailbox) => Poll::Ready(Err(DispatchError::MailboxLocal)),
}
}
}
}
} |
use actix_web::{post,web,HttpResponse,Responder};
use log::info;
use serde::{Serialize,Deserialize};
use std::process::Command;
/// Body of the `/execute` request.
#[derive(Debug, Deserialize, Serialize)]
pub struct Request {
    // Raw command string handed to the shell by `execute_command`.
    commands: String,
}
/// Body of the `/execute` response.
#[derive(Debug, Deserialize, Serialize)]
pub struct Response {
    // `true` when the spawned process reported a successful exit status.
    result: bool,
}
#[post("/execute")]
pub async fn execute_command(request:web::Json<Request>) ->impl Responder{
info!("validating password for {}", request.commands);
let process = Command::new("sh")
.arg(&request.commands)
.status()
.expect("Failed to execute command");
info!("status : {}", &process.to_string());
if process.success(){
HttpResponse::Ok().json(Response{result: true});
}else {
HttpResponse::Ok().json(Response{result: false});
}
} |
use napi::*;
use crate::image::{Image, ImageData};
use crate::pattern::Pattern;
use crate::sk::*;
/// Discriminates the JS-side source object passed to the pattern constructor.
#[repr(u8)]
enum ImageKind {
    ImageData,
    Image,
}

impl From<u32> for ImageKind {
    /// Maps the JS-provided numeric tag to a kind; `0` selects `ImageData`,
    /// anything else (including unknown tags) is treated as `Image`.
    fn from(value: u32) -> Self {
        if value == 0 {
            Self::ImageData
        } else {
            Self::Image
        }
    }
}
/// JS constructor for `CanvasPattern`.
///
/// Arguments: (0) an `Image` or `ImageData` object, (1) the repetition rule
/// (string or null), (2) a numeric tag telling which kind argument 0 is.
/// Wraps a `Pattern::ImagePattern` into `this` and returns `undefined`.
#[js_function(3)]
pub fn canvas_pattern_constructor(ctx: CallContext) -> Result<JsUndefined> {
    let image_or_data = ctx.get::<JsObject>(0)?;
    let repetition = ctx.get::<JsUnknown>(1)?;
    let image_kind: ImageKind = ctx.get::<JsNumber>(2)?.get_uint32()?.into();
    let mut this: JsObject = ctx.this_unchecked();
    let bitmap = match image_kind {
        ImageKind::Image => {
            let native_object = ctx.env.unwrap::<Image>(&image_or_data)?;
            // An image only has a bitmap once loading has finished.
            if let Some(bitmap) = native_object.bitmap.as_ref() {
                bitmap.bitmap
            } else {
                return Err(Error::new(
                    Status::GenericFailure,
                    "Image has not completed".to_string(),
                ));
            }
        }
        ImageKind::ImageData => {
            let native_object = ctx.env.unwrap::<ImageData>(&image_or_data)?;
            // 4 bytes per pixel, matching the RGBA8888 color type below.
            let image_size = native_object.width * native_object.height * 4usize;
            let bitmap = Bitmap::from_image_data(
                native_object.data as *mut u8,
                native_object.width,
                native_object.height,
                native_object.width * 4usize, // row stride in bytes
                image_size,
                ColorType::RGBA8888,
                AlphaType::Unpremultiplied,
            );
            let bitmap_object = ctx.env.create_external(bitmap, Some(image_size as i64))?;
            let bitmap = ctx.env.get_value_external::<Bitmap>(&bitmap_object)?.bitmap;
            // wrap Bitmap to `this`, prevent it to be dropped
            this.set_named_property("_bitmap", bitmap_object)?;
            bitmap
        }
    };
    // Map the canvas repetition keyword onto per-axis tile modes:
    // first element of the tuple is the x tiling, second is the y tiling.
    let (repeat_x, repeat_y) = match repetition.get_type()? {
        // `null` means the default: repeat in both directions.
        ValueType::Null => (TileMode::Repeat, TileMode::Repeat),
        ValueType::String => {
            // SAFETY: we just checked the value type is String, so the cast is valid.
            let repetition_str = unsafe { repetition.cast::<JsString>() }.into_utf8()?;
            match repetition_str.as_str()? {
                "" | "repeat" => (TileMode::Repeat, TileMode::Repeat),
                "repeat-x" => (TileMode::Repeat, TileMode::Decal),
                "repeat-y" => (TileMode::Decal, TileMode::Repeat),
                "no-repeat" => (TileMode::Decal, TileMode::Decal),
                _ => {
                    return Err(Error::new(
                        Status::InvalidArg,
                        format!("{} is not valid repetition rule", repetition_str.as_str()?),
                    ))
                }
            }
        }
        _ => {
            return Err(Error::new(
                Status::InvalidArg,
                "Invalid type of image repetition".to_string(),
            ))
        }
    };
    ctx.env.wrap(
        &mut this,
        Pattern::ImagePattern(ImagePattern {
            transform: Transform::default(),
            bitmap,
            repeat_x,
            repeat_y,
        }),
    )?;
    ctx.env.get_undefined()
}
/// `CanvasPattern#setTransform` — copies the `a..f` numeric fields of a
/// DOMMatrix-like JS object into the pattern's 2D transform.
#[js_function(1)]
pub fn set_transform(ctx: CallContext) -> Result<JsUndefined> {
    let this: JsObject = ctx.this_unchecked();
    let matrix = ctx.get::<JsObject>(0)?;
    // Read one numeric component of the matrix object by property name.
    let component = |name: &str| -> Result<f64> {
        matrix.get_named_property::<JsNumber>(name)?.get_double()
    };
    let transform = Transform::new(
        component("a")? as f32,
        component("b")? as f32,
        component("c")? as f32,
        component("d")? as f32,
        component("e")? as f32,
        component("f")? as f32,
    );
    let pattern = ctx.env.unwrap::<Pattern>(&this)?;
    // Only image patterns carry a transform here; other variants are left untouched.
    if let Pattern::ImagePattern(pattern) = pattern {
        pattern.transform = transform;
    }
    ctx.env.get_undefined()
}
|
pub mod gridstore;
|
#![doc = "generated by AutoRust 0.1.0"]
#![allow(non_camel_case_types)]
#![allow(unused_imports)]
use serde::{Deserialize, Serialize};
/// Generated API model: a migration project resource.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Project {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    // "type" is a Rust keyword, hence the rename.
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
    #[serde(rename = "eTag", default, skip_serializing_if = "Option::is_none")]
    pub e_tag: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub location: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tags: Option<serde_json::Value>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<ProjectProperties>,
}
/// Generated API model: properties of a [Project].
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ProjectProperties {
    #[serde(rename = "createdTimestamp", default, skip_serializing_if = "Option::is_none")]
    pub created_timestamp: Option<String>,
    #[serde(rename = "updatedTimestamp", default, skip_serializing_if = "Option::is_none")]
    pub updated_timestamp: Option<String>,
    #[serde(rename = "serviceEndpoint", default, skip_serializing_if = "Option::is_none")]
    pub service_endpoint: Option<String>,
    #[serde(rename = "assessmentSolutionId", default, skip_serializing_if = "Option::is_none")]
    pub assessment_solution_id: Option<String>,
    #[serde(rename = "projectStatus", default, skip_serializing_if = "Option::is_none")]
    pub project_status: Option<project_properties::ProjectStatus>,
    #[serde(rename = "customerWorkspaceId", default, skip_serializing_if = "Option::is_none")]
    pub customer_workspace_id: Option<String>,
    #[serde(rename = "customerWorkspaceLocation", default, skip_serializing_if = "Option::is_none")]
    pub customer_workspace_location: Option<String>,
    #[serde(rename = "numberOfGroups", default, skip_serializing_if = "Option::is_none")]
    pub number_of_groups: Option<i32>,
    #[serde(rename = "numberOfMachines", default, skip_serializing_if = "Option::is_none")]
    pub number_of_machines: Option<i32>,
    #[serde(rename = "numberOfAssessments", default, skip_serializing_if = "Option::is_none")]
    pub number_of_assessments: Option<i32>,
    #[serde(rename = "lastAssessmentTimestamp", default, skip_serializing_if = "Option::is_none")]
    pub last_assessment_timestamp: Option<String>,
    #[serde(rename = "publicNetworkAccess", default, skip_serializing_if = "Option::is_none")]
    pub public_network_access: Option<String>,
    #[serde(rename = "privateEndpointConnections", default, skip_serializing_if = "Vec::is_empty")]
    pub private_endpoint_connections: Vec<PrivateEndpointConnection>,
    #[serde(rename = "customerStorageAccountArmId", default, skip_serializing_if = "Option::is_none")]
    pub customer_storage_account_arm_id: Option<String>,
    #[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")]
    pub provisioning_state: Option<project_properties::ProvisioningState>,
}
/// Enums scoped to [ProjectProperties].
pub mod project_properties {
    use super::*;
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum ProjectStatus {
        Active,
        Inactive,
    }
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum ProvisioningState {
        Accepted,
        Creating,
        Deleting,
        Failed,
        Moving,
        Succeeded,
    }
}
/// Generated API model: a group of machines within a project.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Group {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(rename = "eTag", default, skip_serializing_if = "Option::is_none")]
    pub e_tag: Option<String>,
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
    // Unlike the other fields, properties are required on this model.
    pub properties: GroupProperties,
}
/// Generated API model: properties of a [Group].
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct GroupProperties {
    #[serde(rename = "groupStatus", default, skip_serializing_if = "Option::is_none")]
    pub group_status: Option<group_properties::GroupStatus>,
    #[serde(rename = "machineCount", default, skip_serializing_if = "Option::is_none")]
    pub machine_count: Option<i32>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub assessments: Vec<String>,
    #[serde(rename = "areAssessmentsRunning", default, skip_serializing_if = "Option::is_none")]
    pub are_assessments_running: Option<bool>,
    #[serde(rename = "createdTimestamp", default, skip_serializing_if = "Option::is_none")]
    pub created_timestamp: Option<String>,
    #[serde(rename = "updatedTimestamp", default, skip_serializing_if = "Option::is_none")]
    pub updated_timestamp: Option<String>,
    #[serde(rename = "groupType", default, skip_serializing_if = "Option::is_none")]
    pub group_type: Option<String>,
}
/// Enums scoped to [GroupProperties].
pub mod group_properties {
    use super::*;
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum GroupStatus {
        Created,
        Updated,
        Running,
        Completed,
        Invalid,
    }
}
/// Generated API model: payload for updating a [Group].
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct UpdateGroupBody {
    #[serde(rename = "eTag", default, skip_serializing_if = "Option::is_none")]
    pub e_tag: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<GroupBodyProperties>,
}
/// Generated API model: the machines to add/remove in a group update.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct GroupBodyProperties {
    #[serde(rename = "operationType", default, skip_serializing_if = "Option::is_none")]
    pub operation_type: Option<group_body_properties::OperationType>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub machines: Vec<String>,
}
/// Enums scoped to [GroupBodyProperties].
pub mod group_body_properties {
    use super::*;
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum OperationType {
        Add,
        Remove,
    }
}
/// Generated API model: an assessment resource.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Assessment {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(rename = "eTag", default, skip_serializing_if = "Option::is_none")]
    pub e_tag: Option<String>,
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
    // Properties are required on this model.
    pub properties: AssessmentProperties,
}
/// Generated API model: settings and computed results of an [Assessment].
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct AssessmentProperties {
    #[serde(rename = "azureLocation")]
    pub azure_location: assessment_properties::AzureLocation,
    #[serde(rename = "azureOfferCode")]
    pub azure_offer_code: assessment_properties::AzureOfferCode,
    #[serde(rename = "eaSubscriptionId", default, skip_serializing_if = "Option::is_none")]
    pub ea_subscription_id: Option<String>,
    #[serde(rename = "azurePricingTier")]
    pub azure_pricing_tier: assessment_properties::AzurePricingTier,
    #[serde(rename = "azureStorageRedundancy")]
    pub azure_storage_redundancy: assessment_properties::AzureStorageRedundancy,
    #[serde(rename = "scalingFactor")]
    pub scaling_factor: f64,
    pub percentile: assessment_properties::Percentile,
    #[serde(rename = "timeRange")]
    pub time_range: assessment_properties::TimeRange,
    #[serde(rename = "perfDataStartTime", default, skip_serializing_if = "Option::is_none")]
    pub perf_data_start_time: Option<String>,
    #[serde(rename = "perfDataEndTime", default, skip_serializing_if = "Option::is_none")]
    pub perf_data_end_time: Option<String>,
    pub stage: assessment_properties::Stage,
    pub currency: assessment_properties::Currency,
    #[serde(rename = "azureHybridUseBenefit")]
    pub azure_hybrid_use_benefit: assessment_properties::AzureHybridUseBenefit,
    #[serde(rename = "discountPercentage")]
    pub discount_percentage: f64,
    #[serde(rename = "confidenceRatingInPercentage", default, skip_serializing_if = "Option::is_none")]
    pub confidence_rating_in_percentage: Option<f64>,
    #[serde(rename = "sizingCriterion")]
    pub sizing_criterion: assessment_properties::SizingCriterion,
    #[serde(rename = "reservedInstance")]
    pub reserved_instance: assessment_properties::ReservedInstance,
    #[serde(rename = "azureVmFamilies")]
    pub azure_vm_families: Vec<String>,
    #[serde(rename = "azureDiskType")]
    pub azure_disk_type: assessment_properties::AzureDiskType,
    #[serde(rename = "vmUptime")]
    pub vm_uptime: VmUptime,
    #[serde(rename = "pricesTimestamp", default, skip_serializing_if = "Option::is_none")]
    pub prices_timestamp: Option<String>,
    #[serde(rename = "createdTimestamp", default, skip_serializing_if = "Option::is_none")]
    pub created_timestamp: Option<String>,
    #[serde(rename = "updatedTimestamp", default, skip_serializing_if = "Option::is_none")]
    pub updated_timestamp: Option<String>,
    #[serde(rename = "monthlyComputeCost", default, skip_serializing_if = "Option::is_none")]
    pub monthly_compute_cost: Option<f64>,
    #[serde(rename = "monthlyBandwidthCost", default, skip_serializing_if = "Option::is_none")]
    pub monthly_bandwidth_cost: Option<f64>,
    #[serde(rename = "monthlyStorageCost", default, skip_serializing_if = "Option::is_none")]
    pub monthly_storage_cost: Option<f64>,
    #[serde(rename = "monthlyPremiumStorageCost", default, skip_serializing_if = "Option::is_none")]
    pub monthly_premium_storage_cost: Option<f64>,
    #[serde(rename = "monthlyStandardSSDStorageCost", default, skip_serializing_if = "Option::is_none")]
    pub monthly_standard_ssd_storage_cost: Option<f64>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub status: Option<assessment_properties::Status>,
    #[serde(rename = "numberOfMachines", default, skip_serializing_if = "Option::is_none")]
    pub number_of_machines: Option<i32>,
}
/// Enums scoped to [AssessmentProperties]. `#[serde(rename = ...)]` attributes
/// preserve the wire spelling where it differs from the Rust variant name.
pub mod assessment_properties {
    use super::*;
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum AzureLocation {
        Unknown,
        EastAsia,
        SoutheastAsia,
        AustraliaEast,
        AustraliaSoutheast,
        BrazilSouth,
        CanadaCentral,
        CanadaEast,
        WestEurope,
        NorthEurope,
        CentralIndia,
        SouthIndia,
        WestIndia,
        JapanEast,
        JapanWest,
        KoreaCentral,
        KoreaSouth,
        UkWest,
        UkSouth,
        NorthCentralUs,
        EastUs,
        WestUs2,
        SouthCentralUs,
        CentralUs,
        EastUs2,
        WestUs,
        WestCentralUs,
        GermanyCentral,
        GermanyNortheast,
        ChinaNorth,
        ChinaEast,
        #[serde(rename = "USGovArizona")]
        UsGovArizona,
        #[serde(rename = "USGovTexas")]
        UsGovTexas,
        #[serde(rename = "USGovIowa")]
        UsGovIowa,
        #[serde(rename = "USGovVirginia")]
        UsGovVirginia,
        #[serde(rename = "USDoDCentral")]
        UsDoDCentral,
        #[serde(rename = "USDoDEast")]
        UsDoDEast,
    }
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum AzureOfferCode {
        Unknown,
        #[serde(rename = "MSAZR0003P")]
        Msazr0003p,
        #[serde(rename = "MSAZR0044P")]
        Msazr0044p,
        #[serde(rename = "MSAZR0059P")]
        Msazr0059p,
        #[serde(rename = "MSAZR0060P")]
        Msazr0060p,
        #[serde(rename = "MSAZR0062P")]
        Msazr0062p,
        #[serde(rename = "MSAZR0063P")]
        Msazr0063p,
        #[serde(rename = "MSAZR0064P")]
        Msazr0064p,
        #[serde(rename = "MSAZR0029P")]
        Msazr0029p,
        #[serde(rename = "MSAZR0022P")]
        Msazr0022p,
        #[serde(rename = "MSAZR0023P")]
        Msazr0023p,
        #[serde(rename = "MSAZR0148P")]
        Msazr0148p,
        #[serde(rename = "MSAZR0025P")]
        Msazr0025p,
        #[serde(rename = "MSAZR0036P")]
        Msazr0036p,
        #[serde(rename = "MSAZR0120P")]
        Msazr0120p,
        #[serde(rename = "MSAZR0121P")]
        Msazr0121p,
        #[serde(rename = "MSAZR0122P")]
        Msazr0122p,
        #[serde(rename = "MSAZR0123P")]
        Msazr0123p,
        #[serde(rename = "MSAZR0124P")]
        Msazr0124p,
        #[serde(rename = "MSAZR0125P")]
        Msazr0125p,
        #[serde(rename = "MSAZR0126P")]
        Msazr0126p,
        #[serde(rename = "MSAZR0127P")]
        Msazr0127p,
        #[serde(rename = "MSAZR0128P")]
        Msazr0128p,
        #[serde(rename = "MSAZR0129P")]
        Msazr0129p,
        #[serde(rename = "MSAZR0130P")]
        Msazr0130p,
        #[serde(rename = "MSAZR0111P")]
        Msazr0111p,
        #[serde(rename = "MSAZR0144P")]
        Msazr0144p,
        #[serde(rename = "MSAZR0149P")]
        Msazr0149p,
        #[serde(rename = "MSMCAZR0044P")]
        Msmcazr0044p,
        #[serde(rename = "MSMCAZR0059P")]
        Msmcazr0059p,
        #[serde(rename = "MSMCAZR0060P")]
        Msmcazr0060p,
        #[serde(rename = "MSMCAZR0063P")]
        Msmcazr0063p,
        #[serde(rename = "MSMCAZR0120P")]
        Msmcazr0120p,
        #[serde(rename = "MSMCAZR0121P")]
        Msmcazr0121p,
        #[serde(rename = "MSMCAZR0125P")]
        Msmcazr0125p,
        #[serde(rename = "MSMCAZR0128P")]
        Msmcazr0128p,
        #[serde(rename = "MSAZRDE0003P")]
        Msazrde0003p,
        #[serde(rename = "MSAZRDE0044P")]
        Msazrde0044p,
        #[serde(rename = "MSAZRUSGOV0003P")]
        Msazrusgov0003p,
        #[serde(rename = "EA")]
        Ea,
    }
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum AzurePricingTier {
        Standard,
        Basic,
    }
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum AzureStorageRedundancy {
        Unknown,
        LocallyRedundant,
        ZoneRedundant,
        GeoRedundant,
        ReadAccessGeoRedundant,
    }
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Percentile {
        Percentile50,
        Percentile90,
        Percentile95,
        Percentile99,
    }
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum TimeRange {
        Day,
        Week,
        Month,
        Custom,
    }
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Stage {
        InProgress,
        UnderReview,
        Approved,
    }
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Currency {
        Unknown,
        #[serde(rename = "USD")]
        Usd,
        #[serde(rename = "DKK")]
        Dkk,
        #[serde(rename = "CAD")]
        Cad,
        #[serde(rename = "IDR")]
        Idr,
        #[serde(rename = "JPY")]
        Jpy,
        #[serde(rename = "KRW")]
        Krw,
        #[serde(rename = "NZD")]
        Nzd,
        #[serde(rename = "NOK")]
        Nok,
        #[serde(rename = "RUB")]
        Rub,
        #[serde(rename = "SAR")]
        Sar,
        #[serde(rename = "ZAR")]
        Zar,
        #[serde(rename = "SEK")]
        Sek,
        #[serde(rename = "TRY")]
        Try,
        #[serde(rename = "GBP")]
        Gbp,
        #[serde(rename = "MXN")]
        Mxn,
        #[serde(rename = "MYR")]
        Myr,
        #[serde(rename = "INR")]
        Inr,
        #[serde(rename = "HKD")]
        Hkd,
        #[serde(rename = "BRL")]
        Brl,
        #[serde(rename = "TWD")]
        Twd,
        #[serde(rename = "EUR")]
        Eur,
        #[serde(rename = "CHF")]
        Chf,
        #[serde(rename = "ARS")]
        Ars,
        #[serde(rename = "AUD")]
        Aud,
        #[serde(rename = "CNY")]
        Cny,
    }
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum AzureHybridUseBenefit {
        Unknown,
        Yes,
        No,
    }
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum SizingCriterion {
        PerformanceBased,
        AsOnPremises,
    }
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum ReservedInstance {
        None,
        #[serde(rename = "RI1Year")]
        Ri1Year,
        #[serde(rename = "RI3Year")]
        Ri3Year,
    }
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum AzureDiskType {
        Unknown,
        Standard,
        Premium,
        #[serde(rename = "StandardSSD")]
        StandardSsd,
        StandardOrPremium,
    }
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Status {
        Created,
        Updated,
        Running,
        Completed,
        Invalid,
        OutOfSync,
        OutDated,
    }
}
/// Generated API model: assumed VM uptime used for cost calculation.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VmUptime {
    #[serde(rename = "daysPerMonth", default, skip_serializing_if = "Option::is_none")]
    pub days_per_month: Option<f64>,
    #[serde(rename = "hoursPerDay", default, skip_serializing_if = "Option::is_none")]
    pub hours_per_day: Option<f64>,
}
/// Generated API model: a disk attached to a discovered machine.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Disk {
    #[serde(rename = "gigabytesAllocated", default, skip_serializing_if = "Option::is_none")]
    pub gigabytes_allocated: Option<f64>,
    #[serde(rename = "displayName", default, skip_serializing_if = "Option::is_none")]
    pub display_name: Option<String>,
}
/// Generated API model: a network adapter of a discovered machine.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct NetworkAdapter {
    #[serde(rename = "macAddress", default, skip_serializing_if = "Option::is_none")]
    pub mac_address: Option<String>,
    #[serde(rename = "ipAddresses", default, skip_serializing_if = "Vec::is_empty")]
    pub ip_addresses: Vec<String>,
    #[serde(rename = "displayName", default, skip_serializing_if = "Option::is_none")]
    pub display_name: Option<String>,
}
/// Generated API model: a discovered machine resource.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Machine {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(rename = "eTag", default, skip_serializing_if = "Option::is_none")]
    pub e_tag: Option<String>,
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<MachineProperties>,
}
/// Generated API model: properties of a [Machine].
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct MachineProperties {
    #[serde(rename = "bootType", default, skip_serializing_if = "Option::is_none")]
    pub boot_type: Option<machine_properties::BootType>,
    #[serde(rename = "datacenterManagementServerArmId", default, skip_serializing_if = "Option::is_none")]
    pub datacenter_management_server_arm_id: Option<String>,
    #[serde(rename = "discoveryMachineArmId", default, skip_serializing_if = "Option::is_none")]
    pub discovery_machine_arm_id: Option<String>,
    #[serde(rename = "datacenterManagementServerName", default, skip_serializing_if = "Option::is_none")]
    pub datacenter_management_server_name: Option<String>,
    #[serde(rename = "displayName", default, skip_serializing_if = "Option::is_none")]
    pub display_name: Option<String>,
    #[serde(rename = "megabytesOfMemory", default, skip_serializing_if = "Option::is_none")]
    pub megabytes_of_memory: Option<f64>,
    #[serde(rename = "numberOfCores", default, skip_serializing_if = "Option::is_none")]
    pub number_of_cores: Option<i64>,
    #[serde(rename = "operatingSystemType", default, skip_serializing_if = "Option::is_none")]
    pub operating_system_type: Option<String>,
    #[serde(rename = "operatingSystemName", default, skip_serializing_if = "Option::is_none")]
    pub operating_system_name: Option<String>,
    #[serde(rename = "operatingSystemVersion", default, skip_serializing_if = "Option::is_none")]
    pub operating_system_version: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub description: Option<String>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub groups: Vec<String>,
    #[serde(rename = "createdTimestamp", default, skip_serializing_if = "Option::is_none")]
    pub created_timestamp: Option<String>,
    #[serde(rename = "updatedTimestamp", default, skip_serializing_if = "Option::is_none")]
    pub updated_timestamp: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub disks: Option<serde_json::Value>,
    #[serde(rename = "networkAdapters", default, skip_serializing_if = "Option::is_none")]
    pub network_adapters: Option<serde_json::Value>,
}
/// Enums scoped to [MachineProperties].
pub mod machine_properties {
    use super::*;
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum BootType {
        Unknown,
        #[serde(rename = "EFI")]
        Efi,
        #[serde(rename = "BIOS")]
        Bios,
    }
}
/// Generated API model: per-disk assessment results and sizing recommendation.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct AssessedDisk {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(rename = "displayName", default, skip_serializing_if = "Option::is_none")]
    pub display_name: Option<String>,
    #[serde(rename = "gigabytesProvisioned", default, skip_serializing_if = "Option::is_none")]
    pub gigabytes_provisioned: Option<f64>,
    #[serde(rename = "megabytesPerSecondOfRead", default, skip_serializing_if = "Option::is_none")]
    pub megabytes_per_second_of_read: Option<f64>,
    #[serde(rename = "megabytesPerSecondOfWrite", default, skip_serializing_if = "Option::is_none")]
    pub megabytes_per_second_of_write: Option<f64>,
    #[serde(rename = "numberOfReadOperationsPerSecond", default, skip_serializing_if = "Option::is_none")]
    pub number_of_read_operations_per_second: Option<f64>,
    #[serde(rename = "numberOfWriteOperationsPerSecond", default, skip_serializing_if = "Option::is_none")]
    pub number_of_write_operations_per_second: Option<f64>,
    #[serde(rename = "monthlyStorageCost", default, skip_serializing_if = "Option::is_none")]
    pub monthly_storage_cost: Option<f64>,
    #[serde(rename = "recommendedDiskType", default, skip_serializing_if = "Option::is_none")]
    pub recommended_disk_type: Option<assessed_disk::RecommendedDiskType>,
    #[serde(rename = "recommendedDiskSize", default, skip_serializing_if = "Option::is_none")]
    pub recommended_disk_size: Option<assessed_disk::RecommendedDiskSize>,
    #[serde(rename = "gigabytesForRecommendedDiskSize", default, skip_serializing_if = "Option::is_none")]
    pub gigabytes_for_recommended_disk_size: Option<i32>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub suitability: Option<assessed_disk::Suitability>,
    #[serde(rename = "suitabilityExplanation", default, skip_serializing_if = "Option::is_none")]
    pub suitability_explanation: Option<assessed_disk::SuitabilityExplanation>,
    #[serde(rename = "suitabilityDetail", default, skip_serializing_if = "Option::is_none")]
    pub suitability_detail: Option<assessed_disk::SuitabilityDetail>,
}
/// Enum types referenced by the fields of the `AssessedDisk` model.
/// All wire names are mapped to Rust naming via `#[serde(rename)]` where they differ.
pub mod assessed_disk {
    use super::*;
    /// Disk tier recommended by the assessment. Wire values match the variant
    /// names except `StandardSsd`, which serializes as "StandardSSD".
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum RecommendedDiskType {
        Unknown,
        Standard,
        Premium,
        #[serde(rename = "StandardSSD")]
        StandardSsd,
        StandardOrPremium,
    }
    /// Specific disk SKU recommended by the assessment. Wire values use the
    /// underscore form (e.g. "Standard_S4", "Premium_P30", "StandardSSD_E10").
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum RecommendedDiskSize {
        Unknown,
        #[serde(rename = "Standard_S4")]
        StandardS4,
        #[serde(rename = "Standard_S6")]
        StandardS6,
        #[serde(rename = "Standard_S10")]
        StandardS10,
        #[serde(rename = "Standard_S15")]
        StandardS15,
        #[serde(rename = "Standard_S20")]
        StandardS20,
        #[serde(rename = "Standard_S30")]
        StandardS30,
        #[serde(rename = "Standard_S40")]
        StandardS40,
        #[serde(rename = "Standard_S50")]
        StandardS50,
        #[serde(rename = "Premium_P4")]
        PremiumP4,
        #[serde(rename = "Premium_P6")]
        PremiumP6,
        #[serde(rename = "Premium_P10")]
        PremiumP10,
        #[serde(rename = "Premium_P15")]
        PremiumP15,
        #[serde(rename = "Premium_P20")]
        PremiumP20,
        #[serde(rename = "Premium_P30")]
        PremiumP30,
        #[serde(rename = "Premium_P40")]
        PremiumP40,
        #[serde(rename = "Premium_P50")]
        PremiumP50,
        #[serde(rename = "Standard_S60")]
        StandardS60,
        #[serde(rename = "Standard_S70")]
        StandardS70,
        #[serde(rename = "Standard_S80")]
        StandardS80,
        #[serde(rename = "Premium_P60")]
        PremiumP60,
        #[serde(rename = "Premium_P70")]
        PremiumP70,
        #[serde(rename = "Premium_P80")]
        PremiumP80,
        #[serde(rename = "StandardSSD_E10")]
        StandardSsdE10,
        #[serde(rename = "StandardSSD_E15")]
        StandardSsdE15,
        #[serde(rename = "StandardSSD_E20")]
        StandardSsdE20,
        #[serde(rename = "StandardSSD_E30")]
        StandardSsdE30,
        #[serde(rename = "StandardSSD_E40")]
        StandardSsdE40,
        #[serde(rename = "StandardSSD_E50")]
        StandardSsdE50,
        #[serde(rename = "StandardSSD_E60")]
        StandardSsdE60,
        #[serde(rename = "StandardSSD_E70")]
        StandardSsdE70,
        #[serde(rename = "StandardSSD_E80")]
        StandardSsdE80,
        #[serde(rename = "StandardSSD_E4")]
        StandardSsdE4,
        #[serde(rename = "StandardSSD_E6")]
        StandardSsdE6,
    }
    /// Overall suitability verdict for the assessed disk.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Suitability {
        Unknown,
        NotSuitable,
        Suitable,
        ConditionallySuitable,
        ReadinessUnknown,
    }
    /// Reason code explaining the suitability verdict.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum SuitabilityExplanation {
        Unknown,
        NotApplicable,
        DiskSizeGreaterThanSupported,
        NoSuitableDiskSizeForIops,
        NoSuitableDiskSizeForThroughput,
        NoDiskSizeFoundInSelectedLocation,
        NoDiskSizeFoundForSelectedRedundancy,
        InternalErrorOccurredForDiskEvaluation,
        NoEaPriceFoundForDiskSize,
    }
    /// Additional detail qualifying the suitability verdict, typically naming
    /// the metric that was missing or out of range.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum SuitabilityDetail {
        None,
        NumberOfReadOperationsPerSecondMissing,
        NumberOfWriteOperationsPerSecondMissing,
        MegabytesPerSecondOfReadMissing,
        MegabytesPerSecondOfWriteMissing,
        DiskGigabytesConsumedMissing,
        DiskGigabytesProvisionedMissing,
        NumberOfReadOperationsPerSecondOutOfRange,
        NumberOfWriteOperationsPerSecondOutOfRange,
        MegabytesPerSecondOfReadOutOfRange,
        MegabytesPerSecondOfWriteOutOfRange,
        DiskGigabytesConsumedOutOfRange,
        DiskGigabytesProvisionedOutOfRange,
    }
}
/// Serde model for a network adapter evaluated by an assessment.
/// camelCase wire names are mapped via `#[serde(rename)]`; `Option` fields are
/// omitted from JSON when `None`, and `ip_addresses` is omitted when empty.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct AssessedNetworkAdapter {
    #[serde(rename = "macAddress", default, skip_serializing_if = "Option::is_none")]
    pub mac_address: Option<String>,
    #[serde(rename = "ipAddresses", default, skip_serializing_if = "Vec::is_empty")]
    pub ip_addresses: Vec<String>,
    #[serde(rename = "displayName", default, skip_serializing_if = "Option::is_none")]
    pub display_name: Option<String>,
    #[serde(rename = "monthlyBandwidthCosts", default, skip_serializing_if = "Option::is_none")]
    pub monthly_bandwidth_costs: Option<f64>,
    #[serde(rename = "megabytesPerSecondReceived", default, skip_serializing_if = "Option::is_none")]
    pub megabytes_per_second_received: Option<f64>,
    #[serde(rename = "megabytesPerSecondTransmitted", default, skip_serializing_if = "Option::is_none")]
    pub megabytes_per_second_transmitted: Option<f64>,
    #[serde(rename = "netGigabytesTransmittedPerMonth", default, skip_serializing_if = "Option::is_none")]
    pub net_gigabytes_transmitted_per_month: Option<f64>,
    // Suitability verdict plus its explanation/detail codes; enums live in the
    // `assessed_network_adapter` submodule below.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub suitability: Option<assessed_network_adapter::Suitability>,
    #[serde(rename = "suitabilityExplanation", default, skip_serializing_if = "Option::is_none")]
    pub suitability_explanation: Option<assessed_network_adapter::SuitabilityExplanation>,
    #[serde(rename = "suitabilityDetail", default, skip_serializing_if = "Option::is_none")]
    pub suitability_detail: Option<assessed_network_adapter::SuitabilityDetail>,
}
/// Enum types referenced by the fields of the `AssessedNetworkAdapter` model.
pub mod assessed_network_adapter {
    use super::*;
    /// Overall suitability verdict for the adapter.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Suitability {
        Unknown,
        NotSuitable,
        Suitable,
        ConditionallySuitable,
        ReadinessUnknown,
    }
    /// Reason code explaining the suitability verdict.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum SuitabilityExplanation {
        Unknown,
        NotApplicable,
        InternalErrorOccurred,
    }
    /// Additional detail qualifying the suitability verdict.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum SuitabilityDetail {
        None,
        MegabytesOfDataTransmittedMissing,
        MegabytesOfDataTransmittedOutOfRange,
    }
}
/// Serde model for an assessed machine resource envelope:
/// identity fields (`id`/`name`/`type`/`eTag`) plus a `properties` payload.
/// `type` is a Rust keyword, so the field is `type_` with a serde rename.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct AssessedMachine {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(rename = "eTag", default, skip_serializing_if = "Option::is_none")]
    pub e_tag: Option<String>,
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<AssessedMachineProperties>,
}
/// Serde model for the `properties` payload of `AssessedMachine`.
/// camelCase wire names are mapped via `#[serde(rename)]`; every field is
/// optional and omitted from JSON when `None`. Enum-typed fields use the
/// `assessed_machine_properties` submodule below.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct AssessedMachineProperties {
    #[serde(rename = "bootType", default, skip_serializing_if = "Option::is_none")]
    pub boot_type: Option<assessed_machine_properties::BootType>,
    #[serde(rename = "datacenterMachineArmId", default, skip_serializing_if = "Option::is_none")]
    pub datacenter_machine_arm_id: Option<String>,
    #[serde(rename = "datacenterManagementServerArmId", default, skip_serializing_if = "Option::is_none")]
    pub datacenter_management_server_arm_id: Option<String>,
    #[serde(rename = "datacenterManagementServerName", default, skip_serializing_if = "Option::is_none")]
    pub datacenter_management_server_name: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub description: Option<String>,
    #[serde(rename = "displayName", default, skip_serializing_if = "Option::is_none")]
    pub display_name: Option<String>,
    #[serde(rename = "megabytesOfMemory", default, skip_serializing_if = "Option::is_none")]
    pub megabytes_of_memory: Option<f64>,
    #[serde(rename = "numberOfCores", default, skip_serializing_if = "Option::is_none")]
    pub number_of_cores: Option<i32>,
    #[serde(rename = "operatingSystemType", default, skip_serializing_if = "Option::is_none")]
    pub operating_system_type: Option<String>,
    #[serde(rename = "operatingSystemName", default, skip_serializing_if = "Option::is_none")]
    pub operating_system_name: Option<String>,
    #[serde(rename = "operatingSystemVersion", default, skip_serializing_if = "Option::is_none")]
    pub operating_system_version: Option<String>,
    #[serde(rename = "monthlyBandwidthCost", default, skip_serializing_if = "Option::is_none")]
    pub monthly_bandwidth_cost: Option<f64>,
    #[serde(rename = "monthlyStorageCost", default, skip_serializing_if = "Option::is_none")]
    pub monthly_storage_cost: Option<f64>,
    #[serde(rename = "monthlyPremiumStorageCost", default, skip_serializing_if = "Option::is_none")]
    pub monthly_premium_storage_cost: Option<f64>,
    #[serde(rename = "monthlyStandardSSDStorageCost", default, skip_serializing_if = "Option::is_none")]
    pub monthly_standard_ssd_storage_cost: Option<f64>,
    #[serde(rename = "confidenceRatingInPercentage", default, skip_serializing_if = "Option::is_none")]
    pub confidence_rating_in_percentage: Option<f64>,
    // Kept as raw JSON (`serde_json::Value`) rather than typed collections;
    // presumably maps of AssessedDisk / AssessedNetworkAdapter — not typed here.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub disks: Option<serde_json::Value>,
    #[serde(rename = "networkAdapters", default, skip_serializing_if = "Option::is_none")]
    pub network_adapters: Option<serde_json::Value>,
    #[serde(rename = "recommendedSize", default, skip_serializing_if = "Option::is_none")]
    pub recommended_size: Option<assessed_machine_properties::RecommendedSize>,
    #[serde(rename = "numberOfCoresForRecommendedSize", default, skip_serializing_if = "Option::is_none")]
    pub number_of_cores_for_recommended_size: Option<i32>,
    #[serde(rename = "megabytesOfMemoryForRecommendedSize", default, skip_serializing_if = "Option::is_none")]
    pub megabytes_of_memory_for_recommended_size: Option<f64>,
    #[serde(rename = "monthlyComputeCostForRecommendedSize", default, skip_serializing_if = "Option::is_none")]
    pub monthly_compute_cost_for_recommended_size: Option<f64>,
    #[serde(rename = "percentageCoresUtilization", default, skip_serializing_if = "Option::is_none")]
    pub percentage_cores_utilization: Option<f64>,
    #[serde(rename = "percentageMemoryUtilization", default, skip_serializing_if = "Option::is_none")]
    pub percentage_memory_utilization: Option<f64>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub suitability: Option<assessed_machine_properties::Suitability>,
    #[serde(rename = "suitabilityExplanation", default, skip_serializing_if = "Option::is_none")]
    pub suitability_explanation: Option<assessed_machine_properties::SuitabilityExplanation>,
    #[serde(rename = "suitabilityDetail", default, skip_serializing_if = "Option::is_none")]
    pub suitability_detail: Option<assessed_machine_properties::SuitabilityDetail>,
    // Timestamps are carried as strings, not chrono types — TODO confirm format (likely ISO-8601).
    #[serde(rename = "createdTimestamp", default, skip_serializing_if = "Option::is_none")]
    pub created_timestamp: Option<String>,
    #[serde(rename = "updatedTimestamp", default, skip_serializing_if = "Option::is_none")]
    pub updated_timestamp: Option<String>,
}
/// Enum types referenced by the fields of the `AssessedMachineProperties` model.
pub mod assessed_machine_properties {
    use super::*;
    /// Firmware boot mode; uppercase wire names "EFI"/"BIOS" are mapped to
    /// Rust-cased variants via `#[serde(rename)]`.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum BootType {
        Unknown,
        #[serde(rename = "EFI")]
        Efi,
        #[serde(rename = "BIOS")]
        Bios,
    }
    /// VM size (SKU) recommended by the assessment. Wire values use the
    /// underscore form (e.g. "Standard_D2_v3"), mapped via `#[serde(rename)]`.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum RecommendedSize {
        Unknown,
        #[serde(rename = "Basic_A0")]
        BasicA0,
        #[serde(rename = "Basic_A1")]
        BasicA1,
        #[serde(rename = "Basic_A2")]
        BasicA2,
        #[serde(rename = "Basic_A3")]
        BasicA3,
        #[serde(rename = "Basic_A4")]
        BasicA4,
        #[serde(rename = "Standard_A0")]
        StandardA0,
        #[serde(rename = "Standard_A1")]
        StandardA1,
        #[serde(rename = "Standard_A2")]
        StandardA2,
        #[serde(rename = "Standard_A3")]
        StandardA3,
        #[serde(rename = "Standard_A4")]
        StandardA4,
        #[serde(rename = "Standard_A5")]
        StandardA5,
        #[serde(rename = "Standard_A6")]
        StandardA6,
        #[serde(rename = "Standard_A7")]
        StandardA7,
        #[serde(rename = "Standard_A8")]
        StandardA8,
        #[serde(rename = "Standard_A9")]
        StandardA9,
        #[serde(rename = "Standard_A10")]
        StandardA10,
        #[serde(rename = "Standard_A11")]
        StandardA11,
        #[serde(rename = "Standard_A1_v2")]
        StandardA1V2,
        #[serde(rename = "Standard_A2_v2")]
        StandardA2V2,
        #[serde(rename = "Standard_A4_v2")]
        StandardA4V2,
        #[serde(rename = "Standard_A8_v2")]
        StandardA8V2,
        #[serde(rename = "Standard_A2m_v2")]
        StandardA2mV2,
        #[serde(rename = "Standard_A4m_v2")]
        StandardA4mV2,
        #[serde(rename = "Standard_A8m_v2")]
        StandardA8mV2,
        #[serde(rename = "Standard_D1")]
        StandardD1,
        #[serde(rename = "Standard_D2")]
        StandardD2,
        #[serde(rename = "Standard_D3")]
        StandardD3,
        #[serde(rename = "Standard_D4")]
        StandardD4,
        #[serde(rename = "Standard_D11")]
        StandardD11,
        #[serde(rename = "Standard_D12")]
        StandardD12,
        #[serde(rename = "Standard_D13")]
        StandardD13,
        #[serde(rename = "Standard_D14")]
        StandardD14,
        #[serde(rename = "Standard_D1_v2")]
        StandardD1V2,
        #[serde(rename = "Standard_D2_v2")]
        StandardD2V2,
        #[serde(rename = "Standard_D3_v2")]
        StandardD3V2,
        #[serde(rename = "Standard_D4_v2")]
        StandardD4V2,
        #[serde(rename = "Standard_D5_v2")]
        StandardD5V2,
        #[serde(rename = "Standard_D11_v2")]
        StandardD11V2,
        #[serde(rename = "Standard_D12_v2")]
        StandardD12V2,
        #[serde(rename = "Standard_D13_v2")]
        StandardD13V2,
        #[serde(rename = "Standard_D14_v2")]
        StandardD14V2,
        #[serde(rename = "Standard_D15_v2")]
        StandardD15V2,
        #[serde(rename = "Standard_DS1")]
        StandardDs1,
        #[serde(rename = "Standard_DS2")]
        StandardDs2,
        #[serde(rename = "Standard_DS3")]
        StandardDs3,
        #[serde(rename = "Standard_DS4")]
        StandardDs4,
        #[serde(rename = "Standard_DS11")]
        StandardDs11,
        #[serde(rename = "Standard_DS12")]
        StandardDs12,
        #[serde(rename = "Standard_DS13")]
        StandardDs13,
        #[serde(rename = "Standard_DS14")]
        StandardDs14,
        #[serde(rename = "Standard_DS1_v2")]
        StandardDs1V2,
        #[serde(rename = "Standard_DS2_v2")]
        StandardDs2V2,
        #[serde(rename = "Standard_DS3_v2")]
        StandardDs3V2,
        #[serde(rename = "Standard_DS4_v2")]
        StandardDs4V2,
        #[serde(rename = "Standard_DS5_v2")]
        StandardDs5V2,
        #[serde(rename = "Standard_DS11_v2")]
        StandardDs11V2,
        #[serde(rename = "Standard_DS12_v2")]
        StandardDs12V2,
        #[serde(rename = "Standard_DS13_v2")]
        StandardDs13V2,
        #[serde(rename = "Standard_DS14_v2")]
        StandardDs14V2,
        #[serde(rename = "Standard_DS15_v2")]
        StandardDs15V2,
        #[serde(rename = "Standard_F1")]
        StandardF1,
        #[serde(rename = "Standard_F2")]
        StandardF2,
        #[serde(rename = "Standard_F4")]
        StandardF4,
        #[serde(rename = "Standard_F8")]
        StandardF8,
        #[serde(rename = "Standard_F16")]
        StandardF16,
        #[serde(rename = "Standard_F1s")]
        StandardF1s,
        #[serde(rename = "Standard_F2s")]
        StandardF2s,
        #[serde(rename = "Standard_F4s")]
        StandardF4s,
        #[serde(rename = "Standard_F8s")]
        StandardF8s,
        #[serde(rename = "Standard_F16s")]
        StandardF16s,
        #[serde(rename = "Standard_G1")]
        StandardG1,
        #[serde(rename = "Standard_G2")]
        StandardG2,
        #[serde(rename = "Standard_G3")]
        StandardG3,
        #[serde(rename = "Standard_G4")]
        StandardG4,
        #[serde(rename = "Standard_G5")]
        StandardG5,
        #[serde(rename = "Standard_GS1")]
        StandardGs1,
        #[serde(rename = "Standard_GS2")]
        StandardGs2,
        #[serde(rename = "Standard_GS3")]
        StandardGs3,
        #[serde(rename = "Standard_GS4")]
        StandardGs4,
        #[serde(rename = "Standard_GS5")]
        StandardGs5,
        #[serde(rename = "Standard_H8")]
        StandardH8,
        #[serde(rename = "Standard_H16")]
        StandardH16,
        #[serde(rename = "Standard_H8m")]
        StandardH8m,
        #[serde(rename = "Standard_H16m")]
        StandardH16m,
        #[serde(rename = "Standard_H16r")]
        StandardH16r,
        #[serde(rename = "Standard_H16mr")]
        StandardH16mr,
        #[serde(rename = "Standard_L4s")]
        StandardL4s,
        #[serde(rename = "Standard_L8s")]
        StandardL8s,
        #[serde(rename = "Standard_L16s")]
        StandardL16s,
        #[serde(rename = "Standard_L32s")]
        StandardL32s,
        #[serde(rename = "Standard_D2s_v3")]
        StandardD2sV3,
        #[serde(rename = "Standard_D4s_v3")]
        StandardD4sV3,
        #[serde(rename = "Standard_D8s_v3")]
        StandardD8sV3,
        #[serde(rename = "Standard_D16s_v3")]
        StandardD16sV3,
        #[serde(rename = "Standard_D32s_v3")]
        StandardD32sV3,
        #[serde(rename = "Standard_D64s_v3")]
        StandardD64sV3,
        #[serde(rename = "Standard_D2_v3")]
        StandardD2V3,
        #[serde(rename = "Standard_D4_v3")]
        StandardD4V3,
        #[serde(rename = "Standard_D8_v3")]
        StandardD8V3,
        #[serde(rename = "Standard_D16_v3")]
        StandardD16V3,
        #[serde(rename = "Standard_D32_v3")]
        StandardD32V3,
        #[serde(rename = "Standard_D64_v3")]
        StandardD64V3,
        #[serde(rename = "Standard_F2s_v2")]
        StandardF2sV2,
        #[serde(rename = "Standard_F4s_v2")]
        StandardF4sV2,
        #[serde(rename = "Standard_F8s_v2")]
        StandardF8sV2,
        #[serde(rename = "Standard_F16s_v2")]
        StandardF16sV2,
        #[serde(rename = "Standard_F32s_v2")]
        StandardF32sV2,
        #[serde(rename = "Standard_F64s_v2")]
        StandardF64sV2,
        #[serde(rename = "Standard_F72s_v2")]
        StandardF72sV2,
        #[serde(rename = "Standard_E2_v3")]
        StandardE2V3,
        #[serde(rename = "Standard_E4_v3")]
        StandardE4V3,
        #[serde(rename = "Standard_E8_v3")]
        StandardE8V3,
        #[serde(rename = "Standard_E16_v3")]
        StandardE16V3,
        #[serde(rename = "Standard_E32_v3")]
        StandardE32V3,
        #[serde(rename = "Standard_E64_v3")]
        StandardE64V3,
        #[serde(rename = "Standard_E2s_v3")]
        StandardE2sV3,
        #[serde(rename = "Standard_E4s_v3")]
        StandardE4sV3,
        #[serde(rename = "Standard_E8s_v3")]
        StandardE8sV3,
        #[serde(rename = "Standard_E16s_v3")]
        StandardE16sV3,
        #[serde(rename = "Standard_E32s_v3")]
        StandardE32sV3,
        #[serde(rename = "Standard_E64s_v3")]
        StandardE64sV3,
        #[serde(rename = "Standard_M64s")]
        StandardM64s,
        #[serde(rename = "Standard_M64ms")]
        StandardM64ms,
        #[serde(rename = "Standard_M128s")]
        StandardM128s,
        #[serde(rename = "Standard_M128ms")]
        StandardM128ms,
    }
    /// Overall suitability verdict for the machine.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Suitability {
        Unknown,
        NotSuitable,
        Suitable,
        ConditionallySuitable,
        ReadinessUnknown,
    }
    /// Reason code explaining the suitability verdict.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum SuitabilityExplanation {
        Unknown,
        NotApplicable,
        GuestOperatingSystemArchitectureNotSupported,
        GuestOperatingSystemNotSupported,
        BootTypeNotSupported,
        MoreDisksThanSupported,
        NoSuitableVmSizeFound,
        OneOrMoreDisksNotSuitable,
        OneOrMoreAdaptersNotSuitable,
        InternalErrorOccurredDuringComputeEvaluation,
        InternalErrorOccurredDuringStorageEvaluation,
        InternalErrorOccurredDuringNetworkEvaluation,
        NoVmSizeSupportsStoragePerformance,
        NoVmSizeSupportsNetworkPerformance,
        NoVmSizeForSelectedPricingTier,
        NoVmSizeForSelectedAzureLocation,
        CheckRedHatLinuxVersion,
        CheckOpenSuseLinuxVersion,
        CheckWindowsServer2008R2Version,
        CheckCentOsVersion,
        CheckDebianLinuxVersion,
        CheckSuseLinuxVersion,
        CheckOracleLinuxVersion,
        CheckUbuntuLinuxVersion,
        CheckCoreOsLinuxVersion,
        WindowsServerVersionConditionallySupported,
        NoGuestOperatingSystemConditionallySupported,
        WindowsClientVersionsConditionallySupported,
        BootTypeUnknown,
        GuestOperatingSystemUnknown,
        WindowsServerVersionsSupportedWithCaveat,
        // Wire name keeps the "OS"/"MS" acronym capitalization.
        #[serde(rename = "WindowsOSNoLongerUnderMSSupport")]
        WindowsOsNoLongerUnderMsSupport,
        EndorsedWithConditionsLinuxDistributions,
        UnendorsedLinuxDistributions,
        NoVmSizeForStandardPricingTier,
        NoVmSizeForBasicPricingTier,
    }
    /// Additional detail qualifying the suitability verdict, typically naming
    /// the metric that was missing or out of range.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum SuitabilityDetail {
        None,
        RecommendedSizeHasLessNetworkAdapters,
        CannotReportComputeCost,
        CannotReportStorageCost,
        CannotReportBandwidthCosts,
        PercentageOfCoresUtilizedMissing,
        PercentageOfMemoryUtilizedMissing,
        PercentageOfCoresUtilizedOutOfRange,
        PercentageOfMemoryUtilizedOutOfRange,
    }
}
/// Serde model carrying the display strings of an `Operation`.
/// Every field is optional and omitted from JSON when `None`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct OperationDisplay {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub provider: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub resource: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub operation: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub description: Option<String>,
}
/// Serde model for an API operation: its `name`, optional `display`
/// strings, and an `origin` string.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Operation {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub display: Option<OperationDisplay>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub origin: Option<String>,
}
/// Serde model pairing an assessment-report URL with its expiration time
/// (both carried as plain strings on the wire).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DownloadUrl {
    #[serde(rename = "assessmentReportUrl", default, skip_serializing_if = "Option::is_none")]
    pub assessment_report_url: Option<String>,
    #[serde(rename = "expirationTime", default, skip_serializing_if = "Option::is_none")]
    pub expiration_time: Option<String>,
}
/// Paged list of `Project` items; `next_link` points to the next page when present.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ProjectResultList {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<Project>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
/// Paged list of `Machine` items; `next_link` points to the next page when present.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct MachineResultList {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<Machine>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
/// Unpaged list of `Group` items (no `nextLink` field in this list type).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct GroupResultList {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<Group>,
}
/// Paged list of `AssessedMachine` items; `next_link` points to the next page when present.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct AssessedMachineResultList {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<AssessedMachine>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
/// Unpaged list of `Assessment` items.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct AssessmentResultList {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<Assessment>,
}
/// Top-level error envelope: wraps the actual error body under an `error` key.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct CloudError {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub error: Option<CloudErrorBody>,
}
/// Error body with `code`/`message`/`target` strings and a recursive list of
/// nested `details` errors (omitted from JSON when empty).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct CloudErrorBody {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub code: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub message: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub target: Option<String>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub details: Vec<CloudErrorBody>,
}
/// Service-principal (SPN) identity details attached to a collector agent:
/// authority/audience endpoints plus application, object, and tenant ids.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct CollectorBodyAgentSpnProperties {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub authority: Option<String>,
    #[serde(rename = "applicationId", default, skip_serializing_if = "Option::is_none")]
    pub application_id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub audience: Option<String>,
    #[serde(rename = "objectId", default, skip_serializing_if = "Option::is_none")]
    pub object_id: Option<String>,
    #[serde(rename = "tenantId", default, skip_serializing_if = "Option::is_none")]
    pub tenant_id: Option<String>,
}
/// Resource envelope for a Hyper-V collector: identity fields plus a shared
/// `CollectorProperties` payload (same shape as the VMware/Server collectors).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct HyperVCollector {
    #[serde(rename = "eTag", default, skip_serializing_if = "Option::is_none")]
    pub e_tag: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<CollectorProperties>,
}
/// Properties shared by agent-based collectors: the discovery site they feed,
/// created/updated timestamps (strings on the wire), and agent details.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct CollectorProperties {
    #[serde(rename = "discoverySiteId", default, skip_serializing_if = "Option::is_none")]
    pub discovery_site_id: Option<String>,
    #[serde(rename = "createdTimestamp", default, skip_serializing_if = "Option::is_none")]
    pub created_timestamp: Option<String>,
    #[serde(rename = "updatedTimestamp", default, skip_serializing_if = "Option::is_none")]
    pub updated_timestamp: Option<String>,
    #[serde(rename = "agentProperties", default, skip_serializing_if = "Option::is_none")]
    pub agent_properties: Option<CollectorAgentProperties>,
}
/// Details of a collector's agent: id, version, last heartbeat time
/// (string on the wire), and its service-principal identity.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct CollectorAgentProperties {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub version: Option<String>,
    #[serde(rename = "lastHeartbeatUtc", default, skip_serializing_if = "Option::is_none")]
    pub last_heartbeat_utc: Option<String>,
    #[serde(rename = "spnDetails", default, skip_serializing_if = "Option::is_none")]
    pub spn_details: Option<CollectorBodyAgentSpnProperties>,
}
/// Properties for an import-based collector — like `CollectorProperties`
/// but without agent details (imports have no running agent).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ImportCollectorProperties {
    #[serde(rename = "discoverySiteId", default, skip_serializing_if = "Option::is_none")]
    pub discovery_site_id: Option<String>,
    #[serde(rename = "createdTimestamp", default, skip_serializing_if = "Option::is_none")]
    pub created_timestamp: Option<String>,
    #[serde(rename = "updatedTimestamp", default, skip_serializing_if = "Option::is_none")]
    pub updated_timestamp: Option<String>,
}
/// Resource envelope for a VMware collector; payload shape is shared with
/// `HyperVCollector` and `ServerCollector` via `CollectorProperties`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VMwareCollector {
    #[serde(rename = "eTag", default, skip_serializing_if = "Option::is_none")]
    pub e_tag: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<CollectorProperties>,
}
/// Resource envelope for a physical-server collector; payload shape is shared
/// with the Hyper-V and VMware collectors via `CollectorProperties`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ServerCollector {
    #[serde(rename = "eTag", default, skip_serializing_if = "Option::is_none")]
    pub e_tag: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<CollectorProperties>,
}
/// Resource envelope for an import-based collector; uses the agent-less
/// `ImportCollectorProperties` payload.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ImportCollector {
    #[serde(rename = "eTag", default, skip_serializing_if = "Option::is_none")]
    pub e_tag: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<ImportCollectorProperties>,
}
/// Unpaged list of `HyperVCollector` items.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct HyperVCollectorList {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<HyperVCollector>,
}
/// Unpaged list of `VMwareCollector` items.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VMwareCollectorList {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<VMwareCollector>,
}
/// Unpaged list of `ServerCollector` items.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ServerCollectorList {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<ServerCollector>,
}
/// Unpaged list of `ImportCollector` items.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ImportCollectorList {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<ImportCollector>,
}
/// A VM family with the target locations and categories it is offered in;
/// both lists are omitted from JSON when empty.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VmFamily {
    #[serde(rename = "familyName", default, skip_serializing_if = "Option::is_none")]
    pub family_name: Option<String>,
    #[serde(rename = "targetLocations", default, skip_serializing_if = "Vec::is_empty")]
    pub target_locations: Vec<String>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub category: Vec<String>,
}
/// Option catalogs available to an assessment: supported VM families plus
/// the reserved-instance families/locations/currencies/offers, all as lists
/// omitted from JSON when empty.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct AssessmentOptionsProperties {
    #[serde(rename = "vmFamilies", default, skip_serializing_if = "Vec::is_empty")]
    pub vm_families: Vec<VmFamily>,
    #[serde(rename = "reservedInstanceVmFamilies", default, skip_serializing_if = "Vec::is_empty")]
    pub reserved_instance_vm_families: Vec<String>,
    #[serde(rename = "reservedInstanceSupportedLocations", default, skip_serializing_if = "Vec::is_empty")]
    pub reserved_instance_supported_locations: Vec<String>,
    #[serde(rename = "reservedInstanceSupportedCurrencies", default, skip_serializing_if = "Vec::is_empty")]
    pub reserved_instance_supported_currencies: Vec<String>,
    #[serde(rename = "reservedInstanceSupportedOffers", default, skip_serializing_if = "Vec::is_empty")]
    pub reserved_instance_supported_offers: Vec<String>,
}
/// Resource envelope for assessment options. Note: unlike most envelopes in
/// this file, `properties` is required here (not `Option`), so deserialization
/// fails if the payload omits it.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct AssessmentOptions {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    pub properties: AssessmentOptionsProperties,
}
/// Unpaged list of `AssessmentOptions` items.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct AssessmentOptionsResultList {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<AssessmentOptions>,
}
/// Unpaged list of `Operation` items.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct OperationResultList {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<Operation>,
}
/// Paged list of `PrivateEndpointConnection` items; `next_link` points to
/// the next page when present.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PrivateEndpointConnectionCollection {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<PrivateEndpointConnection>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
/// Resource envelope for a private endpoint connection. `properties` is
/// required (not `Option`), so deserialization fails if the payload omits it.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PrivateEndpointConnection {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
    #[serde(rename = "eTag", default, skip_serializing_if = "Option::is_none")]
    pub e_tag: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    pub properties: PrivateEndpointConnectionProperties,
}
/// Properties of a private endpoint connection: provisioning state, the
/// linked private endpoint (by resource id), and the connection-approval state.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PrivateEndpointConnectionProperties {
    #[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")]
    pub provisioning_state: Option<private_endpoint_connection_properties::ProvisioningState>,
    #[serde(rename = "privateEndpoint", default, skip_serializing_if = "Option::is_none")]
    pub private_endpoint: Option<ResourceId>,
    #[serde(rename = "privateLinkServiceConnectionState", default, skip_serializing_if = "Option::is_none")]
    pub private_link_service_connection_state: Option<PrivateLinkServiceConnectionState>,
}
/// Enum types referenced by `PrivateEndpointConnectionProperties`.
pub mod private_endpoint_connection_properties {
    use super::*;
    /// Lifecycle state of the connection's provisioning operation.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum ProvisioningState {
        Accepted,
        InProgress,
        Succeeded,
        Failed,
    }
}
/// Minimal reference to another resource: just its `id` string.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ResourceId {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
}
/// Approval state of a private-link service connection: a `status` enum plus
/// free-form `description` and `actions_required` strings.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PrivateLinkServiceConnectionState {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub status: Option<private_link_service_connection_state::Status>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub description: Option<String>,
    #[serde(rename = "actionsRequired", default, skip_serializing_if = "Option::is_none")]
    pub actions_required: Option<String>,
}
/// Enum types referenced by `PrivateLinkServiceConnectionState`.
pub mod private_link_service_connection_state {
    use super::*;
    /// Approval status of the connection.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Status {
        Approved,
        Pending,
        Rejected,
        Disconnected,
    }
}
/// Resource envelope for a private-link resource; here `properties` IS
/// optional, unlike `PrivateEndpointConnection`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PrivateLinkResource {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<PrivateLinkResourceProperties>,
}
/// Properties of a private-link resource: required member/zone name lists
/// (omitted from JSON when empty) and the link group id.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PrivateLinkResourceProperties {
    #[serde(rename = "requiredMembers", default, skip_serializing_if = "Vec::is_empty")]
    pub required_members: Vec<String>,
    #[serde(rename = "requiredZoneNames", default, skip_serializing_if = "Vec::is_empty")]
    pub required_zone_names: Vec<String>,
    #[serde(rename = "groupId", default, skip_serializing_if = "Option::is_none")]
    pub group_id: Option<String>,
}
/// Paged list of `PrivateLinkResource` items; `next_link` points to the next
/// page when present.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PrivateLinkResourceCollection {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<PrivateLinkResource>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
|
use crate::{
input::{
mouse::{EditorMouseState, MouseKeys, ScrollDelta, ViewportPosition},
InputPreprocessorMessage, ModifierKeys,
},
message_prelude::{Message, ToolMessage},
tool::ToolType,
Editor,
};
use graphene::color::Color;
/// A set of utility functions to make the writing of editor tests more declarative
pub trait EditorTestUtils {
    // Shape-drawing helpers: select the tool and drag from (x1, y1) to (x2, y2).
    fn draw_rect(&mut self, x1: f64, y1: f64, x2: f64, y2: f64);
    fn draw_shape(&mut self, x1: f64, y1: f64, x2: f64, y2: f64);
    fn draw_ellipse(&mut self, x1: f64, y1: f64, x2: f64, y2: f64);
    /// Select given tool and drag it from (x1, y1) to (x2, y2)
    fn drag_tool(&mut self, typ: ToolType, x1: f64, y1: f64, x2: f64, y2: f64);
    /// Send a mouse-move event to the given viewport position.
    fn move_mouse(&mut self, x: f64, y: f64);
    fn mousedown(&mut self, state: EditorMouseState);
    fn mouseup(&mut self, state: EditorMouseState);
    /// Press the left mouse button at (x, y).
    fn lmb_mousedown(&mut self, x: f64, y: f64);
    /// Feed a raw input-preprocessor message to the editor.
    fn input(&mut self, message: InputPreprocessorMessage);
    fn select_tool(&mut self, typ: ToolType);
    fn select_primary_color(&mut self, color: Color);
}
impl EditorTestUtils for Editor {
    fn draw_rect(&mut self, x1: f64, y1: f64, x2: f64, y2: f64) {
        self.drag_tool(ToolType::Rectangle, x1, y1, x2, y2);
    }
    fn draw_shape(&mut self, x1: f64, y1: f64, x2: f64, y2: f64) {
        self.drag_tool(ToolType::Shape, x1, y1, x2, y2);
    }
    fn draw_ellipse(&mut self, x1: f64, y1: f64, x2: f64, y2: f64) {
        self.drag_tool(ToolType::Ellipse, x1, y1, x2, y2);
    }
    /// Full click-drag gesture: select tool, move, press, move, release.
    fn drag_tool(&mut self, typ: ToolType, x1: f64, y1: f64, x2: f64, y2: f64) {
        self.select_tool(typ);
        self.move_mouse(x1, y1);
        self.lmb_mousedown(x1, y1);
        self.move_mouse(x2, y2);
        self.mouseup(EditorMouseState {
            editor_position: (x2, y2).into(),
            mouse_keys: MouseKeys::empty(),
            scroll_delta: ScrollDelta::default(),
        });
    }
    fn move_mouse(&mut self, x: f64, y: f64) {
        let mut editor_mouse_state = EditorMouseState::new();
        editor_mouse_state.editor_position = ViewportPosition::new(x, y);
        self.input(InputPreprocessorMessage::MouseMove(editor_mouse_state, ModifierKeys::default()));
    }
    fn mousedown(&mut self, state: EditorMouseState) {
        self.input(InputPreprocessorMessage::MouseDown(state, ModifierKeys::default()));
    }
    fn mouseup(&mut self, state: EditorMouseState) {
        // Consistency fix: route through `input` like every other event, instead
        // of passing a bare InputPreprocessorMessage to `handle_message`.
        self.input(InputPreprocessorMessage::MouseUp(state, ModifierKeys::default()));
    }
    fn lmb_mousedown(&mut self, x: f64, y: f64) {
        self.mousedown(EditorMouseState {
            editor_position: (x, y).into(),
            mouse_keys: MouseKeys::LEFT,
            scroll_delta: ScrollDelta::default(),
        })
    }
    /// Wrap the message in `Message::InputPreprocessor`; panicking on failure
    /// is acceptable here because these helpers only run inside tests.
    fn input(&mut self, message: InputPreprocessorMessage) {
        self.handle_message(Message::InputPreprocessor(message)).unwrap();
    }
    fn select_tool(&mut self, typ: ToolType) {
        self.handle_message(Message::Tool(ToolMessage::SelectTool(typ))).unwrap();
    }
    fn select_primary_color(&mut self, color: Color) {
        self.handle_message(Message::Tool(ToolMessage::SelectPrimaryColor(color))).unwrap();
    }
}
|
#[derive(Debug)]
enum UsState {
    Alabama,
    Alaska,
    Vermont,
}
enum Coin {
    Penny,
    Nickel,
    Dime,
    Quarter(UsState),
}
/// Map a coin to its value in cents, logging the two special cases.
fn value_in_cents(coin: Coin) -> u32 {
    match coin {
        Coin::Quarter(state) => {
            println!("State quarter is from {:?}", state);
            25
        }
        Coin::Penny => {
            println!("Lucky penny");
            1
        }
        Coin::Nickel => 5,
        Coin::Dime => 10,
    }
}
/// Demo: pattern-match a coin, then a handful of integer literals.
fn main() {
    let my_coin = Coin::Quarter(UsState::Vermont);
    let my_coin_value = value_in_cents(my_coin);
    println!("The value of my coin is: {}", my_coin_value);
    println!("*****************************");
    let some_u32_v: u32 = 4;
    let some_u32_v2 = 5u32;
    println!("some_u32_v: {}", some_u32_v);
    println!("some_u32_v2: {}", some_u32_v2);
    let some_u8_value = 0u8;
    println!("some_u8_value: {}", some_u8_value);
    // Only odd values up to 7 are spelled out; everything else stays silent,
    // exactly like the original catch-all `_ => ()` arm.
    let spelled = match some_u8_value {
        1 => Some("one"),
        3 => Some("three"),
        5 => Some("five"),
        7 => Some("seven"),
        _ => None,
    };
    if let Some(word) = spelled {
        println!("{}", word);
    }
}
|
pub mod delay;
pub mod peak_level_detector;
|
// q0216_combination_sum_iii
struct Solution;
impl Solution {
    /// LeetCode 216: all sets of `k` distinct digits in 1..=9 summing to `n`.
    ///
    /// Starts from the minimal set `[1, 2, ..., k]` and performs `n - bottom`
    /// rounds; each round produces every set reachable by incrementing one
    /// element by 1 while keeping elements strictly increasing and <= 9.
    /// (Removed: a dead commented-out recursive version, and a redundant
    /// `.into_iter()` on the range before `collect`.)
    pub fn combination_sum3(k: i32, n: i32) -> Vec<Vec<i32>> {
        // Smallest reachable sum: 1 + 2 + ... + k.
        let bottom = (1 + k) * k / 2;
        if bottom > n {
            return vec![];
        }
        // Largest reachable sum: the k biggest digits, 9 + 8 + ...
        let top = (9 + 10 - k) * k / 2;
        if top < n {
            return vec![];
        }
        let mut ret: Vec<Vec<i32>> = vec![(1..=k).collect()];
        let k = k as usize;
        for _ in 0..n - bottom {
            let mut tmp = vec![];
            for v in ret.iter() {
                // Bump the last element when it has headroom below 9.
                if v[k - 1] != 9 {
                    let mut t = v.clone();
                    t[k - 1] += 1;
                    if !tmp.contains(&t) {
                        tmp.push(t);
                    }
                }
                // Bump any earlier element that stays strictly below its
                // right-hand neighbour after the increment.
                for i in 2..=k {
                    if v[k - i] != 9 && v[k - i + 1] - v[k - i] > 1 {
                        let mut t = v.clone();
                        t[k - i] += 1;
                        if !tmp.contains(&t) {
                            tmp.push(t);
                        }
                    }
                }
            }
            std::mem::swap(&mut ret, &mut tmp);
        }
        ret
    }
}
#[cfg(test)]
mod tests {
    use super::Solution;
    #[test]
    fn it_works() {
        // Earlier cases, kept for reference:
        // assert_eq!( vec![vec![1]], Solution::combination_sum3(1, 1));
        // assert_eq!( Vec::<Vec<i32>>::new(), Solution::combination_sum3(1, 18));
        // assert_eq!( vec![vec![1,2,4]], Solution::combination_sum3(3, 7));
        // assert_eq!( vec![vec![1,2,3,4,5,6,7,8,9]], Solution::combination_sum3(9, 45));
        // The expected ordering follows the order sets are generated in.
        assert_eq!(
            vec![vec![1, 5], vec![2, 4]],
            Solution::combination_sum3(2, 6)
        );
    }
}
|
mod lunch;
//use lunch::menu;
//use lunch::menu::dinner;
use lunch::menu::{self,dinner};
fn main() {
    println!("We need Food");
    // Three equivalent ways to call the same function, from most to least explicit:
    lunch::menu::dinner(); // absolute path from the crate root
    menu::dinner(); // via the `self` re-export in the `use` above
    dinner(); // idiomatic path
}
// Library
// Github Account
// Git install
// Smart Git/ Git command (terminal)
// firstwelcome clone
// Readme
// Cargo.toml |
use diesel::prelude::*;
use diesel::r2d2::{ConnectionManager, Pool};
use tonic::Status;
use tracing::instrument;
use crate::api::permission_request::IdOrName;
use crate::api::PermissionData;
use crate::schema::permissions;
use crate::schema::permissions::dsl::*;
use crate::storage::helpers::sql_err_to_grpc_error;
/// A permission row as read from the `permissions` table.
#[derive(Queryable, Default, Debug)]
pub struct Permission {
    // Primary key.
    pub id: i32,
    pub name: String,
    pub description: String,
}
/// Insertable form: borrows its fields so inserts need no extra allocations.
#[derive(Insertable)]
#[diesel(table_name = permissions)]
pub struct NewPermission<'a> {
    pub name: &'a str,
    pub description: &'a str,
}
/// Partial update; `None` fields are skipped by `AsChangeset` by default.
#[derive(AsChangeset, Default)]
#[diesel(table_name = permissions)]
pub struct UpdatePermission {
    pub name: Option<String>,
    pub description: Option<String>,
}
impl From<Permission> for PermissionData {
fn from(p: Permission) -> PermissionData {
PermissionData {
id: Some(p.id),
name: p.name,
description: p.description,
}
}
}
impl From<&Permission> for PermissionData {
    /// Borrowing conversion: clones the strings, leaving the row intact.
    fn from(p: &Permission) -> PermissionData {
        PermissionData {
            id: Some(p.id),
            name: p.name.to_owned(),
            description: p.description.to_owned(),
        }
    }
}
impl Permission {
    /// Load every permission and convert to the gRPC representation.
    #[instrument]
    pub async fn all(
        pool: &Pool<ConnectionManager<PgConnection>>,
    ) -> Result<Vec<PermissionData>, Status> {
        // A pool checkout failure is a server-side problem; surface it as a
        // gRPC `internal` status instead of panicking the service (was `.unwrap()`).
        let mut conn = pool
            .get()
            .map_err(|e| Status::internal(format!("failed to get DB connection: {}", e)))?;
        match permissions.load::<Permission>(&mut conn) {
            Ok(results) => Ok(results.iter().map(|t| t.into()).collect()),
            Err(err) => Err(sql_err_to_grpc_error(err)),
        }
    }
    /// Fetch a single permission by numeric id or by name.
    #[instrument]
    pub async fn get(
        pool: &Pool<ConnectionManager<PgConnection>>,
        id_or_name: &IdOrName,
    ) -> Result<PermissionData, Status> {
        let mut conn = pool
            .get()
            .map_err(|e| Status::internal(format!("failed to get DB connection: {}", e)))?;
        match id_or_name {
            IdOrName::Id(user_id) => match permissions.find(user_id).first::<Permission>(&mut conn) {
                Ok(results) => Ok(results.into()),
                Err(err) => Err(sql_err_to_grpc_error(err)),
            },
            IdOrName::Name(permission_name) => {
                match permissions
                    .filter(name.eq(permission_name))
                    .first::<Permission>(&mut conn)
                {
                    Ok(results) => Ok(results.into()),
                    Err(err) => Err(sql_err_to_grpc_error(err)),
                }
            }
        }
    }
    /// Insert a new permission; returns the stored row (with its generated id).
    #[instrument]
    pub async fn add(
        pool: &Pool<ConnectionManager<PgConnection>>,
        permission_data: PermissionData,
    ) -> Result<PermissionData, Status> {
        let new_permission = NewPermission {
            name: permission_data.name.as_str(),
            description: permission_data.description.as_str(),
        };
        let mut conn = pool
            .get()
            .map_err(|e| Status::internal(format!("failed to get DB connection: {}", e)))?;
        match diesel::insert_into(permissions)
            .values(&new_permission)
            .get_result::<Permission>(&mut conn)
        {
            Ok(results) => Ok(results.into()),
            Err(err) => Err(sql_err_to_grpc_error(err)),
        }
    }
    /// Update name and/or description; empty strings mean "leave unchanged".
    #[instrument]
    pub async fn update(
        pool: &Pool<ConnectionManager<PgConnection>>,
        permission_data: PermissionData,
    ) -> Result<PermissionData, Status> {
        // Validate the input before checking out a connection.
        if permission_data.id.is_none() {
            return Err(Status::invalid_argument("Permission id is required"));
        }
        let mut update = UpdatePermission::default();
        if !permission_data.name.is_empty() {
            update.name = Some(permission_data.name);
        }
        if !permission_data.description.is_empty() {
            update.description = Some(permission_data.description)
        }
        let mut conn = pool
            .get()
            .map_err(|e| Status::internal(format!("failed to get DB connection: {}", e)))?;
        // `unwrap()` is safe: `id.is_none()` was rejected above.
        match diesel::update(permissions.find(permission_data.id.unwrap()))
            .set(update)
            .get_result::<Permission>(&mut conn)
        {
            Ok(results) => Ok(results.into()),
            Err(err) => Err(sql_err_to_grpc_error(err)),
        }
    }
    /// Delete by id or name; returns the number of rows removed.
    #[instrument]
    pub async fn delete(
        pool: &Pool<ConnectionManager<PgConnection>>,
        id_or_name: IdOrName,
    ) -> Result<usize, Status> {
        let mut conn = pool
            .get()
            .map_err(|e| Status::internal(format!("failed to get DB connection: {}", e)))?;
        match id_or_name {
            IdOrName::Id(permission_id) => {
                match diesel::delete(permissions.find(permission_id)).execute(&mut conn) {
                    Ok(results) => Ok(results),
                    Err(err) => Err(sql_err_to_grpc_error(err)),
                }
            }
            IdOrName::Name(permission_name) => {
                match diesel::delete(permissions.filter(name.eq(permission_name))).execute(&mut conn) {
                    Ok(results) => Ok(results),
                    Err(err) => Err(sql_err_to_grpc_error(err)),
                }
            }
        }
    }
}
|
use serde_json::Value;
use util::{JsonType, JsonValueExt};
use errors::{ErrorKind, ValidationError};
use schema::{Context, Schema, SchemaBase};
/// Schema for JSON arrays like `[1, 2, 3]`.
#[derive(Clone, Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
#[serde(deny_unknown_fields)]
pub struct ArraySchema {
    description: Option<String>,
    id: Option<String>,
    title: Option<String>,
    // Inclusive bounds on the number of elements.
    min_items: Option<usize>,
    max_items: Option<usize>,
    // When `Some(true)`, duplicate elements are rejected.
    unique_items: Option<bool>,
    // Per-element schema(s); see `Items` for the list vs tuple forms.
    items: Option<Items>,
    // Tuple form only: allow the array to differ in length from the schema list.
    additional_items: Option<bool>,
}
/// The two forms of the JSON Schema `items` keyword: one schema applied to
/// every element (list), or one schema per position (tuple).
#[derive(Clone, Debug, Serialize, Deserialize)]
#[serde(untagged)]
enum Items {
    List(Box<Schema>),
    Tuple(Vec<Schema>),
}
impl ArraySchema {
    /// `additionalItems` defaults to `false` when absent.
    fn additional_items(&self) -> bool {
        self.additional_items.unwrap_or(false)
    }
    /// `uniqueItems` defaults to `false` when absent.
    fn unique_items(&self) -> bool {
        self.unique_items.unwrap_or(false)
    }
    /// Check `minItems`/`maxItems`, attaching any error to the array node.
    fn validate_size<'json>(
        &self,
        array: &'json [Value],
        parent: &'json Value,
        errors: &mut Vec<ValidationError<'json>>,
    ) {
        if let Some(min) = self.min_items {
            if array.len() < min {
                errors.push(ValidationError {
                    reason: ErrorKind::MinLength {
                        expected: min,
                        found: array.len(),
                    },
                    node: parent,
                });
            }
        }
        if let Some(max) = self.max_items {
            if array.len() > max {
                errors.push(ValidationError {
                    reason: ErrorKind::MaxLength {
                        expected: max,
                        found: array.len(),
                    },
                    node: parent,
                });
            }
        }
    }
    /// Validate elements against `items`: list form applies one schema to every
    /// element, tuple form pairs each position with its own schema.
    fn validate_items<'json>(
        &self,
        ctx: &Context,
        array: &'json [Value],
        parent: &'json Value,
        errors: &mut Vec<ValidationError<'json>>,
    ) {
        if let Some(ref items) = self.items {
            match *items {
                Items::Tuple(ref schemas) => {
                    // A length mismatch is only an error when additionalItems is off.
                    if schemas.len() != array.len() && !self.additional_items() {
                        errors.push(ValidationError {
                            reason: ErrorKind::TupleLengthMismatch {
                                schemas: schemas.len(),
                                tuple: array.len(),
                            },
                            node: parent,
                        });
                    }
                    // `zip` stops at the shorter side, so surplus elements are skipped.
                    for (schema, value) in schemas.iter().zip(array) {
                        schema.validate_inner(ctx, value, errors);
                    }
                }
                Items::List(ref schema) => for value in array {
                    schema.validate_inner(ctx, value, errors);
                },
            }
        }
    }
    /// When `uniqueItems` is on, report the first duplicated element (once).
    fn validate_unique<'json>(
        &self,
        array: &'json [Value],
        parent: &'json Value,
        errors: &mut Vec<ValidationError<'json>>,
    ) {
        if self.unique_items() {
            // `Value` is not hashable here, so this stays a pairwise O(n^2) scan;
            // `slice::contains` replaces the hand-rolled inner loop.
            let mut seen: Vec<&Value> = vec![];
            for item in array {
                if seen.contains(&item) {
                    errors.push(ValidationError {
                        node: parent,
                        reason: ErrorKind::ArrayItemNotUnique,
                    });
                    return;
                }
                seen.push(item);
            }
        }
    }
}
impl SchemaBase for ArraySchema {
    #[doc(hidden)]
    fn validate_inner<'json>(
        &self,
        ctx: &Context,
        value: &'json Value,
        errors: &mut Vec<ValidationError<'json>>,
    ) {
        // Arrays get the three checks; any other JSON type is a type mismatch.
        match value {
            &Value::Array(ref array) => {
                self.validate_size(array, value, errors);
                self.validate_items(ctx, array, value, errors);
                self.validate_unique(array, value, errors);
            }
            val => errors.push(ValidationError::type_mismatch(
                val,
                JsonType::Array,
                val.get_type(),
            )),
        }
    }
}
/// A builder for creating array schemas programmatically.
#[derive(Debug)]
pub struct ArraySchemaBuilder {
    description: Option<String>,
    id: Option<String>,
    title: Option<String>,
    min_items: Option<usize>,
    max_items: Option<usize>,
    // Unlike `ArraySchema`, the builder stores concrete defaults, not Options.
    unique_items: bool,
    items: Option<Items>,
    additional_items: bool,
}
impl Default for ArraySchemaBuilder {
    /// Unconstrained schema: nothing limited, duplicates allowed,
    /// additional items permitted.
    fn default() -> ArraySchemaBuilder {
        ArraySchemaBuilder {
            description: None,
            id: None,
            title: None,
            min_items: None,
            max_items: None,
            unique_items: false,
            items: None,
            additional_items: true,
        }
    }
}
impl ArraySchemaBuilder {
/// Sets the description.
pub fn description<V: Into<String>>(mut self, value: V) -> Self {
self.description = Some(value.into());
self
}
/// Sets the ID.
pub fn id<V: Into<String>>(mut self, value: V) -> Self {
self.id = Some(value.into());
self
}
/// Sets the title.
pub fn title<V: Into<String>>(mut self, value: V) -> Self {
self.title = Some(value.into());
self
}
/// Set the minimum number of items this array must have.
pub fn min_items(mut self, value: usize) -> Self {
self.min_items = Some(value);
self
}
/// Set the maximum number of items this array may have.
pub fn max_items(mut self, value: usize) -> Self {
self.max_items = Some(value);
self
}
/// Make it so array items have to be unique.
pub fn unique_items(mut self) -> Self {
self.unique_items = true;
self
}
/// Set a schema that every item must conform to. (list validation)
pub fn all_items_schema<V: Into<Schema>>(mut self, value: V) -> Self {
self.items = Some(Items::List(Box::new(value.into())));
self
}
/// Set a list of schemas that each item must conform to. (tuple validation)
pub fn item_schemas<V: Into<Vec<Schema>>>(mut self, value: V) -> Self {
self.items = Some(Items::Tuple(value.into()));
self
}
/// Set whether additional items are allowed (tuple validation).
pub fn additional_items(mut self, value: bool) -> Self {
self.additional_items = value;
self
}
/// Returns the finished `Schema`.
pub fn build(self) -> Schema {
From::from(ArraySchema {
description: self.description,
id: self.id,
title: self.title,
min_items: self.min_items,
max_items: self.max_items,
unique_items: Some(self.unique_items),
items: self.items,
additional_items: Some(self.additional_items),
})
}
}
#[cfg(test)]
mod tests {
    use serde_json;
    use super::*;
    use errors::ErrorKind;
    use number::NumberSchemaBuilder;
    // Duplicate elements produce exactly one ArrayItemNotUnique error.
    #[test]
    fn unique_elements() {
        let schema = ArraySchemaBuilder::default().unique_items().build();
        let input = serde_json::from_str("[1, 1, 2, 3, 4]").unwrap();
        let errors = schema.validate(&input).unwrap_err().0;
        assert_eq!(errors.len(), 1);
        if let ErrorKind::ArrayItemNotUnique = errors[0].reason {
        } else {
            assert!(false, "Wrong error reason");
        }
    }
    // With no constraints, any heterogeneous array validates.
    #[test]
    fn default_schema() {
        let schema = ArraySchemaBuilder::default().build();
        let input = serde_json::from_str(r#"[1, "a", "b", {"test": 123}, []]"#).unwrap();
        let result = schema.validate(&input);
        assert!(result.is_ok());
    }
    // A per-item number schema flags the non-number and the out-of-range value.
    #[test]
    fn subschema() {
        let input = serde_json::from_str(r#"[[], 1.2, 1.4, 1.9, 2.5]"#).unwrap();
        let item_schema = NumberSchemaBuilder::default()
            .minimum(1.0)
            .maximum(2.0)
            .build();
        let schema = ArraySchemaBuilder::default()
            .all_items_schema(item_schema)
            .build();
        let errors = schema.validate(&input).unwrap_err().0;
        assert_eq!(errors.len(), 2);
        assert_eq!(*errors[0].node, input[0]);
        if let ErrorKind::NumberRange { value, bound } = errors[1].reason {
            assert_eq!(value, 2.5);
            assert_eq!(bound, 2.0);
        } else {
            assert!(false, "Wrong property");
        }
    }
}
|
mod tokens;
mod scanner;
mod symbols;
#[macro_use]
extern crate derive_more;
use std::env::args;
use std::process::exit;
use std::fs::File;
use std::io::prelude::Read;
use std::io;
use std::io::BufRead;
use scanner::Scanner;
/// Scan `source` and print each token.
///
/// Returns `true` if an error occurred. The original returned `true`
/// unconditionally, which made `run_file` call `exit(65)` even on success.
/// NOTE(review): scanner errors are not surfaced yet, so this currently
/// always reports success — wire it to the scanner's error state when available.
fn run(source: String) -> bool {
    let scanner = Scanner::new(source);
    let tokens = scanner.scan_tokens();
    for token in tokens {
        println!("{}", token);
    }
    false
}
/// Report an error with no location context beyond the line number.
fn error(line: i32, message: String) {
    report(line, "", message);
}
/// Print a jlox-style diagnostic, e.g. `[line 3] Error: unexpected character`.
fn report(line: i32, at: &str, message: String) {
    println!("[line {}] Error{}: {}", line, at, message);
}
/// Read the script at `path` and run it, exiting with code 65 (EX_DATAERR)
/// when the run reports an error.
fn run_file(path: String) {
    // Fix: `read_to_string` takes `&mut self`, so the handle must be mutable
    // (the original `let file` did not compile).
    let mut file = File::open(path).expect("File not found");
    let mut source = String::new();
    file.read_to_string(&mut source).expect("Could not read file");
    let error = run(source);
    if error {
        exit(65);
    }
}
/// Interactive REPL: read one line at a time and run it.
///
/// Fixes over the original: the buffer was moved into `run` and then reused
/// (a compile error), read errors were silently ignored, and EOF spun forever.
fn run_prompt() -> io::Result<()> {
    let stdin = io::stdin();
    let mut handle = stdin.lock();
    loop {
        print!("> ");
        // Fresh buffer per line: `run` takes ownership of the String.
        let mut buffer = String::new();
        // `Ok(0)` bytes read means EOF (e.g. Ctrl-D): leave the loop cleanly.
        if handle.read_line(&mut buffer)? == 0 {
            return Ok(());
        }
        run(buffer);
    }
}
/// Entry point: `jlox [script]` — run a file, or start the REPL with no args.
///
/// Fixes over the original: `args` must be mutable to call `next()`/`nth()`,
/// `args.next()` yields `Option<String>` (not `String`), and `env::args()`
/// includes the program name, so a single script argument means `len() == 2`.
fn main() {
    let mut args = args();
    if args.len() > 2 {
        println!("Usage: jlox [script]");
        exit(64);
    } else if args.len() == 2 {
        // Skip argv[0]; the length check guarantees the script path exists.
        run_file(args.nth(1).expect("script path missing"));
    } else {
        // REPL mode; I/O errors from stdin are deliberately ignored on exit.
        let _ = run_prompt();
    }
}
|
// Copyright 2018 Mohammad Rezaei.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//
#![feature(test)]
extern crate test;
extern crate thincollections;
extern crate rand;
extern crate xoshiro;
use thincollections::thin_map::ThinMap;
use thincollections::cla_map::ClaMap;
use std::collections::HashMap;
use test::Bencher;
use test::stats::Summary;
use test::black_box;
use rand::*;
use xoshiro::Xoshiro512StarStar;
use thincollections::thin_hasher::TrivialOneFieldHasherBuilder;
/// Build `size` pseudo-random i64 keys from a fixed seed so every benchmark
/// run sees the same sequence.
fn create_rand_vec(size: i64) -> Vec<i64> {
    let mut rng1 = Xoshiro512StarStar::from_seed_u64(0x1234_5678_9ABC_DEF1);
    let mut vec = Vec::with_capacity(size as usize);
    for _i in 0..size {
        vec.push(rng1.next_u64() as i64);
    }
    vec
}
// NOTE(review): each bench below clones the Bencher, runs `iter` on the clone,
// then calls `bench` with an empty closure to pull out a `Summary` — a
// nightly-only trick; confirm the Summary really reflects the `iter` timings.
// Random-key GET throughput for ThinMap, at each capacity point.
#[bench]
fn benchmdr_thin64_get(b: &mut Bencher) {
    let points = determine_points(4_000_000);
    let src = create_rand_vec(*points.last().unwrap() as i64);
    let mut rng1 = Xoshiro512StarStar::from_seed_u64(0x1234_5678_9ABC_DEF1);
    let mut insert_result: Vec<(u64, f64)> = Vec::new();
    for p in points.iter() {
        let mut b = b.clone();
        let map = create_thin64_from_vec(*p, &src);
        // Shuffle the first p keys so lookups don't follow insertion order.
        let mut get_src = src.clone();
        get_src.truncate(*p as usize);
        rng1.shuffle(&mut get_src);
        b.iter(|| get_thin64_from_vec(&map, &get_src));
        let summary: Summary = b.bench(|b: &mut Bencher| ()).unwrap();
        insert_result.push((*p, (*p as f64) * 1000.0 / summary.median)); // throughput in millions/sec
    }
    println!("ThinMap rnd 64 get");
    for x in insert_result.iter() {
        println!("{}, {}", (*x).0, (*x).1);
    }
}
// Random-key GET throughput for std HashMap (baseline for the above).
#[bench]
fn benchmdr_std64_get(b: &mut Bencher) {
    let points = determine_points(4_000_000);
    let src = create_rand_vec(*points.last().unwrap() as i64);
    let mut rng1 = Xoshiro512StarStar::from_seed_u64(0x1234_5678_9ABC_DEF1);
    let mut insert_result: Vec<(u64, f64)> = Vec::new();
    for p in points.iter() {
        let mut b = b.clone();
        let map = create_std64_from_vec(*p, &src);
        let mut get_src = src.clone();
        get_src.truncate(*p as usize);
        rng1.shuffle(&mut get_src);
        b.iter(|| get_std64_from_vec(&map, &get_src));
        let summary: Summary = b.bench(|b: &mut Bencher| ()).unwrap();
        insert_result.push((*p, (*p as f64) * 1000.0 / summary.median)); // throughput in millions/sec
    }
    println!("HashMap rnd 64 get");
    for x in insert_result.iter() {
        println!("{}, {}", (*x).0, (*x).1);
    }
}
// Random-key INSERT throughput for ThinMap.
#[bench]
fn benchmdr_thin64_insert(b: &mut Bencher) {
    let points = determine_points(4_000_000);
    let src = create_rand_vec(*points.last().unwrap() as i64);
    let mut insert_result: Vec<(u64, f64)> = Vec::new();
    for p in points.iter() {
        let mut b = b.clone();
        b.iter(|| create_thin64_from_vec(*p, &src));
        let summary: Summary = b.bench(|b: &mut Bencher| ()).unwrap();
        insert_result.push((*p, (*p as f64) * 1000.0 / summary.median)); // throughput in millions/sec
    }
    println!("ThinMap rnd 64 insert");
    for x in insert_result.iter() {
        println!("{}, {}", (*x).0, (*x).1);
    }
}
// Random-key INSERT throughput for std HashMap.
#[bench]
fn benchmdr_std64_insert(b: &mut Bencher) {
    let points = determine_points(4_000_000);
    let src = create_rand_vec(*points.last().unwrap() as i64);
    let mut insert_result: Vec<(u64, f64)> = Vec::new();
    for p in points.iter() {
        let mut b = b.clone();
        b.iter(|| create_std64_from_vec(*p, &src));
        let summary: Summary = b.bench(|b: &mut Bencher| ()).unwrap();
        insert_result.push((*p, (*p as f64) * 1000.0 / summary.median)); // throughput in millions/sec
    }
    println!("HashMap rnd 64 insert");
    for x in insert_result.iter() {
        println!("{}, {}", (*x).0, (*x).1);
    }
}
// Sequential-key INSERT throughput for ThinMap.
#[bench]
fn benchmds_thin64_insert(b: &mut Bencher) {
    let points = determine_points(4_000_000);
    let mut insert_result: Vec<(u64, f64)> = Vec::new();
    for p in points.iter() {
        let mut b = b.clone();
        b.iter(|| create_thin(*p, 0));
        let summary: Summary = b.bench(|b: &mut Bencher| ()).unwrap();
        insert_result.push((*p, (*p as f64) * 1000.0 / summary.median)); // throughput in millions/sec
    }
    println!("ThinMap seq 64 insert");
    for x in insert_result.iter() {
        println!("{}, {}", (*x).0, (*x).1);
    }
}
// Sequential-key INSERT throughput for std HashMap.
#[bench]
fn benchmds_std64_insert(b: &mut Bencher) {
    let points = determine_points(4_000_000);
    let mut insert_result: Vec<(u64, f64)> = Vec::new();
    for p in points.iter() {
        let mut b = b.clone();
        b.iter(|| create_std(*p, 0));
        let summary: Summary = b.bench(|b: &mut Bencher| ()).unwrap();
        insert_result.push((*p, (*p as f64) * 1000.0 / summary.median)); // throughput in millions/sec
    }
    println!("HashMap seq insert");
    for x in insert_result.iter() {
        println!("{}, {}", (*x).0, (*x).1);
    }
}
// Sequential-key GET throughput for ThinMap.
#[bench]
fn benchmds_thin64_get(b: &mut Bencher) {
    let points = determine_points(4_000_000);
    let mut insert_result: Vec<(u64, f64)> = Vec::new();
    for p in points.iter() {
        let mut b = b.clone();
        let map = create_thin(*p, 0);
        b.iter(|| get_seq_thin64_var(&map, *p, 0));
        black_box(map.len());
        let summary: Summary = b.bench(|b: &mut Bencher| ()).unwrap();
        insert_result.push((*p, (*p as f64) * 1000.0 / summary.median)); // throughput in millions/sec
    }
    println!("ThinMap seq get");
    for x in insert_result.iter() {
        println!("{}, {}", (*x).0, (*x).1);
    }
}
// Sequential-key GET throughput for std HashMap.
#[bench]
fn benchmds_std64_get(b: &mut Bencher) {
    let points = determine_points(4_000_000);
    let mut insert_result: Vec<(u64, f64)> = Vec::new();
    for p in points.iter() {
        let mut b = b.clone();
        let map = create_std(*p, 0);
        b.iter(|| get_seq_std64_var(&map, *p, 0));
        black_box(map.len());
        let summary: Summary = b.bench(|b: &mut Bencher| ()).unwrap();
        insert_result.push((*p, (*p as f64) * 1000.0 / summary.median)); // throughput in millions/sec
    }
    println!("HashMap seq get");
    for x in insert_result.iter() {
        println!("{}, {}", (*x).0, (*x).1);
    }
}
// Insert throughput with keys shifted left by 0..20 bits (trivial hasher):
// probes how each map copes with low-entropy key distributions.
#[bench]
fn benchmpsa_thin_insert(b: &mut Bencher) {
    let size = 1_500_000;
    println!("ThinMap seq shifted insert");
    for p in 0..20 {
        let mut b = b.clone();
        b.iter(|| create_thin_triv(size, p));
        let summary: Summary = b.bench(|b: &mut Bencher| ()).unwrap();
        println!("{}, {}", p as u32, (size as f64) * 1000.0 / summary.median); // throughput in millions/sec
    }
}
// Same shifted-key probe for std HashMap with the trivial hasher.
#[bench]
fn benchmpsa_std_insert(b: &mut Bencher) {
    let size = 1_500_000;
    println!("HashMap seq shifted insert");
    for p in 0..20 {
        let mut b = b.clone();
        b.iter(|| create_std_triv(size, p));
        let summary: Summary = b.bench(|b: &mut Bencher| ()).unwrap();
        println!("{}, {}", p as u32, (size as f64) * 1000.0 / summary.median); // throughput in millions/sec
    }
}
/// Sum a lookup of every key; `black_box` keeps the loop from being optimized away.
/// Panics if any key is missing.
fn get_thin64_from_vec(map: &ThinMap<i64, u64>, keys: &[i64]) {
    let mut sum = 0;
    for x in keys.iter() {
        sum += map.get(x).unwrap();
    }
    black_box(sum);
}
/// Std-map twin of the above; prints a diagnostic before panicking on a miss.
fn get_std64_from_vec(map: &HashMap<i64, u64>, keys: &[i64]) {
    let mut sum = 0;
    for x in keys.iter() {
        let option = map.get(x);
        if option.is_none() {
            println!("looking for {} in {:?} out of {:?}", *x, map, keys);
        }
        sum += option.unwrap();
    }
    black_box(sum);
}
/// Insert every element of `v` (value 1) into a fresh ThinMap.
fn inserts_vec_thin(v: &Vec<i64>) {
    let mut map: ThinMap<i64, u64> = ThinMap::new();
    for i in v.iter() {
        map.insert(*i, 1);
    }
    black_box(map.len());
}
/// Insert every element of `v` (value 1) into a fresh std HashMap.
fn inserts_vec_std(v: &Vec<i64>) {
    let mut map: HashMap<i64, u64> = HashMap::new();
    for i in v.iter() {
        map.insert(*i, 1);
    }
    black_box(map.len());
}
/// Insert keys `c << shift` for c in 0..size (default hasher).
fn create_thin(size: u64, shift: u64) -> ThinMap<i64, u64> {
    let mut thin_map = ThinMap::new();
    let mut c = 0;
    let x = size as i64;
    while c < x {
        thin_map.insert(c << shift, c as u64);
        c = c + 1;
    }
    thin_map
}
/// Same as `create_thin`, but with the trivial one-field hasher.
fn create_thin_triv(size: u64, shift: u64) -> ThinMap<i64, u64, TrivialOneFieldHasherBuilder> {
    let mut thin_map = ThinMap::with_hasher(TrivialOneFieldHasherBuilder::new());
    let mut c = 0;
    let x = size as i64;
    while c < x {
        thin_map.insert(c << shift, c as u64);
        c = c + 1;
    }
    thin_map
}
/// Key a ThinMap by the first `size` entries of `v`, valued by insertion order.
/// Panics if `v` is shorter than `size`.
fn create_thin64_from_vec(size: u64, v: &[i64]) -> ThinMap<i64, u64> {
    let mut thin_map = ThinMap::new();
    let mut c = 0;
    let x = size as i64;
    let mut it = v.iter();
    while c < x {
        thin_map.insert(*it.next().unwrap(), c as u64);
        c = c + 1;
    }
    thin_map
}
/// Key a HashMap by the first `size` entries of `v`, valued by insertion order.
/// Panics if `v` has fewer than `size` elements (as the original's `unwrap` did).
fn create_std64_from_vec(size: u64, v: &[i64]) -> HashMap<i64, u64> {
    let mut std_map = HashMap::new();
    for c in 0..size as i64 {
        std_map.insert(v[c as usize], c as u64);
    }
    std_map
}
/// Benchmark body: build a sequential map and observe its length so the work
/// is not optimized away.
fn inserts_seq_std64_var(size: i64, shift: u64) {
    let hash_map = create_std(size as u64, shift);
    black_box(hash_map.len());
}
/// Insert keys `c << shift` for c in 0..size, each mapped to its sequence number.
fn create_std(size: u64, shift: u64) -> HashMap<i64, u64> {
    let mut hash_map = HashMap::new();
    for c in 0..size as i64 {
        hash_map.insert(c << shift, c as u64);
    }
    hash_map
}
/// Same as `create_std`, but with the trivial one-field hasher.
fn create_std_triv(size: u64, shift: u64) -> HashMap<i64, u64, TrivialOneFieldHasherBuilder> {
    let mut hash_map = HashMap::with_hasher(TrivialOneFieldHasherBuilder::new());
    let mut c = 0;
    let x = size as i64;
    while c < x {
        hash_map.insert(c << shift, c as u64);
        c = c + 1;
    }
    hash_map
}
/// Look up keys 1..size in a ThinMap and accumulate the values.
/// NOTE(review): `shift` is unused and the sum is discarded without a
/// `black_box`, so the optimizer may delete this work — verify the benchmark
/// numbers that depend on it.
fn get_seq_thin64_var(map: &ThinMap<i64, u64>, size: u64, shift: u64) {
    let mut c = 1;
    let mut x = 0;
    let y = size as i64;
    while c < y {
        x += map.get(&c).unwrap();
        c = c + 1;
    }
}
/// Look up keys 1..size and accumulate the values; like the original, the sum
/// is intentionally discarded and `shift` is unused (kept for signature parity
/// with the thin-map variant). Panics if a key is missing.
fn get_seq_std64_var(map: &HashMap<i64, u64>, size: u64, shift: u64) {
    let _ = shift;
    let mut total = 0;
    for c in 1..size as i64 {
        total += map.get(&c).unwrap();
    }
    let _ = total;
}
/// Fill a ClaMap with a million sequential keys, then overwrite key 1;
/// returns the previous value for that key.
fn large_inserts_seq_cla() -> Option<u64> {
    let mut cla_map = ClaMap::new();
    let mut c = 1;
    while c < 1_000_000 {
        cla_map.insert(c, c as u64);
        c = c + 1;
    }
    cla_map.insert(1, 100)
}
/// Discover the capacity growth points of both map types up to `max`, so the
/// benchmarks can sample just around each resize threshold.
fn determine_points(max: u64) -> Vec<u64> {
    let mut thin_map: ThinMap<i64, u64> = ThinMap::with_capacity(10);
    let mut thin_points: Vec<u64> = Vec::new();
    let mut cur_cap = thin_map.capacity();
    thin_points.push(cur_cap as u64);
    while cur_cap < max as usize {
        // Fill until the map reallocates, then record the new capacity.
        while thin_map.capacity() <= cur_cap {
            let x = thin_map.len();
            thin_map.insert(x as i64, 1);
        }
        cur_cap = thin_map.capacity();
        thin_points.push(cur_cap as u64);
    }
    // Same discovery pass for the std HashMap.
    let mut hash_map: HashMap<i64, u64> = HashMap::with_capacity(10);
    let mut hash_points: Vec<u64> = Vec::new();
    let mut cur_cap = hash_map.capacity();
    hash_points.push(cur_cap as u64);
    while cur_cap < max as usize {
        while hash_map.capacity() <= cur_cap {
            let x = hash_map.len();
            hash_map.insert(x as i64, 1);
        }
        cur_cap = hash_map.capacity();
        hash_points.push(cur_cap as u64);
    }
    // Merge the +-5% sample points from both growth curves, sorted.
    let mut result = Vec::new();
    calc_points(&thin_points, &mut result);
    calc_points(&hash_points, &mut result);
    result.sort();
    result
}
/// For each capacity point, emit values 5% below and 5% above it, so the
/// benchmarks sample just before and just after each resize threshold.
/// NOTE(review): `p * 95` can overflow for points near u64::MAX — presumed
/// fine for realistic capacities, confirm if `max` ever grows.
fn calc_points(thin_points: &[u64], result: &mut Vec<u64>) {
    for &p in thin_points {
        result.push(p * 95 / 100);
        result.push(p * 105 / 100);
    }
}
|
use std::env;
fn main() {
    // Tell rustc where to find the native C library at link time.
    println!("cargo:rustc-link-search=native=./clib");
    // Expose the library dir to dependent crates' build scripts (DEP_*_LIBDIR).
    println!("cargo:libdir=./clib");
}
|
use crate::{
widget,
widget::{
unit::image::{ImageBoxAspectRatio, ImageBoxMaterial, ImageBoxNode, ImageBoxSizeValue},
utils::Transform,
},
widget_component,
};
use serde::{Deserialize, Serialize};
/// Props for the `image_box` widget: sizing, aspect handling, material, transform.
#[derive(Debug, Default, Clone, Serialize, Deserialize)]
pub struct ImageBoxProps {
    #[serde(default)]
    pub width: ImageBoxSizeValue,
    #[serde(default)]
    pub height: ImageBoxSizeValue,
    // `None` lets the content stretch freely instead of preserving aspect ratio.
    #[serde(default)]
    pub content_keep_aspect_ratio: Option<ImageBoxAspectRatio>,
    #[serde(default)]
    pub material: ImageBoxMaterial,
    #[serde(default)]
    pub transform: Transform,
}
// Register `ImageBoxProps` with the props system under the key "ImageBoxProps".
implement_props_data!(ImageBoxProps, "ImageBoxProps");
// Leaf widget that emits an `ImageBoxNode` built from its `ImageBoxProps`
// (`read_cloned_or_default` falls back to defaults when props are absent).
widget_component! {
    pub image_box(id, props) {
        let ImageBoxProps {
            width,
            height,
            content_keep_aspect_ratio,
            material,
            transform,
        } = props.read_cloned_or_default();
        widget! {{{
            ImageBoxNode {
                id: id.to_owned(),
                props: props.clone(),
                width,
                height,
                content_keep_aspect_ratio,
                material,
                transform,
            }
        }}}
    }
}
|
use super::in_game::InGameScreen;
use super::screen::Screen;
use crate::player::check_multiple_pressed;
use quicksilver::geom::Rectangle;
use quicksilver::geom::Transform;
use quicksilver::graphics::Font;
use quicksilver::graphics::FontStyle;
use quicksilver::graphics::Image;
use quicksilver::input::Key;
use quicksilver::lifecycle::Window;
use quicksilver::prelude::Img;
use quicksilver::Result;
/// Game-over screen holding pre-rendered text images.
pub struct DeadScreen {
    // The player's final score, rendered once as an image.
    rendered_score: Image,
    // The static "you died" message.
    rendered_dead_text: Image,
}
impl DeadScreen {
    /// Render the score and death message up front so `draw` only blits images.
    pub fn new(score: u64, font: &Font, style: &FontStyle) -> Result<Self> {
        let rendered_score = font.render(&score.to_string(), &style)?;
        let rendered_dead_text =
            font.render("You died, press Esc to continue\nYour score:", &style)?;
        Ok(DeadScreen {
            rendered_dead_text,
            rendered_score,
        })
    }
}
impl Screen for DeadScreen {
    /// Switch to a fresh `InGameScreen` when Esc or Return is pressed.
    fn update(
        &mut self,
        window: &mut Window,
        font: &Font,
        style: &FontStyle,
    ) -> Result<Option<Box<dyn Screen>>> {
        let board = window.keyboard();
        if check_multiple_pressed(&board, &[Key::Escape, Key::Return]) {
            return Ok(Some(Box::new(InGameScreen::new(font, style)?)));
        }
        Ok(None)
    }
    /// Draw the death message (z = 2) above the final score (z = 1).
    fn draw(&self, window: &mut Window, _: &Font, _: &FontStyle) -> Result<()> {
        window.draw_ex(
            &Rectangle::new((200, 150), (380, 200)),
            Img(&self.rendered_dead_text),
            Transform::IDENTITY,
            2,
        );
        window.draw_ex(
            &Rectangle::new((350, 350), (100, 100)),
            Img(&self.rendered_score),
            Transform::IDENTITY,
            1,
        );
        // Idiom fix: tail expression instead of `return Ok(());`.
        Ok(())
    }
}
|
#[macro_use]
extern crate detour;
extern crate winapi;
use std::ptr::null_mut;
use winapi::shared::{
minwindef::LPVOID,
ntdef::LPCWSTR,
windef::HWND,
};
static_detour! {
static MessageBoxWHook: unsafe extern "system" fn(HWND, LPCWSTR, LPCWSTR, u32) -> i32;
}
type MessageBoxW = unsafe extern "system" fn(HWND, LPCWSTR, LPCWSTR, u32) -> i32;
/// Reset the calling thread's Win32 last-error code to 0 (ERROR_SUCCESS).
unsafe fn clear_last_error() {
    use winapi::um::errhandlingapi::SetLastError;
    SetLastError(0);
}
/// Pop up a message box showing the thread's Win32 last-error code.
unsafe fn show_last_error() {
    use winapi::um::{
        errhandlingapi::GetLastError,
        winuser::{MB_OK, MessageBoxA},
    };
    let e = GetLastError();
    // `format!` displays the number directly (dropped a redundant `.to_string()`);
    // the trailing NUL makes the result a valid C string for MessageBoxA.
    let e = format!("{}\0", e);
    MessageBoxA(null_mut(), e.as_ptr() as _, "\0".as_ptr() as _, MB_OK);
}
/// Replacement for MessageBoxW: rewrites both text and caption, then forwards
/// to the original function through the detour trampoline. The collected
/// UTF-16 Vecs live until the end of the call expression, so the pointers
/// stay valid for the duration of the call.
fn hook_message_box_w(h_wnd: HWND, _: LPCWSTR, _: LPCWSTR, u_type: u32) -> i32 {
    unsafe {
        MessageBoxWHook.call(
            h_wnd,
            "Ops hooked by detour-rs!\0".encode_utf16().collect::<Vec<_>>().as_ptr(),
            "Ops hooked by detour-rs!\0".encode_utf16().collect::<Vec<_>>().as_ptr(),
            u_type,
        )
    }
}
/// Install the MessageBoxW detour.
/// NOTE(review): 0x72AD0 is a hard-coded offset of MessageBoxW inside
/// USER32.dll for one specific Windows build — it breaks on any other
/// version; consider resolving the address with GetProcAddress instead.
unsafe fn detour_message_box_w() {
    use winapi::um::libloaderapi::GetModuleHandleA;
    let module_address = GetModuleHandleA("USER32.dll\0".as_ptr() as _) as u64;
    let message_box_w_address = module_address + 0x72AD0;
    // Reinterpret the computed address as a function pointer.
    let message_box_w = *(&message_box_w_address as *const _ as *const MessageBoxW);
    let message_box_w_hook = MessageBoxWHook.initialize(message_box_w, hook_message_box_w).unwrap();
    message_box_w_hook.enable().unwrap();
}
/// Thread entry point that installs the hook; return value is the thread exit code.
unsafe extern "system" fn init_hook(_: LPVOID) -> u32 {
    detour_message_box_w();
    0
}
#[no_mangle]
#[allow(non_snake_case)]
/// DLL entry point: spawn the hook-installer thread on process attach
/// (reason 1 == DLL_PROCESS_ATTACH, 0 == DLL_PROCESS_DETACH).
pub extern "system" fn DllMain(_: winapi::shared::minwindef::HINSTANCE, reason: u32, _: LPVOID) -> i32 {
    use winapi::um::processthreadsapi::CreateThread;
    match reason {
        1 => unsafe { CreateThread(null_mut(), 0, Some(init_hook), null_mut(), 0, null_mut()); }
        0 => (),
        _ => (),
    }
    1
}
|
//! Library of KCP on Tokio
extern crate bytes;
#[macro_use]
extern crate futures;
extern crate kcp;
extern crate mio;
#[macro_use]
extern crate tokio_core;
extern crate tokio_io;
extern crate rand;
extern crate time;
#[macro_use]
extern crate log;
use time::Timespec;
pub use self::config::{KcpConfig, KcpNoDelayConfig};
pub use self::listener::{Incoming, KcpListener};
pub use self::stream::{KcpClientStream, KcpStream, KcpStreamNew};
mod skcp;
mod session;
mod kcp_io;
mod stream;
mod listener;
mod config;
mod debug;
/// Convert a `Timespec` to whole milliseconds, truncated into `u32`.
#[inline]
fn as_millisec(timespec: &Timespec) -> u32 {
    (timespec.sec * 1000 + timespec.nsec as i64 / 1000 / 1000) as u32
}
/// Returns the current wall-clock time in milliseconds (truncated to u32).
#[inline]
fn current() -> u32 {
    let timespec = time::get_time();
    // The argument here was mojibake (`×pec`, an HTML-mangled `&timespec`)
    // and did not compile; restore the intended borrow.
    as_millisec(&timespec)
}
|
// This is based on example 3, but adds in highlighting visible tiles.
//
// Comments that duplicate previous examples have been removed for brevity.
//////////////////////////////////////////////////////////////
rltk::add_wasm_support!();
use rltk::prelude::*;
extern crate rand;
use crate::rand::Rng;
/// The two kinds of map tile in this example.
#[derive(PartialEq, Copy, Clone)]
enum TileType {
    Wall,
    Floor,
}

// Map dimensions, in tiles.
const WIDTH: i32 = 40;
const HEIGHT: i32 = 25;

// Just like example 3, but we're adding an additional vector: visible
struct State {
    map: Vec<TileType>,     // tile type for every cell, row-major order
    player_position: usize, // index of the player's tile within `map`
    visible: Vec<bool>,     // per-tile visibility, recomputed each tick
}
/// Converts an (x, y) map coordinate into a row-major index into the map.
pub fn xy_idx(x: i32, y: i32) -> usize {
    let (x, y) = (x as usize, y as usize);
    y * WIDTH as usize + x
}
/// Converts a row-major map index back into an (x, y) coordinate pair.
pub fn idx_xy(idx: usize) -> (i32, i32) {
    let idx = idx as i32;
    (idx % WIDTH, idx / WIDTH)
}
impl State {
    /// Builds the starting state: a bordered map with random interior
    /// walls, the player at the center, and an all-hidden visibility set.
    pub fn new() -> State {
        // Same as example 3, but we've added the visible tiles
        let mut state = State {
            map: vec![TileType::Floor; (WIDTH * HEIGHT) as usize],
            player_position: xy_idx(WIDTH / 2, HEIGHT / 2),
            visible: vec![false; (WIDTH * HEIGHT) as usize],
        };
        // Wall off the top and bottom edges of the map.
        for x in 0..WIDTH {
            state.map[xy_idx(x, 0)] = TileType::Wall;
            state.map[xy_idx(x, HEIGHT - 1)] = TileType::Wall;
        }
        // Wall off the left and right edges of the map.
        for y in 0..HEIGHT {
            state.map[xy_idx(0, y)] = TileType::Wall;
            state.map[xy_idx(WIDTH - 1, y)] = TileType::Wall;
        }
        // Scatter up to 400 random interior walls, never on the player's
        // tile. (note: two-argument `gen_range` is the pre-0.8 `rand` API)
        let mut rng = rand::thread_rng();
        for _ in 0..400 {
            let x = rng.gen_range(1, WIDTH - 1);
            let y = rng.gen_range(1, HEIGHT - 1);
            let idx = xy_idx(x, y);
            if state.player_position != idx {
                state.map[idx] = TileType::Wall;
            }
        }
        state
    }
    /// Moves the player by the given delta unless the destination is a
    /// wall. The solid map border keeps the move on the map.
    pub fn move_player(&mut self, delta_x: i32, delta_y: i32) {
        let current_position = idx_xy(self.player_position);
        let new_position = (current_position.0 + delta_x, current_position.1 + delta_y);
        let new_idx = xy_idx(new_position.0, new_position.1);
        if self.map[new_idx] == TileType::Floor {
            self.player_position = new_idx;
        }
    }
}
// Implement the game loop
impl GameState for State {
    // NOTE(review): this allow looks vestigial — `tick` and its locals are
    // already snake_case.
    #[allow(non_snake_case)]
    fn tick(&mut self, ctx: &mut Rltk) {
        let mut draw_batch = DrawBatch::new();
        // Translate keyboard input into player movement.
        match ctx.key {
            None => {} // Nothing happened
            Some(key) => {
                // A key is pressed or held
                match key {
                    // Numpad
                    VirtualKeyCode::Numpad8 => self.move_player(0, -1),
                    VirtualKeyCode::Numpad4 => self.move_player(-1, 0),
                    VirtualKeyCode::Numpad6 => self.move_player(1, 0),
                    VirtualKeyCode::Numpad2 => self.move_player(0, 1),
                    // Numpad diagonals
                    VirtualKeyCode::Numpad7 => self.move_player(-1, -1),
                    VirtualKeyCode::Numpad9 => self.move_player(1, -1),
                    VirtualKeyCode::Numpad1 => self.move_player(-1, 1),
                    VirtualKeyCode::Numpad3 => self.move_player(1, 1),
                    // Cursors
                    VirtualKeyCode::Up => self.move_player(0, -1),
                    VirtualKeyCode::Down => self.move_player(0, 1),
                    VirtualKeyCode::Left => self.move_player(-1, 0),
                    VirtualKeyCode::Right => self.move_player(1, 0),
                    _ => {} // Ignore all the other possibilities
                }
            }
        }
        // Set all tiles to not visible
        for v in &mut self.visible {
            *v = false;
        }
        // Obtain the player's visible tile set (radius 8), and apply it
        let player_position = self.index_to_point2d(self.player_position);
        let fov = rltk::field_of_view_set(player_position, 8, self);
        // Note that the steps above would generally not be run every frame!
        for idx in &fov {
            self.visible[xy_idx(idx.x, idx.y)] = true;
        }
        // Clear the screen (console 0 is the map layer)
        draw_batch.target(0);
        draw_batch.cls();
        // Iterate the map array, incrementing coordinates as we go.
        let mut y = 0;
        let mut x = 0;
        for (i, tile) in self.map.iter().enumerate() {
            // Render a tile depending upon the tile type; now we check visibility as well!
            let mut fg = RGB::from_f32(1.0, 1.0, 1.0);
            // Tile-sheet glyph index: 0 = floor, 1 = wall.
            let glyph: u8;
            match tile {
                TileType::Floor => {
                    glyph = 0;
                }
                TileType::Wall => {
                    glyph = 1;
                }
            }
            if !self.visible[i] {
                // Out of sight: dim the tile.
                fg = fg * 0.3;
            } else {
                // In sight: fade brightness with distance from the player.
                let distance = 1.0
                    - (DistanceAlg::Pythagoras.distance2d(Point::new(x, y), player_position)
                        as f32
                        / 10.0);
                fg = RGB::from_f32(distance, distance, distance);
            }
            draw_batch.set(
                Point::new(x, y),
                ColorPair::new(fg, RGB::from_f32(0., 0., 0.)),
                glyph,
            );
            // Move the coordinates, wrapping to the next row at the edge.
            x += 1;
            if x > WIDTH - 1 {
                x = 0;
                y += 1;
            }
        }
        // Render the player @ symbol on console 1 (the sparse overlay)
        let ppos = idx_xy(self.player_position);
        draw_batch.target(1);
        draw_batch.cls();
        draw_batch.set(
            Point::from_tuple(ppos),
            ColorPair::new(RGB::from_f32(1.0, 1.0, 1.0), RGB::from_f32(0., 0., 0.)),
            2,
        );
        draw_batch.submit(0);
        render_draw_buffer(ctx);
    }
}
// To work with RLTK's algorithm features, we need to implement some the Algorithm2D trait for our map.
// First, default implementations of some we aren't using yet (more on these later!)
impl BaseMap for State {
    // We'll use this one - if its a wall, we can't see through it.
    // `idx` is already a `usize`; the previous `idx as usize` cast was a no-op.
    fn is_opaque(&self, idx: usize) -> bool {
        self.map[idx] == TileType::Wall
    }
}
impl Algorithm2D for State {
    /// Reports the map size so RLTK helpers can convert points <-> indices.
    fn dimensions(&self) -> Point {
        Point::new(WIDTH, HEIGHT)
    }
}
// Embed the tile sheet into the binary so it works on wasm as well.
rltk::embedded_resource!(TILE_FONT, "../resources/example_tiles.png");
fn main() {
    // Make the embedded tile sheet available under its runtime resource path.
    rltk::link_resource!(TILE_FONT, "resources/example_tiles.png");
    // Raw init: we register our own fonts and consoles instead of a preset.
    let mut context = Rltk::init_raw(
        WIDTH as u32 * 16,
        HEIGHT as u32 * 16,
        "RLTK Example 07 - Tiles",
    );
    let font = context.register_font(rltk::Font::load("resources/example_tiles.png", (16, 16)));
    // Console 0: the map layer.
    context.register_console(
        rltk::SimpleConsole::init(WIDTH as u32, HEIGHT as u32, &context.backend),
        font,
    );
    // Console 1: the player overlay (sparse: only set cells are drawn).
    context.register_console(
        rltk::SparseConsole::init(WIDTH as u32, HEIGHT as u32, &context.backend),
        font,
    );
    let gs = State::new();
    rltk::main_loop(context, gs);
}
|
use crate::cell::CellValue;
use crate::{Board, Cell, ConstraintGroup, Puzzle};
use bitflags::bitflags;
use bitvec::prelude::*;
/// Cached solved/unsolved state for every row and column of the board.
#[derive(Default)]
struct Status {
    row_status: BitVec,    // bit r is set when row r satisfies its constraint
    column_status: BitVec, // bit c is set when column c satisfies its constraint
}
bitflags! {
    /// Behavior toggles for a Picross game.
    #[derive(Default)]
    struct Options: u8 {
        /// Automatically cross out the empty cells of a line once it is
        /// solved (and uncross them again if it becomes unsolved).
        const AUTO_CROSS_COMPLETED = 0b0001;
    }
}
/// A picross game. Manages constraints, board, and completion.
pub struct Picross<C: CellValue> {
    puzzle: Puzzle<C>, // the row/column constraints being solved against
    board: Board<C>,   // the player's current grid of cells
    status: Status,    // cached per-line solved flags
    options: Options,  // behavior toggles
}
impl<C: CellValue> Picross<C> {
    /// Creates a new Picross game for `puzzle`, with an empty board and
    /// auto-crossing of completed lines enabled.
    pub fn new(puzzle: Puzzle<C>) -> Self {
        let mut picross = Picross {
            status: Status {
                row_status: bitvec![0; puzzle.row_constraints().len()],
                column_status: bitvec![0; puzzle.column_constraints().len()],
            },
            options: Options::AUTO_CROSS_COMPLETED,
            board: Board::new_empty(puzzle.row_constraints().len(), puzzle.column_constraints().len()),
            puzzle,
        };
        // An empty board may already satisfy empty constraints, so evaluate
        // every line up front to seed the status bits.
        for r in 0..picross.height() {
            picross.check_row(r);
        }
        for c in 0..picross.width() {
            picross.check_column(c);
        }
        picross
    }
    /// Returns an iterator over the cells in this game's board, and their positions.
    pub fn cells(&self) -> impl Iterator<Item = (usize, usize, &Cell<C>)> {
        self.board.cells()
    }
    /// Crosses out the cell at `row` and `column`.
    /// Returns whether or not the puzzle is solved afterwards.
    pub fn cross_out(&mut self, row: usize, column: usize) -> bool {
        *self.board.get_mut(row, column) = Cell::CrossedOut;
        self.check(row, column);
        self.is_solved()
    }
    /// Clears the cell at `row` and `column`.
    /// Returns whether or not the puzzle is solved afterwards.
    pub fn clear_at(&mut self, row: usize, column: usize) -> bool {
        *self.board.get_mut(row, column) = Cell::Empty;
        self.check(row, column);
        self.is_solved()
    }
    /// Places `value` into the cell at `row` and `column`.
    /// Returns whether or not the puzzle is solved afterwards.
    pub fn place_at(&mut self, value: C, row: usize, column: usize) -> bool {
        *self.board.get_mut(row, column) = Cell::Filled(value);
        self.check(row, column);
        self.is_solved()
    }
    /// Re-evaluates `row` and caches whether it is solved. With
    /// AUTO_CROSS_COMPLETED set, a solved row has its empty cells crossed
    /// out; an unsolved row has crosses removed again, except where the
    /// crossing column is itself solved (that column owns the cross).
    fn check_row(&mut self, row: usize) {
        let completed = self.puzzle.row_is_solved(&self.board, row);
        self.status.row_status.set(row, completed);
        if self.options.contains(Options::AUTO_CROSS_COMPLETED) {
            if completed {
                for c in 0..self.width() {
                    let cell = self.board.get_mut(row, c);
                    if let Cell::Empty = *cell {
                        *cell = Cell::CrossedOut;
                    }
                }
            } else {
                for c in 0..self.width() {
                    if let Cell::CrossedOut = *self.board.get_mut(row, c) {
                        if !self.puzzle.column_is_solved(&self.board, c) {
                            *self.board.get_mut(row, c) = Cell::Empty;
                        }
                    }
                }
            }
        }
    }
    /// Column counterpart of `check_row`; see that method for the
    /// auto-crossing rules.
    fn check_column(&mut self, column: usize) {
        let completed = self.puzzle.column_is_solved(&self.board, column);
        self.status.column_status.set(column, completed);
        if self.options.contains(Options::AUTO_CROSS_COMPLETED) {
            if completed {
                for r in 0..self.height() {
                    let cell = self.board.get_mut(r, column);
                    if let Cell::Empty = *cell {
                        *cell = Cell::CrossedOut;
                    }
                }
            } else {
                for r in 0..self.height() {
                    if let Cell::CrossedOut = *self.board.get_mut(r, column) {
                        if !self.puzzle.row_is_solved(&self.board, r) {
                            *self.board.get_mut(r, column) = Cell::Empty;
                        }
                    }
                }
            }
        }
    }
    /// Checks whether or not the given row and column are solved and stores the result.
    fn check(&mut self, row: usize, column: usize) {
        self.check_row(row);
        self.check_column(column);
    }
    /// Returns the status of the puzzle's rows and columns.
    pub fn status(&self) -> (&BitVec, &BitVec) {
        (&self.status.row_status, &self.status.column_status)
    }
    /// Checks whether or not the puzzle is solved: all the row and column
    /// constraints are satisfied.
    pub fn is_solved(&self) -> bool {
        self.status.row_status.all() && self.status.column_status.all()
    }
    /// Gets the cell at `row` and `column`.
    pub fn get(&self, row: usize, column: usize) -> &Cell<C> {
        self.board.get(row, column)
    }
    /// Returns the row constraint group.
    pub fn row_constraints(&self) -> &ConstraintGroup<C> {
        // The accessor already returns a reference; the extra `&` formerly
        // taken here produced `&&_` and only compiled via deref coercion.
        self.puzzle.row_constraints()
    }
    /// Returns the column constraint group.
    pub fn column_constraints(&self) -> &ConstraintGroup<C> {
        self.puzzle.column_constraints()
    }
    /// The width of this board.
    pub fn width(&self) -> usize {
        self.board.width()
    }
    /// The height of this board.
    pub fn height(&self) -> usize {
        self.board.height()
    }
}
|
use crate::glyph::GlyphStore;
/// A 2D extent in integer device units.
pub struct Size {
    width: i32,
    height: i32
}

impl Size {
    /// Builds a size from a width and a height.
    pub fn new(width: i32, height: i32) -> Self {
        Self { width, height }
    }
}
/// A position expressed in logical (inline/block) coordinates.
pub struct Point {
    inline: i32,
    block: i32,
}

impl Point {
    /// Builds a point from its inline and block coordinates.
    pub fn new(inline: i32, block: i32) -> Self {
        Self { inline, block }
    }
}
/// An axis-aligned rectangle: an origin point plus an extent.
pub struct Rect {
    origin: Point,
    size: Size
}

impl Rect {
    /// Builds a rectangle from its origin and size.
    pub fn new(origin: Point, size: Size) -> Self {
        Self { origin, size }
    }
}
pub struct TextFragment {
glyphs: GlyphStore,
rect: Rect,
}
impl TextFragment {
pub fn new(store: GlyphStore, rect: Rect) -> Self {
TextFragment {
glyphs: store,
rect,
}
}
} |
use super::tokens::Token;
use super::errors::*;
use super::common::ImmutableString;
/// Converts text into a stream of tokens.
pub struct Scanner {
    pos: usize,              // index of the current character in `chars`
    line_number: usize,      // zero-based line of the current character
    token_start: usize,      // position where the current token began
    token_start_line: usize, // line on which the current token began
    chars: Vec<char>, // todo: use an iterator instead?
    current_token: Option<Token>, // most recently scanned token, if any
}
impl Scanner {
    /// Creates a new scanner based on the provided text.
    pub fn new(text: &str) -> Scanner {
        Scanner {
            pos: 0,
            line_number: 0,
            token_start: 0,
            token_start_line: 0,
            chars: text.chars().collect(),
            current_token: None,
        }
    }
    /// Moves to and returns the next token, or `Ok(None)` at end of input.
    pub fn scan(&mut self) -> Result<Option<Token>, ParseError> {
        self.skip_whitespace();
        self.token_start = self.pos;
        self.token_start_line = self.line_number;
        if let Some(current_char) = self.current_char() {
            let token_result = match current_char {
                '{' => {
                    self.move_next_char();
                    Ok(Token::OpenBrace)
                },
                '}' => {
                    self.move_next_char();
                    Ok(Token::CloseBrace)
                },
                '[' => {
                    self.move_next_char();
                    Ok(Token::OpenBracket)
                },
                ']' => {
                    self.move_next_char();
                    Ok(Token::CloseBracket)
                },
                ',' => {
                    self.move_next_char();
                    Ok(Token::Comma)
                },
                ':' => {
                    self.move_next_char();
                    Ok(Token::Colon)
                },
                '"' => self.parse_string(),
                '/' => {
                    // Either a `//` line comment or a `/* */` block comment.
                    match self.peek_char() {
                        Some('/') => Ok(self.parse_comment_line()),
                        Some('*') => self.parse_comment_block(),
                        _ => Err(ParseError::new(self.token_start, "Unexpected token.")),
                    }
                },
                _ => {
                    if current_char == '-' || self.is_digit() {
                        self.parse_number()
                    } else if self.try_move_word("true") {
                        Ok(Token::Boolean(true))
                    } else if self.try_move_word("false") {
                        Ok(Token::Boolean(false))
                    } else if self.try_move_word("null") {
                        Ok(Token::Null)
                    } else {
                        Err(ParseError::new(self.token_start, "Unexpected token."))
                    }
                }
            };
            match token_result {
                Ok(token) => {
                    self.current_token = Some(token.clone());
                    Ok(Some(token))
                },
                Err(err) => Err(err),
            }
        } else {
            // End of input: clear the current token and signal completion.
            self.current_token = None;
            Ok(None)
        }
    }
    /// Gets the start position of the token.
    pub fn token_start(&self) -> usize {
        self.token_start
    }
    /// Gets the end position of the token.
    pub fn token_end(&self) -> usize {
        self.pos
    }
    /// Gets the line the token starts on.
    pub fn token_start_line(&self) -> usize {
        self.token_start_line
    }
    /// Gets the line the token ends on.
    pub fn token_end_line(&self) -> usize {
        self.line_number
    }
    /// Gets the current token.
    pub fn token(&self) -> Option<Token> {
        self.current_token.as_ref().map(|x| x.to_owned())
    }
    /// Parses a double-quoted string literal (escapes are kept verbatim in
    /// the returned text, not decoded).
    fn parse_string(&mut self) -> Result<Token, ParseError> {
        #[cfg(debug_assertions)]
        self.assert_char('"');
        let start_pos = self.pos;
        let mut text = String::new();
        let mut last_was_backslash = false;
        let mut found_end_string = false;
        while let Some(current_char) = self.move_next_char() {
            if last_was_backslash {
                match current_char {
                    '"' | '\\' | '/' | 'b' | 'f' | 'n' | 'r' | 't' => {
                        text.push(current_char);
                    },
                    'u' => {
                        text.push(current_char);
                        let hex_start_pos = self.pos - 1;
                        // expect four hex values
                        for _ in 0..4 {
                            if let Some(current_char) = self.move_next_char() {
                                text.push(current_char);
                            }
                            if !self.is_hex() {
                                return Err(ParseError::new(hex_start_pos, "Expected four hex digits."));
                            }
                        }
                    },
                    _ => return Err(ParseError::new(start_pos, "Invalid escape.")),
                }
                last_was_backslash = false;
            } else if current_char == '"' {
                found_end_string = true;
                break;
            } else {
                last_was_backslash = current_char == '\\';
                text.push(current_char);
            }
        }
        if found_end_string {
            // Move past the closing quote.
            self.move_next_char();
            Ok(Token::String(ImmutableString::new(text)))
        } else {
            Err(ParseError::new(start_pos, "Unterminated string literal"))
        }
    }
    /// Parses a JSON number: optional minus, integer part, optional
    /// fraction, optional exponent.
    fn parse_number(&mut self) -> Result<Token, ParseError> {
        let mut text = String::new();
        if self.is_negative_sign() {
            text.push('-');
            self.move_next_char();
        }
        if self.is_zero() {
            // A leading zero cannot be followed by more integer digits.
            text.push('0');
            self.move_next_char();
        } else if self.is_one_nine() {
            text.push(self.current_char().unwrap());
            self.move_next_char();
            while self.is_digit() {
                text.push(self.current_char().unwrap());
                self.move_next_char();
            }
        } else {
            return Err(ParseError::new(self.pos, "Expected a digit to follow a negative sign."));
        }
        if self.is_decimal_point() {
            text.push('.');
            self.move_next_char();
            if !self.is_digit() {
                return Err(ParseError::new(self.pos, "Expected a digit."));
            }
            while self.is_digit() {
                text.push(self.current_char().unwrap());
                self.move_next_char();
            }
        }
        match self.current_char() {
            Some('e') | Some('E') => {
                text.push(self.current_char().unwrap());
                // Per the JSON grammar the exponent sign is OPTIONAL
                // (`1e5` is valid); the previous code rejected a missing
                // sign with "Expected plus or minus symbol".
                match self.move_next_char() {
                    Some('-') | Some('+') => {
                        text.push(self.current_char().unwrap());
                        self.move_next_char();
                    }
                    _ => {}
                }
                // Either way, at least one digit must follow.
                if !self.is_digit() {
                    return Err(ParseError::new(self.pos, "Expected a digit."));
                }
                while self.is_digit() {
                    text.push(self.current_char().unwrap());
                    self.move_next_char();
                }
            }
            _ => {},
        }
        Ok(Token::Number(ImmutableString::new(text)))
    }
    /// Parses a `//` comment, consuming up to (but not including) the
    /// terminating newline.
    fn parse_comment_line(&mut self) -> Token {
        let mut text = String::new();
        self.assert_then_move_char('/');
        #[cfg(debug_assertions)]
        self.assert_char('/');
        while let Some(current_char) = self.move_next_char() {
            if self.is_new_line() {
                break;
            }
            text.push(current_char);
        }
        Token::CommentLine(ImmutableString::new(text))
    }
    /// Parses a `/* */` comment; errors if the closing `*/` is missing.
    fn parse_comment_block(&mut self) -> Result<Token, ParseError> {
        let token_start = self.pos;
        let mut text = String::new();
        self.assert_then_move_char('/');
        #[cfg(debug_assertions)]
        self.assert_char('*');
        let mut found_end = false;
        while let Some(current_char) = self.move_next_char() {
            if current_char == '*' && self.peek_char() == Some('/') {
                found_end = true;
                break;
            }
            text.push(current_char);
        }
        if found_end {
            self.assert_then_move_char('*');
            self.assert_then_move_char('/');
            Ok(Token::CommentBlock(ImmutableString::new(text)))
        } else {
            Err(ParseError::new(token_start, "Unterminated comment block."))
        }
    }
    /// Advances past any whitespace (newline counting happens inside
    /// `move_next_char`).
    fn skip_whitespace(&mut self) {
        while let Some(current_char) = self.current_char() {
            if current_char.is_whitespace() {
                self.move_next_char();
            } else {
                break;
            }
        }
    }
    /// Consumes `text` if it appears at the current position as a whole
    /// word (not followed by an alphanumeric character); returns success.
    fn try_move_word(&mut self, text: &str) -> bool {
        // todo: debug assert no newlines
        let mut i = self.pos;
        for c in text.chars() {
            if let Some(current_char) = self.chars.get(i) {
                if *current_char != c {
                    return false;
                }
            } else {
                return false;
            }
            i += 1;
        }
        if let Some(next_char) = self.chars.get(i) {
            if next_char.is_alphanumeric() {
                return false;
            }
        }
        self.pos = i;
        true
    }
    /// Debug-asserts the current char then advances past it.
    fn assert_then_move_char(&mut self, _character: char) {
        #[cfg(debug_assertions)]
        self.assert_char(_character);
        self.move_next_char();
    }
    #[cfg(debug_assertions)]
    fn assert_char(&mut self, character: char) {
        let current_char = self.current_char();
        debug_assert!(current_char == Some(character), "Expected {:?}, was {:?}", character, current_char);
    }
    /// Advances one character, bumping the line count when leaving a `\n`,
    /// and returns the new current character.
    fn move_next_char(&mut self) -> Option<char> {
        if self.current_char() == Some('\n') {
            self.line_number += 1;
        }
        self.pos += 1;
        self.current_char()
    }
    fn peek_char(&self) -> Option<char> {
        self.chars.get(self.pos + 1).map(|x| x.to_owned())
    }
    fn current_char(&self) -> Option<char> {
        self.chars.get(self.pos).map(|x| x.to_owned())
    }
    /// True when positioned at `\n` or at the `\r` of a `\r\n` pair.
    fn is_new_line(&self) -> bool {
        match self.current_char() {
            Some('\n') => true,
            Some('\r') => self.peek_char() == Some('\n'),
            _ => false,
        }
    }
    fn is_hex(&self) -> bool {
        self.is_digit() || match self.current_char() {
            Some(current_char) => current_char >= 'a' && current_char <= 'f'
                || current_char >= 'A' && current_char <= 'F',
            _ => false,
        }
    }
    fn is_digit(&self) -> bool {
        self.is_one_nine() || self.is_zero()
    }
    fn is_zero(&self) -> bool {
        self.current_char() == Some('0')
    }
    fn is_one_nine(&self) -> bool {
        match self.current_char() {
            Some(current_char) => current_char >= '1' && current_char <= '9',
            _ => false,
        }
    }
    fn is_negative_sign(&self) -> bool {
        self.current_char() == Some('-')
    }
    fn is_decimal_point(&self) -> bool {
        self.current_char() == Some('.')
    }
}
#[cfg(test)]
mod tests {
    use super::Scanner;
    use super::super::common::{ImmutableString};
    use super::super::tokens::{Token};
    // Note: string escapes are kept verbatim by the scanner, not decoded.
    #[test]
    fn it_tokenizes_string() {
        assert_has_tokens(
            r#""t\"est", "\r\n\n\ua0B9","#,
            vec![
                Token::String(ImmutableString::from(r#"t\"est"#)),
                Token::Comma,
                Token::String(ImmutableString::from("\\r\\n\\n\\ua0B9")),
                Token::Comma,
            ]
        );
    }
    #[test]
    fn it_tokenizes_numbers() {
        assert_has_tokens(
            "0, 0.123, -198, 0e-345, 0.3e+025,",
            vec![
                Token::Number(ImmutableString::from("0")),
                Token::Comma,
                Token::Number(ImmutableString::from("0.123")),
                Token::Comma,
                Token::Number(ImmutableString::from("-198")),
                Token::Comma,
                Token::Number(ImmutableString::from("0e-345")),
                Token::Comma,
                Token::Number(ImmutableString::from("0.3e+025")),
                Token::Comma,
            ]
        );
    }
    #[test]
    fn it_tokenizes_simple_tokens() {
        assert_has_tokens(
            "{}[],:true,false,null,",
            vec![
                Token::OpenBrace,
                Token::CloseBrace,
                Token::OpenBracket,
                Token::CloseBracket,
                Token::Comma,
                Token::Colon,
                Token::Boolean(true),
                Token::Comma,
                Token::Boolean(false),
                Token::Comma,
                Token::Null,
                Token::Comma,
            ]);
    }
    #[test]
    fn it_tokenizes_comment_line() {
        // Line comments exclude the trailing newline (and the `\r` of `\r\n`).
        assert_has_tokens(
            "//test\n//t\r\n// test\n,",
            vec![
                Token::CommentLine(ImmutableString::from("test")),
                Token::CommentLine(ImmutableString::from("t")),
                Token::CommentLine(ImmutableString::from(" test")),
                Token::Comma,
            ]);
    }
    #[test]
    fn it_tokenizes_comment_blocks() {
        assert_has_tokens(
            "/*test\n *//* test*/,",
            vec![
                Token::CommentBlock(ImmutableString::from("test\n ")),
                Token::CommentBlock(ImmutableString::from(" test")),
                Token::Comma,
            ]);
    }
    // Scans `text` to exhaustion and compares the token stream to `tokens`.
    fn assert_has_tokens(text: &str, tokens: Vec<Token>) {
        let mut scanner = Scanner::new(text);
        let mut scanned_tokens = Vec::new();
        loop {
            match scanner.scan() {
                Ok(Some(token)) => scanned_tokens.push(token),
                Ok(None) => break,
                Err(err) => panic!("Error parsing: {:?}", err),
            }
        }
        assert_eq!(scanned_tokens, tokens);
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.