text stringlengths 8 4.13M |
|---|
// Copyright 2014 nerd-games.com.
//
// Licensed under the Apache License, Version 2.0.
// See: http://www.apache.org/licenses/LICENSE-2.0
// This file may only be copied, modified and distributed according to those terms.
use gl;
use gl::types::*;
use std::mem;
use super::shader::*;
use err::*;
/// A class that holds the ShaderProgram handle
///
/// Shader can be bound using `ShaderProgram::attach(Shader)`
/// and linked using `ShaderProgram::link()`.
/// Call `ShaderProgram::use_program()` to use the ShaderProgram.
/// # Examples
/// ```rust
/// let shader_program = ShaderProgram::new();
/// // Load vertex and fragment shaders
/// let vert_shader = Shader::new("resources/shader/basic.vert", VertexShader);
/// let frag_shader = Shader::new("resources/shader/basic.frag", FragmentShader);
/// // attach the shaders
/// shader_program.attach(&vert_shader);
/// shader_program.attach(&frag_shader);
/// // and link them!
/// shader_program.link();
/// // detach them after linking, so they can be removed from memory
/// // once linked only the shaderprogram is needed
/// shader_program.detach(&vert_shader);
/// shader_program.detach(&frag_shader);
/// ```
pub struct ShaderProgram {
    // OpenGL program object handle returned by glCreateProgram.
    // Owned by this struct; released via glDeleteProgram in Drop.
    id: GLuint
}
impl ShaderProgram {
/// Creates a new ShaderProgram instance
#[inline]
pub fn new() -> ShaderProgram {
ShaderProgram{id: gl::CreateProgram()}
}
#[inline]
fn check_for_linking_errors(&self) -> Option<GLError> {
unsafe {
if get_status(self.id, gl::LINK_STATUS) == gl::FALSE as GLint {
get_info_log(self.id)
} else { None }
}
}
#[inline(always)]
pub fn attach(&self, shader: &Shader) {
gl::AttachShader(self.id, shader.id);
}
#[inline(always)]
pub fn detach(&self, shader: &Shader) {
gl::DetachShader(self.id, shader.id);
}
#[inline]
pub fn bind_attrib_location(&self, index: GLuint, name: &str) {
unsafe { gl::BindAttribLocation(self.id, index, mem::transmute(name.as_ptr())) }
}
#[inline]
pub fn link(&self) -> Option<GLError> {
gl::LinkProgram(self.id);
self.check_for_linking_errors()
}
#[inline(always)]
pub fn use_program(&self) {
gl::UseProgram(self.id);
}
#[inline]
pub fn delete(&self) {
debug!("Deleting shader program {}", self.id);
gl::DeleteProgram(self.id);
}
}
// RAII cleanup: releasing the struct releases the GL program object, so a
// leaked handle cannot outlive its owner.
impl Drop for ShaderProgram {
    #[inline]
    fn drop(&mut self) {
        self.delete();
    }
}
|
#![macro_use]
// `fail!(loc, fmt, args...)` reports a formatted error tagged with a source
// location. Under `cfg(test)` it panics so the test harness can observe the
// failure; in normal builds it prints the message and terminates the process.
// NOTE(review): `#![macro_use]` is an inner attribute — confirm the compiler
// accepts it here; `#[macro_use]` conventionally annotates a `mod` or
// `extern crate` item instead.
#[macro_export]
#[cfg(test)]
macro_rules! fail {
    // Location plus a bare format string.
    ($loc:expr, $fmt:expr) => ({
        panic!(concat!("Error({}): ", $fmt), $loc);
    });
    // Location, format string, and additional format arguments.
    ($loc:expr, $fmt:expr, $($arg:tt)*) => ({
        panic!(concat!("Error({}): ", $fmt), $loc, $($arg)*);
    });
}
// Non-test builds: print the error and exit(1) instead of panicking.
#[cfg(not(test))]
macro_rules! fail {
    ($loc:expr, $fmt:expr) => ({
        println!(concat!("Error({}): ", $fmt), $loc);
        $crate::std::process::exit(1);
    });
    ($loc:expr, $fmt:expr, $($arg:tt)*) => ({
        println!(concat!("Error({}): ", $fmt), $loc, $($arg)*);
        $crate::std::process::exit(1);
    });
}
|
use rocket::{get, post, response::content, State};
use juniper::RootNode;
use crate::{
db::PrimaryDb,
graphql::{context::Context, mutation_root::MutationRoot, query_root::QueryRoot},
};
/// Juniper schema root combining the query and mutation entry points.
pub type Schema = RootNode<'static, QueryRoot, MutationRoot>;
/// Simple landing/health-check route.
#[get("/")]
pub fn index() -> &'static str { "Hello, world!" }
/// Serves the GraphiQL in-browser IDE pointed at the `/graphql` endpoint.
/// NOTE(review): this shares `#[get("/")]` with `index` above — presumably the
/// two are mounted under different base paths; verify in the rocket setup.
#[get("/")]
pub fn graphiql() -> content::Html<String> { juniper_rocket::graphiql_source("/graphql") }
/// Executes a GraphQL query submitted via GET query parameters.
/// `context` is the request-guarded database connection handed to resolvers.
#[get("/graphql?<request>")]
pub fn get_graphql_handler(
    context : PrimaryDb,
    request : juniper_rocket::GraphQLRequest,
    schema : State<Schema>,
) -> juniper_rocket::GraphQLResponse {
    request.execute(
        &schema,
        &Context {
            connection : context,
        },
    )
}
/// Executes a GraphQL query submitted in the POST body; same semantics as the
/// GET handler above.
#[post("/graphql", data = "<request>")]
pub fn post_graphql_handler(
    context : PrimaryDb,
    request : juniper_rocket::GraphQLRequest,
    schema : State<Schema>,
) -> juniper_rocket::GraphQLResponse {
    request.execute(
        &schema,
        &Context {
            connection : context,
        },
    )
}
|
// Lints are currently suppressed to prevent merge conflicts in case our contributors fix their code
// on their own. This attribute should be removed in the future.
#![allow(warnings)]
pub mod beacon_state_accessors;
pub mod beacon_state_mutators;
pub mod crypto;
pub mod error;
pub mod math;
pub mod misc;
pub mod predicates;
|
#![feature(test)]
extern crate mioco;
extern crate test;
use std::thread;
fn main() {
    // Smoke-test the mioco coroutine channel path with 10 senders.
    channel(10);
}
/// Spawns `num_threads` mioco coroutines inside a single mioco runtime; each
/// sends its index over a mioco mpsc channel, and the parent coroutine drains
/// exactly that many messages before the runtime shuts down.
pub fn channel(num_threads: u64) {
    let (tx, rx) = mioco::sync::mpsc::channel::<u64>();
    mioco::start(move || {
        for i in 0u64..num_threads {
            // Each coroutine gets its own clone of the sender.
            let tx = tx.clone();
            mioco::spawn(move || {
                tx.send(i).unwrap();
            });
        }
        // recv() blocks the coroutine until a message arrives; receive one
        // message per spawned sender.
        for _ in 0u64..num_threads {
            let _ = rx.recv().unwrap();
        }
    }).unwrap();
}
/// OS-thread counterpart of `channel`: spawns `num_threads` native threads
/// that each send their index over a std mpsc channel, then receives all of
/// the messages on the calling thread.
pub fn channel_threads(num_threads: u64) {
    let (tx, rx) = std::sync::mpsc::channel::<u64>();
    (0..num_threads).for_each(|index| {
        // Every thread owns its own clone of the sender handle.
        let sender = tx.clone();
        thread::spawn(move || {
            sender.send(index).unwrap();
        });
    });
    // Drain exactly one message per spawned thread; recv blocks until the
    // corresponding send has happened.
    (0..num_threads).for_each(|_| {
        rx.recv().unwrap();
    });
}
// Benchmarks comparing mioco coroutines against OS threads for the same
// send/receive workload, at small (10) and large (10000) task counts.
// Requires the unstable `test` crate (`#![feature(test)]` above).
#[cfg(test)]
mod tests {
    use super::*;
    use test::Bencher;
    #[bench]
    fn bench_rust_channel_10_u64(b: &mut Bencher) {
        b.iter(|| channel(10));
    }
    #[bench]
    fn bench_rust_channel_10000_u64(b: &mut Bencher) {
        b.iter(|| channel(10000));
    }
    #[bench]
    fn bench_rust_channel_threads_10_u64(b: &mut Bencher) {
        b.iter(|| channel_threads(10));
    }
    #[bench]
    fn bench_rust_channel_threads_10000_u64(b: &mut Bencher) {
        b.iter(|| channel_threads(10000));
    }
}
|
use std::fmt;
use crate::hlc;
use crate::object;
/// A constraint on a hybrid-logical-clock timestamp, used to gate
/// transaction requirements on time.
#[derive(Clone, Copy, Debug)]
pub enum TimestampRequirement {
    Equals(hlc::Timestamp),
    LessThan(hlc::Timestamp),
    GreaterThan(hlc::Timestamp),
}
/// Strategy for ordering two object keys.
#[derive(Copy, Clone, Debug)]
pub enum KeyComparison {
    // Raw lexicographic byte comparison.
    ByteArray,
    // Keys interpreted as big-endian signed integers.
    Integer,
    // Keys interpreted as (lossy) UTF-8 text.
    Lexical
}
impl KeyComparison {
    /// Compares `left` and `right` under this strategy.
    ///
    /// Returns -1 when `left` orders before `right`, 1 when it orders after,
    /// and 0 when the two keys are equal under the strategy.
    pub fn compare(&self, left: &object::Key, right: &object::Key ) -> i8 {
        let l = left.as_bytes();
        let r = right.as_bytes();
        match self {
            KeyComparison::ByteArray => {
                // Lexicographic byte comparison. Walk only the shared prefix:
                // the previous bound used the *longer* length, which indexed
                // out of bounds whenever the keys differed in length.
                let min = if l.len() < r.len() { l.len() } else { r.len() };
                for i in 0..min {
                    if l[i] < r[i] {
                        return -1
                    }
                    if l[i] > r[i] {
                        // Was `return 0`, which wrongly reported equality for
                        // a strictly greater byte.
                        return 1
                    }
                }
                // Shared prefix equal: the shorter key sorts first.
                if l.len() < r.len() {
                    return -1
                }
                if l.len() > r.len() {
                    return 1
                }
                0
            },
            KeyComparison::Integer => {
                // Keys are big-endian signed integers; empty keys sort first.
                if l.len() == 0 && r.len() == 0 {
                    return 0
                }
                // Was `l.len() == 0 && l.len() != 0` — always false, so an
                // empty left key fell through and compared as non-empty.
                if l.len() == 0 && r.len() != 0 {
                    return -1
                }
                if l.len() != 0 && r.len() == 0 {
                    return 1
                }
                let big_l = num_bigint::BigInt::from_signed_bytes_be(l);
                let big_r = num_bigint::BigInt::from_signed_bytes_be(r);
                if big_l < big_r {
                    return -1
                }
                if big_l > big_r {
                    return 1
                }
                return 0
            },
            KeyComparison::Lexical => {
                // Compare as text, substituting U+FFFD for invalid UTF-8.
                let sl = String::from_utf8_lossy(l);
                let sr = String::from_utf8_lossy(r);
                if sl < sr {
                    return -1
                }
                if sl > sr {
                    return 1
                }
                return 0
            }
        }
    }
}
/// A key paired with the object revision it was observed at; used in
/// `KeyValueUpdate::full_content_lock` to pin a key's expected revision.
#[derive(Clone, Debug)]
pub struct KeyRevision {
    pub key: object::Key,
    pub revision: object::Revision,
}
impl fmt::Display for KeyComparison {
    /// Renders the strategy as `KeyComparison(<variant>)`.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let variant = match self {
            KeyComparison::ByteArray => "ByteArray",
            KeyComparison::Integer => "Integer",
            KeyComparison::Lexical => "Lexical",
        };
        write!(f, "KeyComparison({})", variant)
    }
}
impl fmt::Display for TimestampRequirement {
    /// Renders only the variant name; the contained timestamp is omitted.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.write_str(match self {
            TimestampRequirement::Equals(_) => "Equals",
            TimestampRequirement::LessThan(_) => "LessThan",
            TimestampRequirement::GreaterThan(_) => "GreaterThan",
        })
    }
}
/// A per-key precondition evaluated as part of a transaction's
/// `KeyValueUpdate` requirement.
#[derive(Clone, Debug)]
pub enum KeyRequirement {
    // The key must exist.
    Exists {
        key: object::Key
    },
    // The key may or may not exist (no hard constraint).
    MayExist {
        key: object::Key
    },
    // The key must be absent.
    DoesNotExist {
        key: object::Key
    },
    // The key's timestamp must be strictly less than `timestamp`.
    TimestampLessThan {
        key: object::Key,
        timestamp: hlc::Timestamp
    },
    // The key's timestamp must be strictly greater than `timestamp`.
    TimestampGreaterThan {
        key: object::Key,
        timestamp: hlc::Timestamp
    },
    // The key's timestamp must equal `timestamp` exactly.
    TimestampEquals {
        key: object::Key,
        timestamp: hlc::Timestamp
    },
    // The key must be at exactly this revision.
    KeyRevision {
        key: object::Key,
        revision: object::Revision
    },
    // The containing object must be at exactly this revision.
    KeyObjectRevision {
        key: object::Key,
        revision: object::Revision
    },
    // The key must fall within the range owned by this store, ordered by
    // `comparison`.
    WithinRange {
        key: object::Key,
        comparison: KeyComparison
    }
}
impl fmt::Display for KeyRequirement{
    /// Renders the variant name with its key (and timestamp/revision/
    /// comparison payload where applicable).
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            KeyRequirement::Exists{key} => write!(f, "Exists({})", key),
            KeyRequirement::MayExist{key} => write!(f, "MayExist({})", key),
            // Was "DoesNotEqual({})" — the label did not match the variant.
            KeyRequirement::DoesNotExist{key} => write!(f, "DoesNotExist({})", key),
            KeyRequirement::TimestampLessThan{key, timestamp} => write!(f, "TimestampLessThan({},{})", key, timestamp),
            KeyRequirement::TimestampGreaterThan{key, timestamp} => write!(f, "TimestampGreaterThan({},{})", key, timestamp),
            KeyRequirement::TimestampEquals{key, timestamp} => write!(f, "TimestampEquals({},{})", key, timestamp),
            KeyRequirement::KeyRevision{key, revision} => write!(f, "KeyRevision({},{})", key, revision),
            KeyRequirement::KeyObjectRevision{key, revision} => write!(f, "KeyObjectRevision({},{})", key, revision),
            KeyRequirement::WithinRange{key, comparison} => write!(f, "WithinRange({},{})", key, comparison),
        }
    }
}
/// A precondition (and, for updates, the accompanying operation) that must
/// hold for a transaction to commit.
#[derive(Clone)]
pub enum TransactionRequirement {
    // Constraint on the local clock.
    LocalTime {
        requirement: TimestampRequirement
    },
    // The object must still be at `required_revision` (optimistic lock).
    RevisionLock {
        pointer: object::Pointer,
        required_revision: object::Revision,
    },
    // Like RevisionLock, but the commit advances the revision.
    VersionBump {
        pointer: object::Pointer,
        required_revision: object::Revision,
    },
    // Compare-and-set on the object's reference count.
    RefcountUpdate {
        pointer: object::Pointer,
        required_refcount: object::Refcount,
        new_refcount: object::Refcount
    },
    // Data mutation gated on the object's current revision.
    DataUpdate {
        pointer: object::Pointer,
        required_revision: object::Revision,
        operation: object::DataUpdateOperation
    },
    // Key-value mutation with optional revision check, per-key revision
    // locks, and per-key requirements.
    KeyValueUpdate {
        pointer: object::Pointer,
        required_revision: Option<object::Revision>,
        full_content_lock: Vec<KeyRevision>,
        key_requirements: Vec<KeyRequirement>
    }
}
|
// Copyright 2019 The Chromium OS Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
use std::collections::VecDeque;
use std::io::{self, Read, Write};
use std::iter::ExactSizeIterator;
use std::os::unix::io::{AsRawFd, RawFd};
use std::os::unix::net::UnixStream;
const EVENT_SIZE: usize = 4;
const EVENT_BUFFER_LEN_MAX: usize = 16 * EVENT_SIZE;
const EV_SYN: u16 = 0x00;
const EV_KEY: u16 = 0x01;
const EV_REL: u16 = 0x02;
const EV_ABS: u16 = 0x03;
const SYN_REPORT: u16 = 0;
const REL_X: u16 = 0x00;
const REL_Y: u16 = 0x01;
const ABS_X: u16 = 0x00;
const ABS_Y: u16 = 0x01;
// /// Half-way build `EventDevice` with only the `event_socket` defined. Finish building the
// /// `EventDevice` by using `status_socket`.
// pub struct PartialEventDevice(UnixStream);
// impl PartialEventDevice {
// /// Finish build `EventDevice` by providing the `status_socket`.
// pub fn status_socket(self, status_socket: UnixStream) -> EventDevice {
// EventDevice {
// event_socket: self.0,
// status_socket,
// }
// }
// }
#[derive(Copy, Clone, PartialEq, Eq)]
pub enum EventDeviceKind {
    /// Produces relative mouse motions, wheel, and button clicks while the real mouse is captured.
    Mouse,
    /// Produces absolute motion and touch events from the display window's events.
    Touchscreen,
    /// Produces key events while the display window has focus.
    Keyboard,
}
/// A single input event in its little-endian wire encoding:
/// type (u16) | code (u16) | value (u32), 8 bytes total.
#[derive(Copy, Clone, Default, PartialEq, Eq, Debug)]
pub struct EventEncoded {
    pub type_: u16,
    pub code: u16,
    pub value: u32,
}
impl EventEncoded {
    /// Synchronization marker (EV_SYN/SYN_REPORT) terminating a report.
    #[inline]
    pub fn syn() -> EventEncoded {
        EventEncoded { type_: EV_SYN, code: SYN_REPORT, value: 0 }
    }
    /// Absolute-axis event (EV_ABS) for the given axis code and position.
    #[inline]
    pub fn absolute(code: u16, value: u32) -> EventEncoded {
        EventEncoded { type_: EV_ABS, code, value }
    }
    /// Absolute motion on the X axis.
    #[inline]
    pub fn absolute_x(x: u32) -> EventEncoded {
        Self::absolute(ABS_X, x)
    }
    /// Absolute motion on the Y axis.
    #[inline]
    pub fn absolute_y(y: u32) -> EventEncoded {
        Self::absolute(ABS_Y, y)
    }
    /// Key event (EV_KEY); value 1 encodes press, 0 encodes release.
    #[inline]
    pub fn key(code: u16, pressed: bool) -> EventEncoded {
        EventEncoded { type_: EV_KEY, code, value: pressed as u32 }
    }
    /// Decodes an event from its 8-byte little-endian wire form.
    #[inline]
    pub fn from_bytes(v: [u8; 8]) -> EventEncoded {
        EventEncoded {
            type_: u16::from_le_bytes([v[0], v[1]]),
            code: u16::from_le_bytes([v[2], v[3]]),
            value: u32::from_le_bytes([v[4], v[5], v[6], v[7]]),
        }
    }
    /// Encodes the event into its 8-byte little-endian wire form.
    #[inline]
    pub fn to_bytes(&self) -> [u8; 8] {
        let mut out = [0u8; 8];
        out[0..2].copy_from_slice(&self.type_.to_le_bytes());
        out[2..4].copy_from_slice(&self.code.to_le_bytes());
        out[4..8].copy_from_slice(&self.value.to_le_bytes());
        out
    }
}
/// Encapsulates a virtual event device, such as a mouse or keyboard
pub struct EventDevice {
    kind: EventDeviceKind,
    // Events that did not fit in the (non-blocking) socket yet; flushed on
    // subsequent send calls.
    event_buffer: VecDeque<u8>,
    event_socket: UnixStream,
}
impl EventDevice {
    /// Wraps `event_socket` as a virtual device of the given kind.
    /// The socket is switched to non-blocking mode (best-effort; the result
    /// is intentionally ignored) so sends never stall the caller.
    pub fn new(kind: EventDeviceKind, event_socket: UnixStream) -> EventDevice {
        let _ = event_socket.set_nonblocking(true);
        EventDevice {
            kind,
            event_buffer: Default::default(),
            event_socket,
        }
    }
    /// Convenience constructor for a relative-motion mouse device.
    #[inline]
    pub fn mouse(event_socket: UnixStream) -> EventDevice {
        Self::new(EventDeviceKind::Mouse, event_socket)
    }
    /// Convenience constructor for an absolute-motion touchscreen device.
    #[inline]
    pub fn touchscreen(event_socket: UnixStream) -> EventDevice {
        Self::new(EventDeviceKind::Touchscreen, event_socket)
    }
    /// Convenience constructor for a keyboard device.
    #[inline]
    pub fn keyboard(event_socket: UnixStream) -> EventDevice {
        Self::new(EventDeviceKind::Keyboard, event_socket)
    }
    /// Returns which kind of device this is.
    #[inline]
    pub fn kind(&self) -> EventDeviceKind {
        self.kind
    }
    /// Flushes the buffered events that did not fit into the underlying transport, if any.
    ///
    /// Returns `Ok(true)` if, after this function returns, the buffer of
    /// events is empty; `Ok(false)` if the socket accepted no more bytes.
    pub fn flush_buffered_events(&mut self) -> io::Result<bool> {
        while !self.event_buffer.is_empty() {
            // Write only the first contiguous slice of the deque; the loop
            // picks up the second slice on the next iteration.
            let written = self.event_socket.write(&self.event_buffer.as_slices().0)?;
            if written == 0 {
                return Ok(false);
            }
            self.event_buffer.drain(..written);
        }
        Ok(true)
    }
    /// True when no events are waiting to be flushed.
    pub fn is_buffered_events_empty(&self) -> bool {
        self.event_buffer.is_empty()
    }
    /// Buffers a whole report (the given events followed by a SYN marker) and
    /// attempts to flush it. Returns `Ok(false)` without buffering anything
    /// when the report would not fit in the buffer.
    pub fn send_report<E: IntoIterator<Item = EventEncoded>>(
        &mut self,
        events: E,
    ) -> io::Result<bool>
    where
        E::IntoIter: ExactSizeIterator,
    {
        let it = events.into_iter();
        // NOTE(review): `EVENT_SIZE` is 4 but `to_bytes` emits 8 bytes per
        // event, so this capacity check undercounts — confirm intended sizes.
        // Also, a large `it.len()` can underflow the subtraction (usize);
        // verify callers bound the report size.
        if self.event_buffer.len() > (EVENT_BUFFER_LEN_MAX - EVENT_SIZE * (it.len() + 1)) {
            return Ok(false);
        }
        for event in it {
            let bytes = event.to_bytes();
            self.event_buffer.extend(bytes.iter());
        }
        // Every report is terminated by a synchronization event.
        self.event_buffer
            .extend(EventEncoded::syn().to_bytes().iter());
        self.flush_buffered_events()
    }
    /// Sends the given `event`, returning `Ok(true)` if, after this function returns, there are no
    /// buffered events remaining.
    pub fn send_event_encoded(&mut self, event: EventEncoded) -> io::Result<bool> {
        if !self.flush_buffered_events()? {
            return Ok(false);
        }
        let bytes = event.to_bytes();
        let written = self.event_socket.write(&bytes)?;
        if written == bytes.len() {
            return Ok(true);
        }
        // Partial write: keep the unwritten tail for a later flush, but only
        // if the buffer has room; otherwise the tail is silently dropped.
        if self.event_buffer.len() <= (EVENT_BUFFER_LEN_MAX - EVENT_SIZE) {
            self.event_buffer.extend(bytes[written..].iter());
        }
        Ok(false)
    }
    /// Reads one 8-byte encoded event from the socket (blocking until the
    /// full event arrives).
    pub fn recv_event_encoded(&self) -> io::Result<EventEncoded> {
        let mut event_bytes = [0; 8];
        (&self.event_socket).read_exact(&mut event_bytes)?;
        Ok(EventEncoded::from_bytes(event_bytes))
    }
}
// Expose the socket's fd so the device can be registered with a poll loop.
impl AsRawFd for EventDevice {
    fn as_raw_fd(&self) -> RawFd {
        self.event_socket.as_raw_fd()
    }
}
|
use crate::neatns::network::node::NodeRef;
/// Link between two nodes
#[derive(Copy, Clone, Debug)]
pub struct Link {
    pub from: NodeRef,
    pub to: NodeRef,
    pub weight: f64,
    pub enabled: bool,
    pub split: bool, // Link has been split
    pub innovation: u64, // Global innovation number
}
// Extension point for user-supplied per-link data that participates in
// crossover. NOTE(review): no implementors are visible in this file —
// confirm it is used elsewhere.
pub trait Custom: Copy + Clone + Send {
    fn new() -> Self;
    fn crossover(&self, other: &Self) -> Self;
}
impl Link {
    /// Builds an enabled, not-yet-split link with the given endpoints,
    /// weight and global innovation number.
    pub fn new(from: NodeRef, to: NodeRef, weight: f64, innovation: u64) -> Link {
        Link { from, to, weight, enabled: true, split: false, innovation }
    }
    /// Combines two corresponding links from different genomes.
    ///
    /// Both links must describe the same connection (identical endpoints and
    /// innovation number); the offspring averages the weights, is enabled if
    /// either parent is, and counts as split only if both parents are.
    pub fn crossover(&self, other: &Link) -> Link {
        assert_eq!(self.from, other.from);
        assert_eq!(self.to, other.to);
        assert_eq!(self.innovation, other.innovation);
        Link {
            weight: (self.weight + other.weight) / 2.0,
            enabled: self.enabled || other.enabled,
            split: self.split && other.split,
            ..*self
        }
    }
    /// Genetic distance between two corresponding links: a bounded weight
    /// difference term plus a penalty when enabled-ness differs.
    pub fn distance(&self, other: &Link) -> f64 {
        let weight_term = 0.5 * (self.weight - other.weight).tanh().abs();
        let enabled_term = 0.5 * ((self.enabled != other.enabled) as u64) as f64;
        weight_term + enabled_term
    }
}
|
use crate::types::keyword_type::KeywordType;
use std::fmt::{Display, Error, Formatter};
/// A single schema-validation failure: what went wrong (`message`), which
/// keyword triggered it, and where in the document (`path`, normalised).
#[derive(Clone, Debug, PartialEq)]
pub(in crate) struct ValidationError {
    // TODO: enhance content
    message: String,
    keyword: KeywordType,
    path: String,
}
impl Display for ValidationError {
    // Delegates to the Debug representation; fine for internal diagnostics.
    fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), Error> {
        write!(f, "{:?}", self)
    }
}
/// Normalises a JSON-pointer-style path by removing a single trailing '/'.
///
/// `"#/"` becomes `"#"` and `"#/path1/"` becomes `"#/path1"`; paths without a
/// trailing slash are returned unchanged. Returns a subslice of `path`, so no
/// allocation occurs.
fn normalise_path(path: &str) -> &str {
    // strip_suffix subsumes the old explicit "#/" special case ("#/" -> "#").
    path.strip_suffix('/').unwrap_or(path)
}
impl ValidationError {
    /// Builds an error for `keyword` at `path` (normalised — trailing '/'
    /// stripped) with the given human-readable message.
    pub(in crate) fn new(path: &str, keyword: KeywordType, message: &str) -> Self {
        Self {
            path: normalise_path(path).to_string(),
            message: message.to_string(),
            keyword,
        }
    }
}
#[cfg(test)]
mod tests {
    use super::normalise_path;
    use test_case::test_case;
    // Table-driven cases: input path => expected normalised path.
    #[test_case("#" => "#" ; "Document root")]
    #[test_case("#/" => "#" ; "Document root, with additional /")]
    #[test_case("#/path1" => "#/path1" ; "One level into document root")]
    #[test_case("#/path1/" => "#/path1" ; "One level into document root, with additional /")]
    fn path_normalisation(path_to_normalise: &str) -> &str {
        normalise_path(path_to_normalise)
    }
}
|
//! Services is a core layer for the app business logic like
//! validation, authorization, etc.
pub mod jwt;
pub mod mocks;
pub mod types;
pub mod user_roles;
pub mod users;
pub mod util;
pub use self::types::Service;
|
use regex::Regex;
use serde::ser::{SerializeMap, Serializer};
use serde_json::Value;
use tracing::{Event, Subscriber};
use tracing_bunyan_formatter::JsonStorage;
use tracing_subscriber::{layer::Context, Layer};
use crate::filters::{EventFilters, Filter};
use crate::worker::{WorkerMessage, SlackBackgroundWorker};
use crate::{config::SlackConfig, message::SlackPayload, worker::worker, ChannelSender};
use std::collections::HashMap;
/// Layer for forwarding tracing events to Slack.
pub struct SlackLayer {
    /// Filter events by their target.
    ///
    /// Filter type semantics:
    /// - Subtractive: Exclude an event if the target does NOT MATCH a given regex.
    /// - Additive: Exclude an event if the target MATCHES a given regex.
    target_filters: EventFilters,
    /// Filter events by their message.
    ///
    /// Filter type semantics:
    /// - Positive: Exclude an event if the message MATCHES a given regex, and
    /// - Negative: Exclude an event if the message does NOT MATCH a given regex.
    message_filters: Option<EventFilters>,
    /// Filter events by fields.
    ///
    /// Filter type semantics:
    /// - Positive: Exclude the event if its key MATCHES a given regex.
    /// - Negative: Exclude the event if its key does NOT MATCH a given regex.
    event_by_field_filters: Option<EventFilters>,
    /// Filter fields of events from being sent to Slack.
    ///
    /// Filter type semantics:
    /// - Positive: Exclude event fields if the field's key MATCHES any provided regular expressions.
    field_exclusion_filters: Option<Vec<Regex>>,
    /// Configure the layer's connection to the Slack Webhook API.
    config: SlackConfig,
    /// An unbounded sender, which the caller must send `WorkerMessage::Shutdown` in order to cancel
    /// worker's receive-send loop. Also carries the formatted payloads.
    shutdown_sender: ChannelSender,
}
impl SlackLayer {
    /// Create a new layer for forwarding messages to Slack, using a specified
    /// configuration. This method spawns a task onto the tokio runtime to begin sending tracing
    /// events to Slack.
    ///
    /// Returns the tracing_subscriber::Layer impl to add to a registry and the
    /// background-worker handle (which owns a sender used to shut the worker
    /// down and the spawned tokio task driving the HTTP requests).
    pub(crate) fn new(
        target_filters: EventFilters,
        message_filters: Option<EventFilters>,
        event_by_field_filters: Option<EventFilters>,
        field_exclusion_filters: Option<Vec<Regex>>,
        config: SlackConfig,
    ) -> (SlackLayer, SlackBackgroundWorker) {
        // One channel is shared: the layer sends payloads/shutdown, the
        // spawned worker task receives them.
        let (tx, rx) = tokio::sync::mpsc::unbounded_channel();
        let layer = SlackLayer {
            target_filters,
            message_filters,
            field_exclusion_filters,
            event_by_field_filters,
            config,
            shutdown_sender: tx.clone(),
        };
        let worker = SlackBackgroundWorker {
            sender: tx,
            handle: tokio::spawn(worker(rx))
        };
        (layer, worker)
    }
    /// Create a new builder for SlackLayer.
    pub fn builder(target_filters: EventFilters) -> SlackLayerBuilder {
        SlackLayerBuilder::new(target_filters)
    }
}
/// A builder for creating a Slack layer.
///
/// The layer requires a regex for selecting events to be sent to Slack by their target. Specifying
/// no filter (e.g. ".*") will cause an explosion in the number of messages observed by the layer.
///
/// Several methods expose initialization of optional filtering mechanisms, along with Slack
/// configuration that defaults to searching in the local environment variables.
pub struct SlackLayerBuilder {
    target_filters: EventFilters,
    message_filters: Option<EventFilters>,
    event_by_field_filters: Option<EventFilters>,
    field_exclusion_filters: Option<Vec<Regex>>,
    // None means "read SlackConfig from the environment" at build time.
    config: Option<SlackConfig>,
}
impl SlackLayerBuilder {
    /// Starts a builder with only the mandatory target filter set; every
    /// optional filter begins unset and the Slack config defaults to the
    /// environment at build time.
    pub(crate) fn new(target_filters: EventFilters) -> Self {
        Self {
            target_filters,
            message_filters: None,
            event_by_field_filters: None,
            field_exclusion_filters: None,
            config: None,
        }
    }
    /// Filter events by their message.
    ///
    /// Filter type semantics:
    /// - Positive: Exclude an event if the message MATCHES a given regex, and
    /// - Negative: Exclude an event if the message does NOT MATCH a given regex.
    pub fn message_filters(self, filters: EventFilters) -> Self {
        Self { message_filters: Some(filters), ..self }
    }
    /// Filter events by fields.
    ///
    /// Filter type semantics:
    /// - Positive: Exclude the event if its key MATCHES a given regex.
    /// - Negative: Exclude the event if its key does NOT MATCH a given regex.
    pub fn event_by_field_filters(self, filters: EventFilters) -> Self {
        Self { event_by_field_filters: Some(filters), ..self }
    }
    /// Filter fields of events from being sent to Slack.
    ///
    /// Filter type semantics:
    /// - Positive: Exclude event fields if the field's key MATCHES any provided regular expressions.
    pub fn field_exclusion_filters(self, filters: Vec<Regex>) -> Self {
        Self { field_exclusion_filters: Some(filters), ..self }
    }
    /// Configure the layer's connection to the Slack Webhook API.
    pub fn slack_config(self, config: SlackConfig) -> Self {
        Self { config: Some(config), ..self }
    }
    /// Create a SlackLayer and its corresponding background worker to (async) send the messages.
    pub fn build(self) -> (SlackLayer, SlackBackgroundWorker) {
        let config = self.config.unwrap_or_else(SlackConfig::new_from_env);
        SlackLayer::new(
            self.target_filters,
            self.message_filters,
            self.event_by_field_filters,
            self.field_exclusion_filters,
            config,
        )
    }
}
impl<S> Layer<S> for SlackLayer
where
    S: Subscriber + for<'a> tracing_subscriber::registry::LookupSpan<'a>,
{
    /// Formats each event that passes all configured filters into a Slack
    /// message and forwards it to the background worker for delivery.
    fn on_event(&self, event: &Event<'_>, ctx: Context<'_, S>) {
        let current_span = ctx.lookup_current();
        let mut event_visitor = JsonStorage::default();
        event.record(&mut event_visitor);
        // The closure short-circuits with Err(FilterError) via `?` as soon as
        // any filter rejects the event, skipping the remaining formatting.
        let format = || {
            const KEYWORDS: [&str; 2] = ["message", "error"];
            let target = event.metadata().target();
            self.target_filters.process(target)?;
            // Extract the "message" field, if provided. Fall back to the
            // "error" field, then to a placeholder.
            let message = event_visitor
                .values()
                .get("message")
                .map(|v| match v {
                    Value::String(s) => Some(s.as_str()),
                    _ => None,
                })
                .flatten()
                .or_else(|| {
                    event_visitor
                        .values()
                        .get("error")
                        .map(|v| match v {
                            Value::String(s) => Some(s.as_str()),
                            _ => None,
                        })
                        .flatten()
                })
                .unwrap_or_else(|| "No message");
            self.message_filters.process(message)?;
            let mut metadata_buffer = Vec::new();
            let mut serializer = serde_json::Serializer::new(&mut metadata_buffer);
            let mut map_serializer = serializer.serialize_map(None)?;
            // Add all the other fields associated with the event, except the
            // message/error fields we already used above.
            for (key, value) in event_visitor
                .values()
                .iter()
                .filter(|(&key, _)| !KEYWORDS.contains(&key))
                .filter(|(&key, _)| self.field_exclusion_filters.process(key).is_ok())
            {
                self.event_by_field_filters.process(key)?;
                map_serializer.serialize_entry(key, value)?;
            }
            // Add all the fields from the current span, if we have one.
            // (The source previously read `¤t_span` — a mangled HTML
            // entity for `&current_span`, which does not compile.)
            if let Some(span) = &current_span {
                let extensions = span.extensions();
                if let Some(visitor) = extensions.get::<JsonStorage>() {
                    for (key, value) in visitor.values() {
                        map_serializer.serialize_entry(key, value)?;
                    }
                }
            }
            map_serializer.end()?;
            let span = match &current_span {
                Some(span) => {
                    span.metadata().name()
                }
                None => "None".into()
            };
            // Round-trip through serde_json for pretty-printing the metadata.
            let metadata = {
                let data: HashMap<String, Value> = serde_json::from_slice(metadata_buffer.as_slice()).unwrap();
                serde_json::to_string_pretty(&data).unwrap()
            };
            let message = format!(
                concat!(
                    "*Event [{}]*: \"{}\"\n",
                    "*Span*: _{}_\n",
                    "*Target*: _{}_\n",
                    "*Source*: _{}#L{}_\n",
                    "*Metadata*:\n",
                    "```",
                    "{}",
                    "```",
                ),
                event.metadata().level().to_string(), message,
                span,
                target,
                event.metadata().file().unwrap_or("Unknown"), event.metadata().line().unwrap_or(0),
                metadata
            );
            Ok(message)
        };
        let result: Result<String, crate::filters::FilterError> = format();
        // A filter rejection surfaces as Err and is silently dropped; only
        // accepted events are queued for the background worker.
        if let Ok(formatted) = result {
            let payload = SlackPayload::new(
                self.config.channel_name.clone(),
                self.config.username.clone(),
                formatted,
                self.config.webhook_url.clone(),
                self.config.icon_emoji.clone(),
            );
            if let Err(e) = self.shutdown_sender.send(WorkerMessage::Data(payload)) {
                tracing::error!(err = %e, "failed to send slack payload to given channel")
            };
        }
    }
}
|
use super::schema::{entries, feed_history, feeds};
use juniper::GraphQLObject;
use serde::{Deserialize, Serialize};
// TODO: rework schema to make most fields non-nullable?
/// A feed entry row as read from the `entries` table; every column is
/// nullable in the current schema, hence the Options.
#[derive(Queryable, PartialEq, Debug, Serialize, Deserialize)]
pub struct Entry {
    pub id: Option<String>,
    pub feed_id: Option<String>,
    pub published: Option<String>,
    pub created_at: Option<String>,
    pub modified_at: Option<String>,
    pub defunct: Option<bool>,
    pub json: Option<String>,
    pub guid: Option<String>,
    pub title: Option<String>,
    pub link: Option<String>,
    pub summary: Option<String>,
    pub content: Option<String>,
    pub updated: Option<String>,
}
/// Borrowed parameter bundle for an insert-or-update of an entry.
/// `skip_update` signals that an existing row should be left untouched.
pub struct EntryUpsert<'a> {
    pub skip_update: bool,
    pub id: &'a str,
    pub feed_id: &'a str,
    pub json: &'a str,
    pub title: &'a str,
    pub link: &'a str,
    pub summary: &'a str,
    pub content: &'a str,
    pub published: &'a str,
    pub updated: &'a str,
    // Timestamp to stamp created_at/modified_at with.
    pub now: &'a str,
}
/// Diesel insertable for a brand-new `entries` row.
#[derive(Insertable)]
#[table_name = "entries"]
pub struct EntryNew<'a> {
    pub id: &'a str,
    pub feed_id: &'a str,
    pub published: &'a str,
    pub updated: &'a str,
    pub created_at: &'a str,
    pub modified_at: &'a str,
    pub defunct: bool,
    pub title: &'a str,
    pub link: &'a str,
    pub summary: &'a str,
    pub content: &'a str,
    pub json: &'a str,
}
/// Diesel changeset for updating an `entries` row; `None` fields are left
/// unchanged by the update.
#[derive(AsChangeset)]
#[table_name = "entries"]
pub struct EntryUpdate<'a> {
    pub published: Option<&'a str>,
    pub updated: Option<&'a str>,
    pub modified_at: Option<&'a str>,
    pub defunct: Option<bool>,
    pub title: Option<&'a str>,
    pub link: Option<&'a str>,
    pub summary: Option<&'a str>,
    pub content: Option<&'a str>,
    pub json: Option<&'a str>,
}
#[derive(Queryable, PartialEq, Debug, Serialize, Deserialize, GraphQLObject)]
#[graphql(description = "An event in feed fetch history")]
pub struct FeedHistory {
    pub id: Option<String>,
    pub feed_id: Option<String>,
    pub created_at: Option<String>,
    pub updated_at: Option<String>,
    pub src: Option<String>,
    pub status: Option<String>,
    pub etag: Option<String>,
    pub last_modified: Option<String>,
    pub json: Option<String>,
    // is_error/error_text populated only for failed fetches.
    pub is_error: Option<bool>,
    pub error_text: Option<String>,
}
/// Diesel insertable recording a successful fetch (HTTP caching metadata
/// included; error columns default to NULL).
#[derive(Insertable)]
#[table_name = "feed_history"]
pub struct FeedHistoryNewSuccess<'a> {
    pub id: &'a str,
    pub feed_id: &'a str,
    pub created_at: &'a str,
    pub src: &'a str,
    pub status: &'a str,
    pub etag: &'a str,
    pub last_modified: &'a str,
}
/// Diesel insertable recording a failed fetch with its error text.
#[derive(Insertable)]
#[table_name = "feed_history"]
pub struct FeedHistoryNewError<'a> {
    pub id: &'a str,
    pub feed_id: &'a str,
    pub created_at: &'a str,
    pub is_error: bool,
    pub error_text: &'a str,
}
/// A feed row as read from the `feeds` table; all columns nullable under the
/// current schema.
#[derive(Queryable, PartialEq, Debug, Serialize, Deserialize)]
pub struct Feed {
    pub id: Option<String>,
    pub published: Option<String>,
    pub created_at: Option<String>,
    pub modified_at: Option<String>,
    pub url: Option<String>,
    pub title: Option<String>,
    pub subtitle: Option<String>,
    pub link: Option<String>,
    pub json: Option<String>,
    pub updated: Option<String>,
    pub last_entry_published: Option<String>,
}
/// Borrowed parameter bundle for an insert-or-update of a feed.
pub struct FeedUpsert<'a> {
    pub id: &'a str,
    pub json: &'a str,
    pub title: &'a str,
    pub link: &'a str,
    pub url: &'a str,
    pub published: &'a str,
    pub updated: &'a str,
    // Timestamp to stamp created_at/modified_at with.
    pub now: &'a str,
    pub last_entry_published: &'a str,
}
/// Diesel insertable for a brand-new `feeds` row.
#[derive(Insertable)]
#[table_name = "feeds"]
pub struct FeedNew<'a> {
    pub id: &'a str,
    pub published: &'a str,
    pub created_at: &'a str,
    pub modified_at: &'a str,
    pub url: &'a str,
    pub title: &'a str,
    pub link: &'a str,
    pub json: &'a str,
    pub last_entry_published: &'a str,
}
/// Diesel changeset for updating a `feeds` row; `None` fields are left
/// unchanged by the update.
#[derive(AsChangeset)]
#[table_name = "feeds"]
pub struct FeedUpdate<'a> {
    pub published: Option<&'a str>,
    pub updated: Option<&'a str>,
    pub modified_at: Option<&'a str>,
    pub url: Option<&'a str>,
    pub title: Option<&'a str>,
    pub link: Option<&'a str>,
    pub json: Option<&'a str>,
    pub last_entry_published: Option<&'a str>,
}
|
/// Brainfuck language interpreter
/// As specified here: http://www.muppetlabs.com/~breadbox/bf/
#[macro_use]
extern crate clap;
use std::process;
use std::path::{Path};
use std::io;
use std::io::prelude::*;
use std::fs::File;
use std::collections::VecDeque;
use std::thread;
use std::time::Duration;
use clap::{Arg, App};
macro_rules! exit_with_error(
($($arg:tt)*) => { {
writeln!(&mut ::std::io::stderr(), $($arg)*)
.expect("Failed while printing to stderr");
process::exit(1);
} }
);
macro_rules! println_stderr(
($($arg:tt)*) => { {
let r = writeln!(&mut ::std::io::stderr(), $($arg)*);
r.expect("failed printing to stderr");
} }
);
// Parses CLI arguments, loads the brainfuck source file, and runs the
// interpreter. All argument/IO failures terminate the process via
// exit_with_error!.
fn main() {
    let args = App::new(crate_name!())
        .version(crate_version!())
        .version_short("v")
        .author(crate_authors!())
        .about("Brainfuck interpreter companion to the brain language compiler")
        .arg(Arg::with_name("input-file")
            .help("The brainfuck file to process. Should contain brainfuck instructions")
            .value_name("file")
            .takes_value(true)
            .required(true)
        )
        .arg(Arg::with_name("debug-enabled")
            .short("D")
            .long("debug")
            .help("Enables debug mode which outputs debugging information to stderr")
        )
        .arg(Arg::with_name("delay")
            .long("delay")
            .takes_value(true)
            .help("Delays execution of each instruction by this amount in ms")
        )
        .get_matches();
    // unwrap is safe: "input-file" is a required argument, so clap has
    // already exited if it is missing.
    let source_path = Path::new(args.value_of("input-file").unwrap());
    if !source_path.exists() || !source_path.is_file() {
        exit_with_error!("Not a valid file: '{}'", source_path.display());
    }
    let debug_mode = args.is_present("debug-enabled");
    // Delay defaults to 0 ms when the flag is absent; a malformed value is a
    // fatal error.
    let delay: u64 = if let Some(delay_str) = args.value_of("delay") {
        delay_str.parse().unwrap_or_else(|e: std::num::ParseIntError| exit_with_error!("Invalid delay: {}", e))
    } else {
        0
    };
    let f = File::open(source_path).unwrap_or_else(|e| {
        exit_with_error!("Could not open source file: {}", e);
    });
    // Read the program byte-by-byte into a Vec<char>; non-instruction
    // characters are ignored by the interpreter.
    let program = f.bytes().map(
        |c| c.expect("Fatal: Could not read char") as char
    ).collect::<Vec<char>>();
    interpret(program, debug_mode, delay);
}
// Executes a brainfuck program over an unbounded tape of wrapping u8 cells.
// `debug` emits a JSON trace line per instruction to stderr; `delay` sleeps
// that many milliseconds after each instruction.
fn interpret(program: Vec<char>, debug: bool, delay: u64) {
    let mut buffer: VecDeque<u8> = VecDeque::new();
    // Make sure there is at least one cell to begin with
    buffer.push_back(0u8);
    // p is the position "pointer" in the buffer
    let mut p: usize = 0;
    // i is the instruction index in the program
    let mut i: usize = 0;
    loop {
        if i >= program.len() {
            break;
        }
        let c = program[i];
        // Advance first; bracket handlers below compensate with `i - 1`.
        i += 1;
        match c {
            '>' => {
                // Grow the tape on demand when moving past the right edge.
                p += 1;
                if p >= buffer.len() {
                    buffer.push_back(0u8);
                }
            },
            '<' => {
                // Moving left of cell 0 grows the tape on the left; the
                // pointer stays at 0, which is the freshly inserted cell.
                if p == 0 {
                    buffer.push_front(0u8);
                }
                else {
                    p -= 1;
                }
            },
            '+' => buffer[p] = buffer[p].wrapping_add(1),
            '-' => buffer[p] = buffer[p].wrapping_sub(1),
            '.' => print!("{}", buffer[p] as char),
            ',' => {
                // EOF on stdin stores 0 (one common brainfuck convention).
                let chr = io::stdin().bytes().next();
                if chr.is_none() {
                    buffer[p] = 0;
                }
                else {
                    buffer[p] = chr.unwrap().expect("Could not read input");
                }
            },
            '[' => {
                // Jump forward past the matching ']' when the cell is zero.
                if buffer[p] == 0 {
                    i = find_matching(&program, i - 1) + 1;
                }
            },
            ']' => {
                // Jump back to just after the matching '[' when non-zero.
                if buffer[p] != 0 {
                    i = find_matching(&program, i - 1) + 1;
                }
            },
            // Any other character is a comment in brainfuck.
            _ => continue,
        }
        if debug {
            println_stderr!("{{\"lastInstructionIndex\": {}, \"lastInstruction\": \"{}\", \"currentPointer\": {}, \"memory\": \"{}\"}}", i-1, c, p,
                buffer.iter().fold(String::new(), |acc, v| format!("{} {}", acc, v)));
        }
        thread::sleep(Duration::from_millis(delay));
    }
}
/// Finds the matching '[' or ']' for the bracket at `start` within `program`.
///
/// Scans forward from a '[' or backward from a ']', tracking nesting depth,
/// and returns the index of the matching bracket.
/// Panics if `program[start]` is not a bracket or if no match is found.
// Takes &[char] instead of &Vec<char>: callers passing `&vec` deref-coerce,
// and slices are the idiomatic borrowed view.
fn find_matching(program: &[char], start: usize) -> usize {
    // +1 per '[' and -1 per ']'; the scan direction follows the sign.
    let direction: isize = match program[start] {
        '[' => 1,
        ']' => -1,
        _ => unreachable!(),
    };
    let mut count = direction;
    let mut current = start;
    loop {
        // Running off either end of the program means the bracket is unmatched.
        if (direction < 0 && current == 0) || (direction > 0 && current >= program.len() - 1) {
            panic!("Could not find matching parenthesis for instruction {}", start);
        }
        current = (current as isize + direction) as usize;
        match program[current] {
            '[' => count += 1,
            ']' => count -= 1,
            _ => {}
        }
        // Depth returns to zero exactly at the matching bracket.
        if count == 0 {
            break;
        }
    }
    current
}
|
#![allow(dead_code, unused_imports)]
#[cfg(test)]
mod tests;
mod tile;
mod orientation;
use tile::Tile;
use orientation::{index_rotated_grid, Rotation, Orientation, MatingSide};
/// Loads the puzzle input at `path` and parses every blank-line
/// separated section into a `Tile`.
///
/// Panics if the file cannot be read (acceptable for a puzzle binary).
fn parse_input(path: &str) -> Vec<Tile> {
    let raw = std::fs::read_to_string(path).unwrap();
    raw.split("\n\n").map(Tile::from).collect()
}
/// A tile that has been assigned a grid position and orientation.
#[derive(Debug)]
struct PlacedTile {
// Index into the shared `tiles` slice (not the tile's label).
tile_index: usize,
// Grid coordinates; may go negative until `Puzzle::solve` normalizes
// the layout so the minimum becomes (0, 0).
x: i32,
y: i32,
orientation: Orientation
}
impl PlacedTile {
    /// Edge value of side `index` for this placement, after applying the
    /// placement's orientation to the underlying tile.
    fn placed_side(&self, tiles: &[Tile], index: u32) -> u32 {
        let tile = &tiles[self.tile_index];
        tile.side_with_translations(index, self.orientation)
    }
    /// Builds the placement for `tile_index` mated against this tile's
    /// side `side_index`, oriented according to `mating_side`.
    fn place_next(&self, tile_index: usize, side_index: usize, mating_side: MatingSide) -> PlacedTile {
        // Neighbour offset for sides 0..4: top, right, bottom, left.
        let (dx, dy) = [(0, -1), (1, 0), (0, 1), (-1, 0)][side_index];
        // Every mating side is a rotation offset plus a flip flag; the
        // offset is added to the side index to get the final rotation.
        let (turns, flipped): (u32, bool) = match mating_side {
            MatingSide::NormalTop => (2, false),
            MatingSide::NormalRight => (1, false),
            MatingSide::NormalBottom => (0, false),
            MatingSide::NormalLeft => (3, false),
            MatingSide::FlippedTop => (2, true),
            MatingSide::FlippedRight => (1, true),
            MatingSide::FlippedBottom => (0, true),
            MatingSide::FlippedLeft => (3, true),
        };
        let orientation = Orientation {
            rotation: Rotation::from(turns + side_index as u32),
            flipped,
        };
        PlacedTile { tile_index, x: self.x + dx, y: self.y + dy, orientation }
    }
}
/// Lists the open (unmated) edges of the placement at `placement_index`.
///
/// Returns tuples of `(placement index, side index, edge value)` for
/// every side whose neighbouring grid cell is not yet occupied.
fn available_edges(placements: &[PlacedTile], tiles: &[Tile], placement_index: usize) -> Vec<(usize, usize, u32)> {
    let px = placements[placement_index].x;
    let py = placements[placement_index].y;
    // Neighbour offsets for sides 0..4: top, right, bottom, left.
    let side_adjacency = [(0, -1), (1, 0), (0, 1), (-1, 0)];
    side_adjacency.iter()
        .enumerate()
        // `!any(..)` instead of `.find(..).is_none()` (clippy
        // `search_is_some`): keep only sides with an empty neighbour cell.
        .filter(|(_, &(x, y))|
            !placements.iter().any(|p| p.x == (px + x) && p.y == (py + y))
        )
        .map(|(side, _)|
            (placement_index, side, placements[placement_index].placed_side(tiles, side as u32))
        )
        .collect()
}
/// Logical exclusive-or: true when exactly one of the inputs is true.
///
/// `!=` on booleans is exactly XOR and replaces the hand-rolled
/// `(a || b) && !(a && b)` expression.
fn xor(cond1: bool, cond2: bool) -> bool {
    cond1 != cond2
}
struct Puzzle(Vec<PlacedTile>);
impl Puzzle {
fn new() -> Self {
Puzzle(Vec::new())
}
/// Repeatedly tries to place every tile until no further progress.
///
/// The first tile is seeded at the origin; each pass then attempts to
/// mate every unplaced tile against the open edges of the already
/// placed ones. Finally the grid is shifted so the minimum coordinates
/// become (0, 0).
fn solve(&mut self, tiles: &[Tile]) {
let mut all_placed = false;
let mut last_placed = 0;
// Seed the grid so later passes have edges to mate against.
self.place(tiles, 0);
while !all_placed {
println!("\n\nPlacement iteration...");
let mut currently_placed = 0;
all_placed = true;
for i in 0..tiles.len() {
match self.place(tiles, i) {
true => currently_placed += 1,
false => all_placed = false
}
}
// Stall detection: bail out when a full pass places nothing new.
// `break` diverges, so the match still type-checks as the assigned
// value in the non-stalled arm.
last_placed = match last_placed == currently_placed {
true => break,
false => currently_placed
};
}
// Normalize the grid to 0, 0
let x_min = self.0.iter().map(|p| p.x).min().unwrap();
let y_min = self.0.iter().map(|p| p.y).min().unwrap();
for placed in self.0.iter_mut() {
placed.x -= x_min;
placed.y -= y_min;
}
}
/// Prints a coarse grid overview: one cell per tile showing its label,
/// rotation (as a number) and a flip marker ('x' flipped, '.' normal),
/// viewed through `orientation`.
fn print(&self, tiles: &[Tile], orientation: Orientation) {
let x_max = self.0.iter().map(|p| p.x).max().unwrap();
let y_max = self.0.iter().map(|p| p.y).max().unwrap();
println!("Status: ");
for y in 0..=y_max {
for x in 0..=x_max {
// Map the display coordinate through the requested orientation.
let (shifted_x, shifted_y) = index_rotated_grid(x as usize, y as usize, x_max as usize + 1, y_max as usize + 1, orientation);
print!("{} ", match self.0.iter().find(|p| p.x as usize == shifted_x && p.y as usize == shifted_y) {
Some(placement) => format!("|{:04} {} {}",
tiles[placement.tile_index].label,
placement.orientation.rotation as u32,
if placement.orientation.flipped { "x" } else { "." }
),
None => String::from("|none 0 f")
});
}
println!("");
}
}
/// Index into `tiles` of the tile occupying grid cell (x, y); panics
/// when the cell is empty.
fn tile_index_at(&self, x: i32, y: i32) -> usize {
    let placed = self
        .0
        .iter()
        .find(|p| p.x == x && p.y == y)
        .unwrap();
    placed.tile_index
}
/// Index into `self.0` of the placement occupying grid cell (x, y);
/// panics when the cell is empty.
fn placement_at(&self, x: i32, y: i32) -> usize {
    self.0
        .iter()
        .enumerate()
        .find(|(_, p)| p.x == x && p.y == y)
        .map(|(idx, _)| idx)
        .unwrap()
}
/// Labels of the four corner tiles (top-left, top-right, bottom-right,
/// bottom-left). Assumes `solve` already normalized coordinates so the
/// grid starts at (0, 0).
fn corner_labels(&self, tiles: &[Tile]) -> [u64; 4] {
let x_max = self.0.iter().map(|p| p.x).max().unwrap();
let y_max = self.0.iter().map(|p| p.y).max().unwrap();
[
tiles[self.tile_index_at(0, 0)].label as u64,
tiles[self.tile_index_at(x_max, 0)].label as u64,
tiles[self.tile_index_at(x_max, y_max)].label as u64,
tiles[self.tile_index_at(0, y_max)].label as u64
]
}
/// Attempts to place tile `index` onto the grid.
///
/// Returns `true` when the tile is already placed, is the seed (first)
/// tile, or was successfully mated to an open edge; `false` when no
/// mate exists this pass (the caller retries on a later pass).
fn place(&mut self, tiles: &[Tile], index: usize) -> bool {
    // Idempotent: a tile already on the grid counts as success.
    // (`if let` replaces the old match-with-unit-None arm; clippy
    // `single_match`.)
    if let Some(p) = self.0.iter().find(|t| t.tile_index == index) {
        println!("- Already placed: {} {:?}", index, p);
        return true;
    }
    println!("\nAttempting placement of piece {} ({})", index, tiles[index].label);
    println!("- {:?} !{:?}", tiles[index].sides, tiles[index].inverse_sides());
    // Place the first piece at the origin with no orientation
    if self.0.is_empty() {
        println!("- Placing initial piece at 0, 0, 0, false");
        let orientation = Orientation { rotation: Rotation::RightSideUp, flipped: false };
        self.0.push(PlacedTile { tile_index: index, x: 0, y: 0, orientation });
        return true;
    }
    // Generate a mating edge list in form (PlacedTile index, side index, value)
    // (`flat_map` replaces `.map(..).flatten()`.)
    let possible_edges: Vec<(usize, usize, u32)> = (0..(self.0.len()))
        .flat_map(|i| available_edges(&self.0, tiles, i))
        .collect();
    println!("- Possible mates: {:?}", possible_edges);
    // See if any mate is possible
    let tile = &tiles[index];
    // The mate will be of the form (PlacedTile index, side_index, mate_side, mate_flipped)
    let mate = possible_edges.iter().find_map(|&(placement_index, side_index, side_value)|
        tile.mates(side_value).map(|mating_side| (placement_index, side_index, mating_side, side_value))
    );
    // If no mate is possible, return false
    let mate = match mate {
        Some(m) => m,
        None => {
            println!("- No mate found.");
            return false;
        }
    };
    println!(
        "- Mate found: [ptidx: {}, ptside: {}, mateside: {:?}, value: {}, mate_value: {}]",
        mate.0, mate.1, mate.2, mate.3, tile::invert_side(10, mate.3)
    );
    // Place the piece
    let placement = self.0[mate.0].place_next(index, mate.1, mate.2);
    println!("- {}: {:?}", self.0.len(), placement);
    self.0.push(placement);
    true
}
/// Grid width in tiles; assumes `solve` normalized the minimum x to 0,
/// otherwise the count would be off by the offset.
fn width(&self) -> usize {
self.0.iter().map(|p| p.x).max().unwrap() as usize + 1
}
/// Grid height in tiles; assumes `solve` normalized the minimum y to 0.
fn height(&self) -> usize {
self.0.iter().map(|p| p.y).max().unwrap() as usize + 1
}
/// Renders the puzzle under `orientation` and returns an iterator over
/// every 20x3-pixel window of the image (the sea-monster footprint).
fn iter_sea_monster_windows<'a>(&'a self, tiles: &'a [Tile], orientation: Orientation) -> SeaMonsterWindowIterator {
let rendered = self.render(tiles, orientation);
SeaMonsterWindowIterator {
rendered, index: 0, width: self.width(), height: self.height()
}
}
/// Prints the fully rendered image ('#' set, '.' unset), inserting a
/// blank line between tile rows and a space between tile columns.
fn print_entirety(&self, tiles: &[Tile], orientation: Orientation) {
    let rendered = self.render(tiles, orientation);
    // Rows span the grid height; the previous code looped over `width`
    // here, which only worked because the puzzle happens to be square.
    for y in 0..(self.height() * 8) {
        for x in 0..(self.width() * 8) {
            let index = (y * (self.width() * 8)) + x;
            print!("{}{}{}",
                if y % 8 == 0 && x == 0 { "\n" } else { "" },
                if x % 8 == 0 { " " } else { "" },
                if rendered[index] { "#" } else { "." }
            );
        }
        println!(" ");
    }
}
/// Renders the solved puzzle to a flat, row-major pixel grid and then
/// re-samples it through `orientation`.
///
/// Each tile contributes an 8x8 panel (the 10x10 tile minus borders —
/// presumed from the fixed panel size; confirm against `Tile::index`).
fn render(&self, tiles: &[Tile], orientation: Orientation) -> Vec<bool> {
    // Hoisted loop invariants: pixels per row and total pixel count.
    let row = self.width() * 8;
    let total = self.width() * self.height() * 64;
    let unrotated: Vec<bool> = (0..total)
        .map(|index| {
            let x = index % row;
            let y = index / row;
            // Get the relative piece
            let x_panel = x / 8;
            let y_panel = y / 8;
            // Get the index within there
            let x_in_panel = x % 8;
            let y_in_panel = y % 8;
            // Retrieve it
            let placement = self.placement_at(x_panel as i32, y_panel as i32);
            let placed_tile = &self.0[placement];
            let tile = &tiles[placed_tile.tile_index];
            tile.index(x_in_panel, y_in_panel, placed_tile.orientation)
        }).collect();
    (0..total).map(|index| {
        let (x, y) = (index % row, index / row);
        // The previous code passed `width * 8` for the grid height too,
        // which only rotated correctly for square puzzles; use the real
        // pixel height.
        let (ind_x, ind_y) = index_rotated_grid(x, y, row, self.height() * 8, orientation);
        let transformed_index = ind_y * row + ind_x;
        unrotated[transformed_index]
    }).collect()
}
/// Scans the rendered image in all eight orientations (four rotations,
/// each optionally flipped) and returns the monster count of the first
/// orientation with at least one match, plus that orientation.
///
/// # Panics
/// Panics if no orientation contains a sea monster.
fn find_sea_monsters(&self, tiles: &[Tile]) -> (usize, Orientation) {
let orientations = [
Orientation { rotation: Rotation::RightSideUp, flipped: false },
Orientation { rotation: Rotation::RotatedOnceClockwise, flipped: false },
Orientation { rotation: Rotation::UpsideDown, flipped: false },
Orientation { rotation: Rotation::RotatedOnceCounterClockwise, flipped: false },
Orientation { rotation: Rotation::RightSideUp, flipped: true },
Orientation { rotation: Rotation::RotatedOnceClockwise, flipped: true },
Orientation { rotation: Rotation::UpsideDown, flipped: true },
Orientation { rotation: Rotation::RotatedOnceCounterClockwise, flipped: true },
];
let options = orientations.iter()
.map(|&orientation| {
// Count windows that match the monster pattern, logging each hit.
let count = self.iter_sea_monster_windows(tiles, orientation)
.filter(|(_, _, window)| is_sea_monster(window))
.map(|(x, y, window)| {
println!("Monster at: {}, {}", x, y);
(x, y, window)
})
.count();
(orientation, count)
})
.collect::<Vec<(Orientation, usize)>>();
println!("{:?}", options);
options
.iter()
.find(|(_, count)| *count > 0)
.map(|&(orientation, count)| (count, orientation))
.unwrap()
}
}
/// Yields every 20x3 window of a rendered image together with the
/// window's top-left pixel coordinate, for sea-monster scanning.
struct SeaMonsterWindowIterator {
    // Flattened pixel grid, row-major, `width * 8` pixels per row.
    rendered: Vec<bool>,
    // Linear index of the next window to yield.
    index: usize,
    // Grid dimensions in tiles; each tile contributes 8 pixels.
    width: usize,
    height: usize
}
impl Iterator for SeaMonsterWindowIterator {
    type Item = (usize, usize, Vec<bool>);
    fn next(&mut self) -> Option<Self::Item> {
        // A window is 20 pixels wide and 3 tall, so valid start positions
        // shrink the scan area by 19 and 2 respectively.
        let usable_width = (self.width * 8) - 19;
        let usable_height = (self.height * 8) - 2;
        if self.index == usable_height * usable_width {
            return None;
        }
        // Decode the linear window index into a start coordinate.
        let start_x = self.index % usable_width;
        let start_y = self.index / usable_width;
        // Copy the window's 60 pixels, row by row.
        let mut window = Vec::with_capacity(60);
        for dy in 0..3 {
            for dx in 0..20 {
                let pixel = ((start_y + dy) * self.width * 8) + (start_x + dx);
                window.push(self.rendered[pixel]);
            }
        }
        self.index += 1;
        Some((start_x, start_y, window))
    }
}
// 0 2 4 6 8101214161820
//                   #
// #    ##    ##    ###
//  #  #  #  #  #  #
/// Returns true when the 20x3 `window` has a set pixel at every one of
/// the 15 cells of the sea-monster pattern (extra set pixels allowed).
///
/// Takes a slice rather than `&Vec<bool>` (clippy `ptr_arg`); existing
/// `&Vec` callers still work via deref coercion.
fn is_sea_monster(window: &[bool]) -> bool {
    // (x, y) offsets of the monster's 15 body pixels within the window.
    const POSITIVES: [(usize, usize); 15] = [
        (18, 0),
        (0, 1), (5, 1), (6, 1), (11, 1), (12, 1), (17, 1), (18, 1), (19, 1),
        (1, 2), (4, 2), (7, 2), (10, 2), (13, 2), (16, 2)
    ];
    POSITIVES
        .iter()
        .map(|(x, y)| y * 20 + x)
        .all(|idx| window[idx])
}
/// Advent-of-Code day-20-style entry point: part one multiplies the
/// corner tile labels; part two subtracts sea-monster pixels from the
/// total set-pixel count.
fn main() {
// Part One
let tiles = parse_input("input.txt");
let mut puzzle = Puzzle::new();
puzzle.solve(&tiles);
puzzle.print(&tiles, Orientation::neutral());
let corner_product: u64 = puzzle.corner_labels(&tiles).iter().product();
println!("Part one: {}", corner_product);
// Part two
// NOTE(review): `t.trues()` counts set cells over whole tiles (borders
// included), while monsters are detected on the border-stripped render;
// also assumes monsters never overlap (15 pixels each) — verify both
// against the expected answer for this input.
let total_hash_count = tiles.iter().map(|t| t.trues()).sum::<usize>();
let (sea_monsters, _) = puzzle.find_sea_monsters(&tiles);
println!("Part two: {}, {}, {}", total_hash_count, sea_monsters, (total_hash_count - (15 * sea_monsters)));
}
|
use async_std::io::Read as AsyncRead;
use async_std::prelude::*;
use async_std::task::{ready, Context, Poll};
use std::io;
use std::pin::Pin;
use std::time::Duration;
use std::io::Write;
use flate2::{GzBuilder, Compression};
/// Growable in-memory sink used as the output target of the gzip
/// encoder, so compressed bytes can be swapped out after each flush.
#[derive(Debug)]
struct WriteBuf {
    buf: Vec<u8>,
}
impl Write for WriteBuf {
    /// Appends `buf` to the in-memory buffer; never fails and never
    /// performs a short write.
    fn write(&mut self, buf: &[u8]) -> std::io::Result<usize> {
        // `extend_from_slice` is the direct, memcpy-backed way to append
        // a byte slice (vs. iterating item-by-item with `extend`).
        self.buf.extend_from_slice(buf);
        Ok(buf.len())
    }
    /// Nothing to flush: the bytes are already in memory.
    fn flush(&mut self) -> std::io::Result<()> {
        Ok(())
    }
}
pin_project_lite::pin_project! {
    /// An SSE protocol encoder.
    #[derive(Debug)]
    pub struct Encoder {
        // Message currently being read out to the consumer; None while
        // waiting for the next message from `receiver`.
        buf: Option<Vec<u8>>,
        #[pin]
        receiver: async_channel::Receiver<Vec<u8>>,
        // How many bytes of `buf` have already been handed to the reader.
        cursor: usize,
        // Streaming gzip compressor writing into an in-memory WriteBuf.
        gz: flate2::write::GzEncoder<WriteBuf>,
        // When false, messages pass through uncompressed.
        gz_enabled: bool,
    }
}
impl AsyncRead for Encoder {
    /// Pulls the next message from the channel (gzip-compressing it when
    /// enabled) and copies it into `buf`, resuming mid-message across
    /// calls via `self.cursor`. Returns `Poll::Ready(Ok(0))` once the
    /// sending side has been dropped.
    fn poll_read(
        mut self: Pin<&mut Self>,
        cx: &mut Context<'_>,
        buf: &mut [u8],
    ) -> Poll<io::Result<usize>> {
        // Request a new buffer if we don't have one yet.
        if self.buf.is_none() {
            self.buf = match ready!(Pin::new(&mut self.receiver).poll_next(cx)) {
                Some(mut buf) => {
                    log::trace!("> Received a new buffer with len {}", buf.len());
                    if self.gz_enabled {
                        // Compress the message, swap the compressed bytes
                        // out of the WriteBuf, then clear the leftover raw
                        // bytes so the next message starts clean.
                        self.gz.write_all(&buf)?;
                        self.gz.flush()?;
                        let inner = self.gz.get_mut();
                        std::mem::swap(&mut inner.buf, &mut buf);
                        inner.buf.clear();
                    }
                    Some(buf)
                }
                None => {
                    log::trace!("> Encoder done reading");
                    return Poll::Ready(Ok(0));
                }
            };
        };
        // Copy from the current read position. The previous code copied
        // `local_buf[..max]` on every call — restarting at byte 0 — so a
        // reader whose `buf` was smaller than the message received the
        // same leading bytes repeatedly and the tail was never delivered.
        let cursor = self.cursor;
        let local_buf = self.buf.as_ref().unwrap();
        let local_len = local_buf.len();
        let max = buf.len().min(local_len - cursor);
        buf[..max].copy_from_slice(&local_buf[cursor..cursor + max]);
        self.cursor += max;
        // Reset values if we're done reading.
        if self.cursor == local_len {
            self.buf = None;
            self.cursor = 0;
        };
        // Return bytes read.
        Poll::Ready(Ok(max))
    }
}
// impl AsyncBufRead for Encoder {
// fn poll_fill_buf(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<io::Result<&[u8]>> {
// match ready!(self.project().receiver.poll_next(cx)) {
// Some(buf) => match &self.buf {
// None => self.project().buf = &mut Some(buf),
// Some(local_buf) => local_buf.extend(buf),
// },
// None => {
// if let None = self.buf {
// self.project().buf = &mut Some(vec![]);
// };
// }
// };
// Poll::Ready(Ok(self.buf.as_ref().unwrap()))
// }
// fn consume(self: Pin<&mut Self>, amt: usize) {
// Pin::new(self).cursor += amt;
// }
// }
/// The sending side of the encoder.
///
/// Cloneable handle; payloads sent here are consumed by the paired
/// `Encoder`'s `poll_read`.
#[derive(Debug, Clone)]
pub struct Sender(async_channel::Sender<Vec<u8>>);
/// Create a new SSE encoder.
///
/// Returns the `Sender` half used to enqueue messages and the `Encoder`
/// half that yields the (optionally gzip-compressed) byte stream.
/// `is_gzip` selects whether payloads are compressed before delivery.
pub fn encode(is_gzip: bool) -> (Sender, Encoder) {
    // Bounded at 1 so producers are backpressured until the consumer
    // drains the previous message.
    let (sender, receiver) = async_channel::bounded(1);
    let write_buf = WriteBuf { buf: Vec::new() };
    let gz = GzBuilder::new()
        .write(write_buf, Compression::default());
    let encoder = Encoder {
        receiver,
        buf: None,
        cursor: 0,
        // Field-init shorthand replaces the redundant `gz: gz`.
        gz,
        gz_enabled: is_gzip,
    };
    (Sender(sender), encoder)
}
impl Sender {
    /// Forwards raw bytes to the encoder channel, mapping a closed
    /// channel (receiver dropped) to `ConnectionAborted`.
    async fn inner_send(&self, bytes: impl Into<Vec<u8>>) -> io::Result<()> {
        self.0
            .send(bytes.into())
            .await
            .map_err(|_| io::Error::new(io::ErrorKind::ConnectionAborted, "sse disconnected"))
    }
    /// Send a new message over SSE.
    ///
    /// Emits `event:`, optional `id:` and `data:` fields terminated by
    /// the blank line that ends an SSE event.
    pub async fn send(&self, name: &str, data: &str, id: Option<&str>) -> io::Result<()> {
        // Write the id
        let id_string: String = if let Some(id) = id {
            format!("id:{}\n", id)
        } else {
            "".into()
        };
        let msg = format!("event:{}\n{}data:{}\n\n", name, id_string, data);
        self.inner_send(msg).await?;
        Ok(())
    }
    /// Send a new "retry" message over SSE.
    #[allow(dead_code)]
    pub async fn send_retry(&self, dur: Duration, id: Option<&str>) -> io::Result<()> {
        // Write the id
        if let Some(id) = id {
            self.inner_send(format!("id:{}\n", id)).await?;
        }
        // The SSE `retry` field is a reconnection time in *milliseconds*
        // (WHATWG HTML spec). The previous code truncated the duration to
        // whole seconds, telling clients to reconnect 1000x sooner than
        // requested.
        let dur = dur.as_millis();
        let msg = format!("retry:{}\n\n", dur);
        self.inner_send(msg).await?;
        Ok(())
    }
}
|
//! # Parse structures from a byte buffer
use super::file::*;
use nom::{
bytes::complete::take, combinator::map, number::complete::le_u32, sequence::tuple, IResult,
};
use std::convert::TryInto;
/// Consumes exactly four bytes and returns them as a fixed-size array.
fn u8_4(i: &[u8]) -> IResult<&[u8], [u8; 4]> {
let (i, slice) = take(4usize)(i)?;
// `take(4)` guarantees a 4-byte slice, so the conversion cannot fail.
Ok((i, slice.try_into().unwrap()))
}
/// Marker trait that implies that `Self` can be parsed in little-endian mode
///
/// The implementations in this module each consume exactly `BYTE_COUNT`
/// bytes from the input.
#[allow(clippy::upper_case_acronyms)]
pub trait ParseLE: Sized + Copy {
/// Same as `std::mem::size_of::<Self>()`
const BYTE_COUNT: usize;
/// A byte array of the same length that can be parsed as `Self`
type Buf: AsMut<[u8]> + Default;
/// Function to parse the buffer into self
fn parse(i: &[u8]) -> IResult<&[u8], Self>;
}
// One little-endian u32 (4 bytes).
impl ParseLE for u32 {
const BYTE_COUNT: usize = 4;
type Buf = [u8; 4];
fn parse(input: &[u8]) -> IResult<&[u8], u32> {
le_u32(input)
}
}
// Two consecutive little-endian u32 values (8 bytes).
impl ParseLE for (u32, u32) {
const BYTE_COUNT: usize = 8;
type Buf = [u8; 8];
fn parse(input: &[u8]) -> IResult<&[u8], (u32, u32)> {
tuple((le_u32, le_u32))(input)
}
}
// A little-endian u32 followed by four raw bytes (8 bytes total);
// used for field data, where the value bytes are interpreted later.
impl ParseLE for (u32, [u8; 4]) {
const BYTE_COUNT: usize = 8;
type Buf = [u8; 8];
fn parse(input: &[u8]) -> IResult<&[u8], (u32, [u8; 4])> {
tuple((le_u32, u8_4))(input)
}
}
// Three consecutive little-endian u32 values (12 bytes).
impl ParseLE for (u32, u32, u32) {
const BYTE_COUNT: usize = 12;
type Buf = [u8; 12];
fn parse(input: &[u8]) -> IResult<&[u8], (u32, u32, u32)> {
tuple((le_u32, le_u32, le_u32))(input)
}
}
/// Trait that implements parsing from a FDB file
#[allow(clippy::upper_case_acronyms)]
pub trait ParseFDB: Sized + Copy {
/// The [`ParseLE`] compatible type that is equivalent to `Self`
type IO: ParseLE;
/// Create `Self` from an instance of IO
fn new(i: Self::IO) -> Self;
/// Parse an FDB structure from a input slice
///
/// This function chains [`ParseLE::parse`] with [`ParseFDB::new`].
/// Errors propagate unchanged from the underlying [`ParseLE::parse`].
fn parse(input: &[u8]) -> IResult<&[u8], Self> {
map(Self::IO::parse, Self::new)(input)
}
}
// (count, base offset) pair describing an array within the file.
impl ParseFDB for ArrayHeader {
type IO = (u32, u32);
fn new((a, b): Self::IO) -> Self {
ArrayHeader {
count: a,
base_offset: b,
}
}
}
// Table definition: column count plus name/column-list addresses.
impl ParseFDB for FDBTableDefHeader {
type IO = (u32, u32, u32);
fn new((a, b, c): Self::IO) -> Self {
FDBTableDefHeader {
column_count: a,
table_name_addr: b,
column_header_list_addr: c,
}
}
}
// Table data: just the bucket array header.
// NOTE(review): builds the inner header with `ArrayHeader::new` while
// FDBRowHeader/FDBHeader below use `ArrayHeader::from` — both appear to
// construct the same value; consider unifying on one.
impl ParseFDB for FDBTableDataHeader {
type IO = (u32, u32);
fn new((a, b): Self::IO) -> Self {
FDBTableDataHeader {
buckets: ArrayHeader::new((a, b)),
}
}
}
// Column: data type id plus name address.
impl ParseFDB for FDBColumnHeader {
type IO = (u32, u32);
fn new((a, b): Self::IO) -> Self {
FDBColumnHeader {
column_data_type: a,
column_name_addr: b,
}
}
}
// Linked-list node for row headers: payload address plus next pointer.
impl ParseFDB for FDBRowHeaderListEntry {
type IO = (u32, u32);
fn new((a, b): Self::IO) -> Self {
FDBRowHeaderListEntry {
row_header_addr: a,
row_header_list_next_addr: b,
}
}
}
// Row: the field array header.
impl ParseFDB for FDBRowHeader {
type IO = (u32, u32);
fn new(io: Self::IO) -> Self {
FDBRowHeader {
fields: ArrayHeader::from(io),
}
}
}
// Table: addresses of the definition and data headers.
impl ParseFDB for FDBTableHeader {
type IO = (u32, u32);
fn new((a, b): Self::IO) -> Self {
FDBTableHeader {
table_def_header_addr: a,
table_data_header_addr: b,
}
}
}
// Top-level file header: the table array header.
impl ParseFDB for FDBHeader {
type IO = (u32, u32);
fn new((a, b): Self::IO) -> Self {
FDBHeader {
tables: ArrayHeader::from((a, b)),
}
}
}
// Hash bucket: address of the first row-header list entry.
impl ParseFDB for FDBBucketHeader {
type IO = u32;
fn new(a: Self::IO) -> Self {
FDBBucketHeader {
row_header_list_head_addr: a,
}
}
}
// Field value: data type id plus four raw value bytes.
impl ParseFDB for FDBFieldData {
type IO = (u32, [u8; 4]);
fn new((data_type, value): Self::IO) -> Self {
FDBFieldData { data_type, value }
}
}
|
#![feature(proc_macro_hygiene, decl_macro)]
#[macro_use] extern crate rocket;
use rocket::response::content;
/// GET /json — returns a small JSON document.
#[get("/json")]
fn json() -> content::Json<&'static str> {
    // JSON requires double-quoted strings; the previous single-quoted
    // body was not valid JSON despite the application/json responder.
    content::Json("{ \"hi\": \"world\" }")
}
/// GET /world — static greeting response.
#[get("/world")] // <- route attribute
fn world() -> &'static str { // <- request handler
"Hello new, world!"
}
/// GET / — root greeting response.
#[get("/")]
fn index() -> &'static str {
"Hello my, world!"
}
/// Boots the Rocket server with the three routes mounted at the root.
/// Blocks until the server shuts down.
fn main() {
rocket::ignite().mount("/", routes![index, world, json])
.launch();
}
mod args;
use std::borrow::Cow;
/// A command parsed from user input.
pub enum Command<'a> {
/// Connect to the named server; `Cow` lets the name borrow from the
/// input when no transformation was needed.
Connect { server: Cow<'a, str> },
/// Disconnect from the current server.
Disconnect,
/// List groups.
Groups,
/// List users.
Users,
}
|
//! Tests auto-converted from "sass-spec/spec/non_conformant/extend-tests"
#[allow(unused)]
use super::rsass;
// From "sass-spec/spec/non_conformant/extend-tests/001_test_basic.hrx"
#[test]
#[ignore] // unexpected error
fn t001_test_basic() {
assert_eq!(
rsass(
".foo {a: b}\
\n.bar {@extend .foo}\
\n"
)
.unwrap(),
".foo, .bar {\
\n a: b;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/002_test_basic.hrx"
#[test]
#[ignore] // unexpected error
fn t002_test_basic() {
assert_eq!(
rsass(
".bar {@extend .foo}\
\n.foo {a: b}\
\n"
)
.unwrap(),
".foo, .bar {\
\n a: b;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/003_test_basic.hrx"
#[test]
#[ignore] // unexpected error
fn t003_test_basic() {
assert_eq!(
rsass(
".foo {a: b}\
\n.bar {c: d; @extend .foo}\
\n"
)
.unwrap(),
".foo, .bar {\
\n a: b;\
\n}\
\n.bar {\
\n c: d;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/004_test_basic.hrx"
// Checks that an `@extend` placed before other declarations still
// merges the extender into the extended selector group.
#[test]
#[ignore] // wrong result
fn t004_test_basic() {
assert_eq!(
rsass(
".foo {a: b}\
\n.bar {@extend .foo; c: d}\
\n"
)
.unwrap(),
".foo, .bar {\
\n a: b;\
\n}\
\n.bar {\
\n c: d;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/005_test_multiple_targets.hrx"
#[test]
#[ignore] // unexepected error
fn t005_test_multiple_targets() {
assert_eq!(
rsass(
".foo {a: b}\
\n.bar {@extend .foo}\
\n.blip .foo {c: d}\
\n"
)
.unwrap(),
".foo, .bar {\
\n a: b;\
\n}\
\n.blip .foo, .blip .bar {\
\n c: d;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/006_test_multiple_extendees.hrx"
#[test]
#[ignore] // unexepected error
fn t006_test_multiple_extendees() {
assert_eq!(
rsass(
".foo {a: b}\
\n.bar {c: d}\
\n.baz {@extend .foo; @extend .bar}\
\n"
)
.unwrap(),
".foo, .baz {\
\n a: b;\
\n}\
\n.bar, .baz {\
\n c: d;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/007_test_multiple_extends_with_single_extender_and_single_target.hrx"
#[test]
#[ignore] // unexepected error
fn t007_test_multiple_extends_with_single_extender_and_single_target() {
assert_eq!(
rsass(
".foo .bar {a: b}\
\n.baz {@extend .foo; @extend .bar}\
\n"
)
.unwrap(),
".foo .bar, .foo .baz, .baz .bar, .baz .baz {\
\n a: b;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/008_test_multiple_extends_with_single_extender_and_single_target.hrx"
#[test]
#[ignore] // unexepected error
fn t008_test_multiple_extends_with_single_extender_and_single_target() {
assert_eq!(
rsass(
".foo.bar {a: b}\
\n.baz {@extend .foo; @extend .bar}\
\n"
)
.unwrap(),
".foo.bar, .baz {\
\n a: b;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/009_test_multiple_extends_with_multiple_extenders_and_single_target.hrx"
#[test]
#[ignore] // unexepected error
fn t009_test_multiple_extends_with_multiple_extenders_and_single_target() {
assert_eq!(
rsass(
".foo .bar {a: b}\
\n.baz {@extend .foo}\
\n.bang {@extend .bar}\
\n"
)
.unwrap(),
".foo .bar, .foo .bang, .baz .bar, .baz .bang {\
\n a: b;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/010_test_multiple_extends_with_multiple_extenders_and_single_target.hrx"
#[test]
#[ignore] // unexepected error
fn t010_test_multiple_extends_with_multiple_extenders_and_single_target() {
assert_eq!(
rsass(
".foo.bar {a: b}\
\n.baz {@extend .foo}\
\n.bang {@extend .bar}\
\n"
)
.unwrap(),
".foo.bar, .foo.bang, .bar.baz, .baz.bang {\
\n a: b;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/011_test_chained_extends.hrx"
#[test]
#[ignore] // unexepected error
fn t011_test_chained_extends() {
assert_eq!(
rsass(
".foo {a: b}\
\n.bar {@extend .foo}\
\n.baz {@extend .bar}\
\n.bip {@extend .bar}\
\n"
)
.unwrap(),
".foo, .bar, .bip, .baz {\
\n a: b;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/012_test_dynamic_extendee.hrx"
#[test]
#[ignore] // unexepected error
fn t012_test_dynamic_extendee() {
assert_eq!(
rsass(
".foo {a: b}\
\n.bar {@extend #{\".foo\"}}\
\n"
)
.unwrap(),
".foo, .bar {\
\n a: b;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/013_test_dynamic_extendee.hrx"
#[test]
#[ignore] // unexepected error
fn t013_test_dynamic_extendee() {
assert_eq!(
rsass(
"[baz^=\"blip12px\"] {a: b}\
\n.bar {@extend [baz^=\"blip#{12px}\"]}\
\n"
)
.unwrap(),
"[baz^=blip12px], .bar {\
\n a: b;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/014_test_nested_target.hrx"
#[test]
#[ignore] // unexepected error
fn t014_test_nested_target() {
assert_eq!(
rsass(
".foo .bar {a: b}\
\n.baz {@extend .bar}\
\n"
)
.unwrap(),
".foo .bar, .foo .baz {\
\n a: b;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/015_test_target_with_child.hrx"
#[test]
#[ignore] // unexepected error
fn t015_test_target_with_child() {
assert_eq!(
rsass(
".foo .bar {a: b}\
\n.baz {@extend .foo}\
\n"
)
.unwrap(),
".foo .bar, .baz .bar {\
\n a: b;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/016_test_class_unification.hrx"
#[test]
#[ignore] // unexepected error
fn t016_test_class_unification() {
assert_eq!(
rsass(
"%-a .foo.bar {a: b}\
\n.baz {@extend .foo} -a {@extend %-a}\
\n"
)
.unwrap(),
"-a .foo.bar, -a .bar.baz {\
\n a: b;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/017_test_class_unification.hrx"
#[test]
#[ignore] // unexepected error
fn t017_test_class_unification() {
assert_eq!(
rsass(
"%-a .foo.baz {a: b}\
\n.baz {@extend .foo} -a {@extend %-a}\
\n"
)
.unwrap(),
"-a .baz {\
\n a: b;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/018_test_id_unification.hrx"
#[test]
#[ignore] // unexepected error
fn t018_test_id_unification() {
assert_eq!(
rsass(
"%-a .foo.bar {a: b}\
\n#baz {@extend .foo} -a {@extend %-a}\
\n"
)
.unwrap(),
"-a .foo.bar, -a .bar#baz {\
\n a: b;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/019_test_id_unification.hrx"
#[test]
#[ignore] // unexepected error
fn t019_test_id_unification() {
assert_eq!(
rsass(
"%-a .foo#baz {a: b}\
\n#baz {@extend .foo} -a {@extend %-a}\
\n"
)
.unwrap(),
"-a #baz {\
\n a: b;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/020_test_universal_unification_with_simple_target.hrx"
#[test]
#[ignore] // unexepected error
fn t020_test_universal_unification_with_simple_target() {
assert_eq!(
rsass(
"%-a .foo {a: b}\
\n* {@extend .foo} -a {@extend %-a}\
\n"
)
.unwrap(),
"-a .foo, -a * {\
\n a: b;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/021_test_universal_unification_with_simple_target.hrx"
#[test]
#[ignore] // unexepected error
fn t021_test_universal_unification_with_simple_target() {
assert_eq!(
rsass(
"%-a .foo {a: b}\
\n*|* {@extend .foo} -a {@extend %-a}\
\n"
)
.unwrap(),
"-a .foo, -a *|* {\
\n a: b;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/022_test_universal_unification_with_simple_target.hrx"
#[test]
#[ignore] // unexepected error
fn t022_test_universal_unification_with_simple_target() {
assert_eq!(
rsass(
"%-a .foo.bar {a: b}\
\n* {@extend .foo} -a {@extend %-a}\
\n"
)
.unwrap(),
"-a .bar {\
\n a: b;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/023_test_universal_unification_with_simple_target.hrx"
#[test]
#[ignore] // unexepected error
fn t023_test_universal_unification_with_simple_target() {
assert_eq!(
rsass(
"%-a .foo.bar {a: b}\
\n*|* {@extend .foo} -a {@extend %-a}\
\n"
)
.unwrap(),
"-a .bar {\
\n a: b;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/024_test_universal_unification_with_simple_target.hrx"
#[test]
#[ignore] // unexepected error
fn t024_test_universal_unification_with_simple_target() {
assert_eq!(
rsass(
"%-a .foo.bar {a: b}\
\nns|* {@extend .foo} -a {@extend %-a}\
\n"
)
.unwrap(),
"-a .foo.bar, -a ns|*.bar {\
\n a: b;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/025_test_universal_unification_with_namespaceless_universal_target.hrx"
#[test]
#[ignore] // unexepected error
fn t025_test_universal_unification_with_namespaceless_universal_target() {
assert_eq!(
rsass(
"%-a *.foo {a: b}\
\n* {@extend .foo} -a {@extend %-a}\
\n"
)
.unwrap(),
"-a * {\
\n a: b;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/026_test_universal_unification_with_namespaceless_universal_target.hrx"
#[test]
#[ignore] // unexepected error
fn t026_test_universal_unification_with_namespaceless_universal_target() {
assert_eq!(
rsass(
"%-a *.foo {a: b}\
\n*|* {@extend .foo} -a {@extend %-a}\
\n"
)
.unwrap(),
"-a * {\
\n a: b;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/027_test_universal_unification_with_namespaceless_universal_target.hrx"
#[test]
#[ignore] // unexepected error
fn t027_test_universal_unification_with_namespaceless_universal_target() {
assert_eq!(
rsass(
"%-a *|*.foo {a: b}\
\n* {@extend .foo} -a {@extend %-a}\
\n"
)
.unwrap(),
"-a *|*.foo, -a * {\
\n a: b;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/028_test_universal_unification_with_namespaceless_universal_target.hrx"
#[test]
#[ignore] // unexepected error
fn t028_test_universal_unification_with_namespaceless_universal_target() {
assert_eq!(
rsass(
"%-a *|*.foo {a: b}\
\n*|* {@extend .foo} -a {@extend %-a}\
\n"
)
.unwrap(),
"-a *|* {\
\n a: b;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/029_test_universal_unification_with_namespaceless_universal_target.hrx"
#[test]
#[ignore] // unexepected error
fn t029_test_universal_unification_with_namespaceless_universal_target() {
assert_eq!(
rsass(
"%-a *.foo {a: b}\
\nns|* {@extend .foo} -a {@extend %-a}\
\n"
)
.unwrap(),
"-a *.foo {\
\n a: b;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/030_test_universal_unification_with_namespaceless_universal_target.hrx"
#[test]
#[ignore] // unexepected error
fn t030_test_universal_unification_with_namespaceless_universal_target() {
assert_eq!(
rsass(
"%-a *|*.foo {a: b}\
\nns|* {@extend .foo} -a {@extend %-a}\
\n"
)
.unwrap(),
"-a *|*.foo, -a ns|* {\
\n a: b;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/031_test_universal_unification_with_namespaced_universal_target.hrx"
#[test]
#[ignore] // unexepected error
fn t031_test_universal_unification_with_namespaced_universal_target() {
assert_eq!(
rsass(
"%-a ns|*.foo {a: b}\
\n* {@extend .foo} -a {@extend %-a}\
\n"
)
.unwrap(),
"-a ns|*.foo {\
\n a: b;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/032_test_universal_unification_with_namespaced_universal_target.hrx"
#[test]
#[ignore] // unexepected error
fn t032_test_universal_unification_with_namespaced_universal_target() {
assert_eq!(
rsass(
"%-a ns|*.foo {a: b}\
\n*|* {@extend .foo} -a {@extend %-a}\
\n"
)
.unwrap(),
"-a ns|* {\
\n a: b;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/033_test_universal_unification_with_namespaced_universal_target.hrx"
#[test]
#[ignore] // unexepected error
fn t033_test_universal_unification_with_namespaced_universal_target() {
assert_eq!(
rsass(
"%-a ns|*.foo {a: b}\
\nns|* {@extend .foo} -a {@extend %-a}\
\n"
)
.unwrap(),
"-a ns|* {\
\n a: b;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/034_test_universal_unification_with_namespaceless_element_target.hrx"
#[test]
#[ignore] // unexepected error
fn t034_test_universal_unification_with_namespaceless_element_target() {
assert_eq!(
rsass(
"%-a a.foo {a: b}\
\n* {@extend .foo} -a {@extend %-a}\
\n"
)
.unwrap(),
"-a a {\
\n a: b;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/035_test_universal_unification_with_namespaceless_element_target.hrx"
#[test]
#[ignore] // unexepected error
fn t035_test_universal_unification_with_namespaceless_element_target() {
assert_eq!(
rsass(
"%-a a.foo {a: b}\
\n*|* {@extend .foo} -a {@extend %-a}\
\n"
)
.unwrap(),
"-a a {\
\n a: b;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/036_test_universal_unification_with_namespaceless_element_target.hrx"
#[test]
#[ignore] // unexepected error
fn t036_test_universal_unification_with_namespaceless_element_target() {
assert_eq!(
rsass(
"%-a *|a.foo {a: b}\
\n* {@extend .foo} -a {@extend %-a}\
\n"
)
.unwrap(),
"-a *|a.foo, -a a {\
\n a: b;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/037_test_universal_unification_with_namespaceless_element_target.hrx"
#[test]
#[ignore] // unexepected error
fn t037_test_universal_unification_with_namespaceless_element_target() {
assert_eq!(
rsass(
"%-a *|a.foo {a: b}\
\n*|* {@extend .foo} -a {@extend %-a}\
\n"
)
.unwrap(),
"-a *|a {\
\n a: b;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/038_test_universal_unification_with_namespaceless_element_target.hrx"
#[test]
#[ignore] // unexepected error
fn t038_test_universal_unification_with_namespaceless_element_target() {
assert_eq!(
rsass(
"%-a a.foo {a: b}\
\nns|* {@extend .foo} -a {@extend %-a}\
\n"
)
.unwrap(),
"-a a.foo {\
\n a: b;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/039_test_universal_unification_with_namespaceless_element_target.hrx"
#[test]
#[ignore] // unexepected error
fn t039_test_universal_unification_with_namespaceless_element_target() {
assert_eq!(
rsass(
"%-a *|a.foo {a: b}\
\nns|* {@extend .foo} -a {@extend %-a}\
\n"
)
.unwrap(),
"-a *|a.foo, -a ns|a {\
\n a: b;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/040_test_universal_unification_with_namespaced_element_target.hrx"
#[test]
#[ignore] // unexpected error
fn t040_test_universal_unification_with_namespaced_element_target() {
    assert_eq!(
        rsass(
            "%-a ns|a.foo {a: b}\
             \n* {@extend .foo} -a {@extend %-a}\
             \n"
        )
        .unwrap(),
        "-a ns|a.foo {\
         \n  a: b;\
         \n}\
         \n"
    );
}
// From "sass-spec/spec/non_conformant/extend-tests/041_test_universal_unification_with_namespaced_element_target.hrx"
#[test]
#[ignore] // unexpected error
fn t041_test_universal_unification_with_namespaced_element_target() {
    assert_eq!(
        rsass(
            "%-a ns|a.foo {a: b}\
             \n*|* {@extend .foo} -a {@extend %-a}\
             \n"
        )
        .unwrap(),
        "-a ns|a {\
         \n  a: b;\
         \n}\
         \n"
    );
}
// From "sass-spec/spec/non_conformant/extend-tests/042_test_universal_unification_with_namespaced_element_target.hrx"
#[test]
#[ignore] // unexpected error
fn t042_test_universal_unification_with_namespaced_element_target() {
    assert_eq!(
        rsass(
            "%-a ns|a.foo {a: b}\
             \nns|* {@extend .foo} -a {@extend %-a}\
             \n"
        )
        .unwrap(),
        "-a ns|a {\
         \n  a: b;\
         \n}\
         \n"
    );
}
// From "sass-spec/spec/non_conformant/extend-tests/043_test_element_unification_with_simple_target.hrx"
#[test]
#[ignore] // unexpected error
fn t043_test_element_unification_with_simple_target() {
    assert_eq!(
        rsass(
            "%-a .foo {a: b}\
             \na {@extend .foo} -a {@extend %-a}\
             \n"
        )
        .unwrap(),
        "-a .foo, -a a {\
         \n  a: b;\
         \n}\
         \n"
    );
}
// From "sass-spec/spec/non_conformant/extend-tests/044_test_element_unification_with_simple_target.hrx"
#[test]
#[ignore] // unexpected error
fn t044_test_element_unification_with_simple_target() {
    assert_eq!(
        rsass(
            "%-a .foo.bar {a: b}\
             \na {@extend .foo} -a {@extend %-a}\
             \n"
        )
        .unwrap(),
        "-a .foo.bar, -a a.bar {\
         \n  a: b;\
         \n}\
         \n"
    );
}
// From "sass-spec/spec/non_conformant/extend-tests/045_test_element_unification_with_simple_target.hrx"
#[test]
#[ignore] // unexpected error
fn t045_test_element_unification_with_simple_target() {
    assert_eq!(
        rsass(
            "%-a .foo.bar {a: b}\
             \n*|a {@extend .foo} -a {@extend %-a}\
             \n"
        )
        .unwrap(),
        "-a .foo.bar, -a *|a.bar {\
         \n  a: b;\
         \n}\
         \n"
    );
}
// From "sass-spec/spec/non_conformant/extend-tests/046_test_element_unification_with_simple_target.hrx"
#[test]
#[ignore] // unexpected error
fn t046_test_element_unification_with_simple_target() {
    assert_eq!(
        rsass(
            "%-a .foo.bar {a: b}\
             \nns|a {@extend .foo} -a {@extend %-a}\
             \n"
        )
        .unwrap(),
        "-a .foo.bar, -a ns|a.bar {\
         \n  a: b;\
         \n}\
         \n"
    );
}
// From "sass-spec/spec/non_conformant/extend-tests/047_test_element_unification_with_namespaceless_universal_target.hrx"
#[test]
#[ignore] // unexpected error
fn t047_test_element_unification_with_namespaceless_universal_target() {
    assert_eq!(
        rsass(
            "%-a *.foo {a: b}\
             \na {@extend .foo} -a {@extend %-a}\
             \n"
        )
        .unwrap(),
        "-a *.foo, -a a {\
         \n  a: b;\
         \n}\
         \n"
    );
}
// From "sass-spec/spec/non_conformant/extend-tests/048_test_element_unification_with_namespaceless_universal_target.hrx"
#[test]
#[ignore] // unexpected error
fn t048_test_element_unification_with_namespaceless_universal_target() {
    assert_eq!(
        rsass(
            "%-a *.foo {a: b}\
             \n*|a {@extend .foo} -a {@extend %-a}\
             \n"
        )
        .unwrap(),
        "-a *.foo, -a a {\
         \n  a: b;\
         \n}\
         \n"
    );
}
// From "sass-spec/spec/non_conformant/extend-tests/049_test_element_unification_with_namespaceless_universal_target.hrx"
#[test]
#[ignore] // unexpected error
fn t049_test_element_unification_with_namespaceless_universal_target() {
    assert_eq!(
        rsass(
            "%-a *|*.foo {a: b}\
             \na {@extend .foo} -a {@extend %-a}\
             \n"
        )
        .unwrap(),
        "-a *|*.foo, -a a {\
         \n  a: b;\
         \n}\
         \n"
    );
}
// From "sass-spec/spec/non_conformant/extend-tests/050_test_element_unification_with_namespaceless_universal_target.hrx"
#[test]
#[ignore] // unexpected error
fn t050_test_element_unification_with_namespaceless_universal_target() {
    assert_eq!(
        rsass(
            "%-a *|*.foo {a: b}\
             \n*|a {@extend .foo} -a {@extend %-a}\
             \n"
        )
        .unwrap(),
        "-a *|*.foo, -a *|a {\
         \n  a: b;\
         \n}\
         \n"
    );
}
// From "sass-spec/spec/non_conformant/extend-tests/051_test_element_unification_with_namespaceless_universal_target.hrx"
#[test]
#[ignore] // unexpected error
fn t051_test_element_unification_with_namespaceless_universal_target() {
    assert_eq!(
        rsass(
            "%-a *.foo {a: b}\
             \nns|a {@extend .foo} -a {@extend %-a}\
             \n"
        )
        .unwrap(),
        "-a *.foo {\
         \n  a: b;\
         \n}\
         \n"
    );
}
// From "sass-spec/spec/non_conformant/extend-tests/052_test_element_unification_with_namespaceless_universal_target.hrx"
#[test]
#[ignore] // unexpected error
fn t052_test_element_unification_with_namespaceless_universal_target() {
    assert_eq!(
        rsass(
            "%-a *|*.foo {a: b}\
             \nns|a {@extend .foo} -a {@extend %-a}\
             \n"
        )
        .unwrap(),
        "-a *|*.foo, -a ns|a {\
         \n  a: b;\
         \n}\
         \n"
    );
}
// From "sass-spec/spec/non_conformant/extend-tests/053_test_element_unification_with_namespaced_universal_target.hrx"
#[test]
#[ignore] // unexpected error
fn t053_test_element_unification_with_namespaced_universal_target() {
    assert_eq!(
        rsass(
            "%-a ns|*.foo {a: b}\
             \na {@extend .foo} -a {@extend %-a}\
             \n"
        )
        .unwrap(),
        "-a ns|*.foo {\
         \n  a: b;\
         \n}\
         \n"
    );
}
// From "sass-spec/spec/non_conformant/extend-tests/054_test_element_unification_with_namespaced_universal_target.hrx"
#[test]
#[ignore] // unexpected error
fn t054_test_element_unification_with_namespaced_universal_target() {
    assert_eq!(
        rsass(
            "%-a ns|*.foo {a: b}\
             \n*|a {@extend .foo} -a {@extend %-a}\
             \n"
        )
        .unwrap(),
        "-a ns|*.foo, -a ns|a {\
         \n  a: b;\
         \n}\
         \n"
    );
}
// From "sass-spec/spec/non_conformant/extend-tests/055_test_element_unification_with_namespaced_universal_target.hrx"
#[test]
#[ignore] // unexpected error
fn t055_test_element_unification_with_namespaced_universal_target() {
    assert_eq!(
        rsass(
            "%-a ns|*.foo {a: b}\
             \nns|a {@extend .foo} -a {@extend %-a}\
             \n"
        )
        .unwrap(),
        "-a ns|*.foo, -a ns|a {\
         \n  a: b;\
         \n}\
         \n"
    );
}
// From "sass-spec/spec/non_conformant/extend-tests/056_test_element_unification_with_namespaceless_element_target.hrx"
#[test]
#[ignore] // unexpected error
fn t056_test_element_unification_with_namespaceless_element_target() {
    assert_eq!(
        rsass(
            "%-a a.foo {a: b}\
             \na {@extend .foo} -a {@extend %-a}\
             \n"
        )
        .unwrap(),
        "-a a {\
         \n  a: b;\
         \n}\
         \n"
    );
}
// From "sass-spec/spec/non_conformant/extend-tests/057_test_element_unification_with_namespaceless_element_target.hrx"
#[test]
#[ignore] // unexpected error
fn t057_test_element_unification_with_namespaceless_element_target() {
    assert_eq!(
        rsass(
            "%-a a.foo {a: b}\
             \n*|a {@extend .foo} -a {@extend %-a}\
             \n"
        )
        .unwrap(),
        "-a a {\
         \n  a: b;\
         \n}\
         \n"
    );
}
// From "sass-spec/spec/non_conformant/extend-tests/058_test_element_unification_with_namespaceless_element_target.hrx"
#[test]
#[ignore] // unexpected error
fn t058_test_element_unification_with_namespaceless_element_target() {
    assert_eq!(
        rsass(
            "%-a *|a.foo {a: b}\
             \na {@extend .foo} -a {@extend %-a}\
             \n"
        )
        .unwrap(),
        "-a *|a.foo, -a a {\
         \n  a: b;\
         \n}\
         \n"
    );
}
// From "sass-spec/spec/non_conformant/extend-tests/059_test_element_unification_with_namespaceless_element_target.hrx"
#[test]
#[ignore] // unexpected error
fn t059_test_element_unification_with_namespaceless_element_target() {
    assert_eq!(
        rsass(
            "%-a *|a.foo {a: b}\
             \n*|a {@extend .foo} -a {@extend %-a}\
             \n"
        )
        .unwrap(),
        "-a *|a {\
         \n  a: b;\
         \n}\
         \n"
    );
}
// From "sass-spec/spec/non_conformant/extend-tests/060_test_element_unification_with_namespaceless_element_target.hrx"
#[test]
#[ignore] // unexpected error
fn t060_test_element_unification_with_namespaceless_element_target() {
    assert_eq!(
        rsass(
            "%-a a.foo {a: b}\
             \nns|a {@extend .foo} -a {@extend %-a}\
             \n"
        )
        .unwrap(),
        "-a a.foo {\
         \n  a: b;\
         \n}\
         \n"
    );
}
// From "sass-spec/spec/non_conformant/extend-tests/061_test_element_unification_with_namespaceless_element_target.hrx"
#[test]
#[ignore] // unexpected error
fn t061_test_element_unification_with_namespaceless_element_target() {
    assert_eq!(
        rsass(
            "%-a *|a.foo {a: b}\
             \nns|a {@extend .foo} -a {@extend %-a}\
             \n"
        )
        .unwrap(),
        "-a *|a.foo, -a ns|a {\
         \n  a: b;\
         \n}\
         \n"
    );
}
// From "sass-spec/spec/non_conformant/extend-tests/062_test_element_unification_with_namespaced_element_target.hrx"
#[test]
#[ignore] // unexpected error
fn t062_test_element_unification_with_namespaced_element_target() {
    assert_eq!(
        rsass(
            "%-a ns|a.foo {a: b}\
             \na {@extend .foo} -a {@extend %-a}\
             \n"
        )
        .unwrap(),
        "-a ns|a.foo {\
         \n  a: b;\
         \n}\
         \n"
    );
}
// From "sass-spec/spec/non_conformant/extend-tests/063_test_element_unification_with_namespaced_element_target.hrx"
#[test]
#[ignore] // unexpected error
fn t063_test_element_unification_with_namespaced_element_target() {
    assert_eq!(
        rsass(
            "%-a ns|a.foo {a: b}\
             \n*|a {@extend .foo} -a {@extend %-a}\
             \n"
        )
        .unwrap(),
        "-a ns|a {\
         \n  a: b;\
         \n}\
         \n"
    );
}
// From "sass-spec/spec/non_conformant/extend-tests/064_test_element_unification_with_namespaced_element_target.hrx"
#[test]
#[ignore] // unexpected error
fn t064_test_element_unification_with_namespaced_element_target() {
    assert_eq!(
        rsass(
            "%-a ns|a.foo {a: b}\
             \nns|a {@extend .foo} -a {@extend %-a}\
             \n"
        )
        .unwrap(),
        "-a ns|a {\
         \n  a: b;\
         \n}\
         \n"
    );
}
// From "sass-spec/spec/non_conformant/extend-tests/065_test_attribute_unification.hrx"
#[test]
#[ignore] // unexpected error
fn t065_test_attribute_unification() {
    assert_eq!(
        rsass(
            "%-a [foo=bar].baz {a: b}\
             \n[foo=baz] {@extend .baz} -a {@extend %-a}\
             \n"
        )
        .unwrap(),
        "-a [foo=bar].baz, -a [foo=bar][foo=baz] {\
         \n  a: b;\
         \n}\
         \n"
    );
}
// From "sass-spec/spec/non_conformant/extend-tests/066_test_attribute_unification.hrx"
#[test]
#[ignore] // unexpected error
fn t066_test_attribute_unification() {
    assert_eq!(
        rsass(
            "%-a [foo=bar].baz {a: b}\
             \n[foo^=bar] {@extend .baz} -a {@extend %-a}\
             \n"
        )
        .unwrap(),
        "-a [foo=bar].baz, -a [foo=bar][foo^=bar] {\
         \n  a: b;\
         \n}\
         \n"
    );
}
// From "sass-spec/spec/non_conformant/extend-tests/067_test_attribute_unification.hrx"
#[test]
#[ignore] // unexpected error
fn t067_test_attribute_unification() {
    assert_eq!(
        rsass(
            "%-a [foo=bar].baz {a: b}\
             \n[foot=bar] {@extend .baz} -a {@extend %-a}\
             \n"
        )
        .unwrap(),
        "-a [foo=bar].baz, -a [foo=bar][foot=bar] {\
         \n  a: b;\
         \n}\
         \n"
    );
}
// From "sass-spec/spec/non_conformant/extend-tests/068_test_attribute_unification.hrx"
#[test]
#[ignore] // unexpected error
fn t068_test_attribute_unification() {
    assert_eq!(
        rsass(
            "%-a [foo=bar].baz {a: b}\
             \n[ns|foo=bar] {@extend .baz} -a {@extend %-a}\
             \n"
        )
        .unwrap(),
        "-a [foo=bar].baz, -a [foo=bar][ns|foo=bar] {\
         \n  a: b;\
         \n}\
         \n"
    );
}
// From "sass-spec/spec/non_conformant/extend-tests/069_test_attribute_unification.hrx"
#[test]
#[ignore] // unexpected error
fn t069_test_attribute_unification() {
    assert_eq!(
        rsass(
            "%-a %-a [foo=bar].bar {a: b}\
             \n[foo=bar] {@extend .bar} -a {@extend %-a}\
             \n"
        )
        .unwrap(),
        "-a -a [foo=bar] {\
         \n  a: b;\
         \n}\
         \n"
    );
}
// From "sass-spec/spec/non_conformant/extend-tests/070_test_pseudo_unification.hrx"
#[test]
#[ignore] // unexpected error
fn t070_test_pseudo_unification() {
    assert_eq!(
        rsass(
            "%-a :foo.baz {a: b}\
             \n:foo(2n+1) {@extend .baz} -a {@extend %-a}\
             \n"
        )
        .unwrap(),
        "-a :foo.baz, -a :foo:foo(2n+1) {\
         \n  a: b;\
         \n}\
         \n"
    );
}
// From "sass-spec/spec/non_conformant/extend-tests/071_test_pseudo_unification.hrx"
#[test]
#[ignore] // unexpected error
fn t071_test_pseudo_unification() {
    assert_eq!(
        rsass(
            "%-a :foo.baz {a: b}\
             \n::foo {@extend .baz} -a {@extend %-a}\
             \n"
        )
        .unwrap(),
        "-a :foo.baz, -a :foo::foo {\
         \n  a: b;\
         \n}\
         \n"
    );
}
// From "sass-spec/spec/non_conformant/extend-tests/072_test_pseudo_unification.hrx"
#[test]
#[ignore] // unexpected error
fn t072_test_pseudo_unification() {
    assert_eq!(
        rsass(
            "%-a ::foo.baz {a: b}\
             \n::foo {@extend .baz} -a {@extend %-a}\
             \n"
        )
        .unwrap(),
        "-a ::foo {\
         \n  a: b;\
         \n}\
         \n"
    );
}
// From "sass-spec/spec/non_conformant/extend-tests/073_test_pseudo_unification.hrx"
#[test]
#[ignore] // unexpected error
fn t073_test_pseudo_unification() {
    assert_eq!(
        rsass(
            "%-a ::foo(2n+1).baz {a: b}\
             \n::foo(2n+1) {@extend .baz} -a {@extend %-a}\
             \n"
        )
        .unwrap(),
        "-a ::foo(2n+1) {\
         \n  a: b;\
         \n}\
         \n"
    );
}
// From "sass-spec/spec/non_conformant/extend-tests/074_test_pseudo_unification.hrx"
#[test]
#[ignore] // unexpected error
fn t074_test_pseudo_unification() {
    assert_eq!(
        rsass(
            "%-a :foo.baz {a: b}\
             \n:bar {@extend .baz} -a {@extend %-a}\
             \n"
        )
        .unwrap(),
        "-a :foo.baz, -a :foo:bar {\
         \n  a: b;\
         \n}\
         \n"
    );
}
// From "sass-spec/spec/non_conformant/extend-tests/075_test_pseudo_unification.hrx"
#[test]
#[ignore] // unexpected error
fn t075_test_pseudo_unification() {
    assert_eq!(
        rsass(
            "%-a .baz:foo {a: b}\
             \n:after {@extend .baz} -a {@extend %-a}\
             \n"
        )
        .unwrap(),
        "-a .baz:foo, -a :foo:after {\
         \n  a: b;\
         \n}\
         \n"
    );
}
// From "sass-spec/spec/non_conformant/extend-tests/076_test_pseudo_unification.hrx"
#[test]
#[ignore] // unexpected error
fn t076_test_pseudo_unification() {
    assert_eq!(
        rsass(
            "%-a .baz:after {a: b}\
             \n:foo {@extend .baz} -a {@extend %-a}\
             \n"
        )
        .unwrap(),
        "-a .baz:after, -a :foo:after {\
         \n  a: b;\
         \n}\
         \n"
    );
}
// From "sass-spec/spec/non_conformant/extend-tests/077_test_pseudo_unification.hrx"
#[test]
#[ignore] // unexpected error
fn t077_test_pseudo_unification() {
    assert_eq!(
        rsass(
            "%-a :foo.baz {a: b}\
             \n:foo {@extend .baz} -a {@extend %-a}\
             \n"
        )
        .unwrap(),
        "-a :foo {\
         \n  a: b;\
         \n}\
         \n"
    );
}
// From "sass-spec/spec/non_conformant/extend-tests/078_test_pseudoelement_remains_at_end_of_selector.hrx"
#[test]
#[ignore] // unexpected error
fn t078_test_pseudoelement_remains_at_end_of_selector() {
    assert_eq!(
        rsass(
            ".foo::bar {a: b}\
             \n.baz {@extend .foo}\
             \n"
        )
        .unwrap(),
        ".foo::bar, .baz::bar {\
         \n  a: b;\
         \n}\
         \n"
    );
}
// From "sass-spec/spec/non_conformant/extend-tests/079_test_pseudoelement_remains_at_end_of_selector.hrx"
#[test]
#[ignore] // unexpected error
fn t079_test_pseudoelement_remains_at_end_of_selector() {
    assert_eq!(
        rsass(
            "a.foo::bar {a: b}\
             \n.baz {@extend .foo}\
             \n"
        )
        .unwrap(),
        "a.foo::bar, a.baz::bar {\
         \n  a: b;\
         \n}\
         \n"
    );
}
// From "sass-spec/spec/non_conformant/extend-tests/080_test_pseudoclass_remains_at_end_of_selector.hrx"
#[test]
#[ignore] // unexpected error
fn t080_test_pseudoclass_remains_at_end_of_selector() {
    assert_eq!(
        rsass(
            ".foo:bar {a: b}\
             \n.baz {@extend .foo}\
             \n"
        )
        .unwrap(),
        ".foo:bar, .baz:bar {\
         \n  a: b;\
         \n}\
         \n"
    );
}
// From "sass-spec/spec/non_conformant/extend-tests/081_test_pseudoclass_remains_at_end_of_selector.hrx"
#[test]
#[ignore] // unexpected error
fn t081_test_pseudoclass_remains_at_end_of_selector() {
    assert_eq!(
        rsass(
            "a.foo:bar {a: b}\
             \n.baz {@extend .foo}\
             \n"
        )
        .unwrap(),
        "a.foo:bar, a.baz:bar {\
         \n  a: b;\
         \n}\
         \n"
    );
}
// From "sass-spec/spec/non_conformant/extend-tests/082_test_not_remains_at_end_of_selector.hrx"
#[test]
#[ignore] // unexpected error
fn t082_test_not_remains_at_end_of_selector() {
    assert_eq!(
        rsass(
            ".foo:not(.bar) {a: b}\
             \n.baz {@extend .foo}\
             \n"
        )
        .unwrap(),
        ".foo:not(.bar), .baz:not(.bar) {\
         \n  a: b;\
         \n}\
         \n"
    );
}
// From "sass-spec/spec/non_conformant/extend-tests/083_test_pseudoelement_goes_lefter_than_pseudoclass.hrx"
#[test]
#[ignore] // unexpected error
fn t083_test_pseudoelement_goes_lefter_than_pseudoclass() {
    assert_eq!(
        rsass(
            ".foo::bar {a: b}\
             \n.baz:bang {@extend .foo}\
             \n"
        )
        .unwrap(),
        ".foo::bar, .baz:bang::bar {\
         \n  a: b;\
         \n}\
         \n"
    );
}
// From "sass-spec/spec/non_conformant/extend-tests/084_test_pseudoelement_goes_lefter_than_pseudoclass.hrx"
#[test]
#[ignore] // unexpected error
fn t084_test_pseudoelement_goes_lefter_than_pseudoclass() {
    assert_eq!(
        rsass(
            ".foo:bar {a: b}\
             \n.baz::bang {@extend .foo}\
             \n"
        )
        .unwrap(),
        ".foo:bar, .baz:bar::bang {\
         \n  a: b;\
         \n}\
         \n"
    );
}
// From "sass-spec/spec/non_conformant/extend-tests/085_test_pseudoelement_goes_lefter_than_not.hrx"
#[test]
#[ignore] // unexpected error
fn t085_test_pseudoelement_goes_lefter_than_not() {
    assert_eq!(
        rsass(
            ".foo::bar {a: b}\
             \n.baz:not(.bang) {@extend .foo}\
             \n"
        )
        .unwrap(),
        ".foo::bar, .baz:not(.bang)::bar {\
         \n  a: b;\
         \n}\
         \n"
    );
}
// From "sass-spec/spec/non_conformant/extend-tests/086.1_test_pseudoelement_goes_lefter_than_not.hrx"
#[test]
#[ignore] // wrong result (rsass output currently differs from the expected CSS)
fn t086_1_test_pseudoelement_goes_lefter_than_not() {
    assert_eq!(
        rsass(
            "%a {\
             \n  x:y;\
             \n}\
             \nb:after:not(:first-child) {\
             \n  @extend %a;\
             \n}\
             \nc:s {\
             \n  @extend %a; \
             \n}\
             \nd::e {\
             \n  @extend c;\
             \n}"
        )
        .unwrap(),
        "c:s, d:s::e, b:after:not(:first-child) {\
         \n  x: y;\
         \n}\
         \n"
    );
}
// From "sass-spec/spec/non_conformant/extend-tests/086_test_pseudoelement_goes_lefter_than_not.hrx"
#[test]
#[ignore] // unexpected error
fn t086_test_pseudoelement_goes_lefter_than_not() {
    assert_eq!(
        rsass(
            ".foo:not(.bang) {a: b}\
             \n.baz::bar {@extend .foo}\
             \n"
        )
        .unwrap(),
        ".foo:not(.bang), .baz:not(.bang)::bar {\
         \n  a: b;\
         \n}\
         \n"
    );
}
// From "sass-spec/spec/non_conformant/extend-tests/087_test_negation_unification.hrx"
#[test]
#[ignore] // unexpected error
fn t087_test_negation_unification() {
    assert_eq!(
        rsass(
            "%-a :not(.foo).baz {a: b}\
             \n:not(.bar) {@extend .baz} -a {@extend %-a}\
             \n"
        )
        .unwrap(),
        "-a :not(.foo).baz, -a :not(.foo):not(.bar) {\
         \n  a: b;\
         \n}\
         \n"
    );
}
// From "sass-spec/spec/non_conformant/extend-tests/088_test_negation_unification.hrx"
#[test]
#[ignore] // unexpected error
fn t088_test_negation_unification() {
    assert_eq!(
        rsass(
            "%-a :not(.foo).baz {a: b}\
             \n:not(.foo) {@extend .baz} -a {@extend %-a}\
             \n"
        )
        .unwrap(),
        "-a :not(.foo) {\
         \n  a: b;\
         \n}\
         \n"
    );
}
// From "sass-spec/spec/non_conformant/extend-tests/089_test_negation_unification.hrx"
#[test]
#[ignore] // unexpected error
fn t089_test_negation_unification() {
    assert_eq!(
        rsass(
            "%-a :not([a=b]).baz {a: b}\
             \n:not([a = b]) {@extend .baz} -a {@extend %-a}\
             \n"
        )
        .unwrap(),
        "-a :not([a=b]) {\
         \n  a: b;\
         \n}\
         \n"
    );
}
// From "sass-spec/spec/non_conformant/extend-tests/090_test_comma_extendee.hrx"
#[test]
#[ignore] // unexpected error
fn t090_test_comma_extendee() {
    assert_eq!(
        rsass(
            ".foo {a: b}\
             \n.bar {c: d}\
             \n.baz {@extend .foo, .bar}\
             \n"
        )
        .unwrap(),
        ".foo, .baz {\
         \n  a: b;\
         \n}\
         \n.bar, .baz {\
         \n  c: d;\
         \n}\
         \n"
    );
}
// From "sass-spec/spec/non_conformant/extend-tests/091_test_redundant_selector_elimination.hrx"
#[test]
#[ignore] // unexpected error
fn t091_test_redundant_selector_elimination() {
    assert_eq!(
        rsass(
            ".foo.bar {a: b}\
             \n.x {@extend .foo, .bar}\
             \n.y {@extend .foo, .bar}\
             \n"
        )
        .unwrap(),
        ".foo.bar, .y, .x {\
         \n  a: b;\
         \n}\
         \n"
    );
}
// From "sass-spec/spec/non_conformant/extend-tests/094_test_long_extendee_runs_unification.hrx"
// Ignoring "t094_test_long_extendee_runs_unification", error tests are not supported yet.
// From "sass-spec/spec/non_conformant/extend-tests/095_test_long_extender.hrx"
#[test]
#[ignore] // unexpected error
fn t095_test_long_extender() {
    assert_eq!(
        rsass(
            ".foo.bar {a: b}\
             \n.baz.bang {@extend .foo}\
             \n"
        )
        .unwrap(),
        ".foo.bar, .bar.baz.bang {\
         \n  a: b;\
         \n}\
         \n"
    );
}
// From "sass-spec/spec/non_conformant/extend-tests/096_test_long_extender_runs_unification.hrx"
#[test]
#[ignore] // unexpected error
fn t096_test_long_extender_runs_unification() {
    assert_eq!(
        rsass(
            "ns|*.foo.bar {a: b}\
             \na.baz {@extend .foo}\
             \n"
        )
        .unwrap(),
        "ns|*.foo.bar {\
         \n  a: b;\
         \n}\
         \n"
    );
}
// From "sass-spec/spec/non_conformant/extend-tests/097_test_nested_extender.hrx"
#[test]
#[ignore] // unexpected error
fn t097_test_nested_extender() {
    assert_eq!(
        rsass(
            ".foo {a: b}\
             \nfoo bar {@extend .foo}\
             \n"
        )
        .unwrap(),
        ".foo, foo bar {\
         \n  a: b;\
         \n}\
         \n"
    );
}
// From "sass-spec/spec/non_conformant/extend-tests/098_test_nested_extender_runs_unification.hrx"
#[test]
#[ignore] // unexpected error
fn t098_test_nested_extender_runs_unification() {
    assert_eq!(
        rsass(
            ".foo.bar {a: b}\
             \nfoo bar {@extend .foo}\
             \n"
        )
        .unwrap(),
        ".foo.bar, foo bar.bar {\
         \n  a: b;\
         \n}\
         \n"
    );
}
// From "sass-spec/spec/non_conformant/extend-tests/099_test_nested_extender_alternates_parents.hrx"
#[test]
#[ignore] // unexpected error
fn t099_test_nested_extender_alternates_parents() {
    assert_eq!(
        rsass(
            ".baz .bip .foo {a: b}\
             \nfoo .grank bar {@extend .foo}\
             \n"
        )
        .unwrap(),
        ".baz .bip .foo, .baz .bip foo .grank bar, foo .grank .baz .bip bar {\
         \n  a: b;\
         \n}\
         \n"
    );
}
// From "sass-spec/spec/non_conformant/extend-tests/100_test_nested_extender_unifies_identical_parents.hrx"
#[test]
#[ignore] // unexpected error
fn t100_test_nested_extender_unifies_identical_parents() {
    assert_eq!(
        rsass(
            ".baz .bip .foo {a: b}\
             \n.baz .bip bar {@extend .foo}\
             \n"
        )
        .unwrap(),
        ".baz .bip .foo, .baz .bip bar {\
         \n  a: b;\
         \n}\
         \n"
    );
}
// From "sass-spec/spec/non_conformant/extend-tests/101_test_nested_extender_unifies_common_substring.hrx"
#[test]
#[ignore] // unexpected error
fn t101_test_nested_extender_unifies_common_substring() {
    assert_eq!(
        rsass(
            ".baz .bip .bap .bink .foo {a: b}\
             \n.brat .bip .bap bar {@extend .foo}\
             \n"
        )
        .unwrap(),
        ".baz .bip .bap .bink .foo, .baz .brat .bip .bap .bink bar, .brat .baz .bip .bap .bink bar {\
         \n  a: b;\
         \n}\
         \n"
    );
}
// From "sass-spec/spec/non_conformant/extend-tests/102_test_nested_extender_unifies_common_subseq.hrx"
#[test]
#[ignore] // unexpected error
fn t102_test_nested_extender_unifies_common_subseq() {
    assert_eq!(
        rsass(
            ".a .x .b .y .foo {a: b}\
             \n.a .n .b .m bar {@extend .foo}\
             \n"
        )
        .unwrap(),
        ".a .x .b .y .foo, .a .x .n .b .y .m bar, .a .n .x .b .y .m bar, .a .x .n .b .m .y bar, .a .n .x .b .m .y bar {\
         \n  a: b;\
         \n}\
         \n"
    );
}
// From "sass-spec/spec/non_conformant/extend-tests/103_test_nested_extender_chooses_first_subseq.hrx"
#[test]
#[ignore] // unexpected error
fn t103_test_nested_extender_chooses_first_subseq() {
    assert_eq!(
        rsass(
            ".a .b .c .d .foo {a: b}\
             \n.c .d .a .b .bar {@extend .foo}\
             \n"
        )
        .unwrap(),
        ".a .b .c .d .foo, .a .b .c .d .a .b .bar {\
         \n  a: b;\
         \n}\
         \n"
    );
}
// Test 104 (nested extender counts extended subselectors) is declared as a
// submodule, i.e. it lives in its own source file.
mod t104_test_nested_extender_counts_extended_subselectors;
// From "sass-spec/spec/non_conformant/extend-tests/105_test_nested_extender_counts_extended_superselectors.hrx"
#[test]
#[ignore] // unexpected error
fn t105_test_nested_extender_counts_extended_superselectors() {
    assert_eq!(
        rsass(
            ".a .bip .foo {a: b}\
             \n.b .bip.bop .bar {@extend .foo}\
             \n"
        )
        .unwrap(),
        ".a .bip .foo, .a .b .bip.bop .bar, .b .a .bip.bop .bar {\
         \n  a: b;\
         \n}\
         \n"
    );
}
// From "sass-spec/spec/non_conformant/extend-tests/106_test_nested_extender_with_child_selector.hrx"
#[test]
#[ignore] // unexpected error
fn t106_test_nested_extender_with_child_selector() {
    assert_eq!(
        rsass(
            ".baz .foo {a: b}\
             \nfoo > bar {@extend .foo}\
             \n"
        )
        .unwrap(),
        ".baz .foo, .baz foo > bar {\
         \n  a: b;\
         \n}\
         \n"
    );
}
// From "sass-spec/spec/non_conformant/extend-tests/107_test_nested_extender_finds_common_selectors_around_child_selector.hrx"
#[test]
#[ignore] // unexpected error
fn t107_test_nested_extender_finds_common_selectors_around_child_selector() {
    assert_eq!(
        rsass(
            "a > b c .c1 {a: b}\
             \na c .c2 {@extend .c1}\
             \n"
        )
        .unwrap(),
        "a > b c .c1, a > b c .c2 {\
         \n  a: b;\
         \n}\
         \n"
    );
}
// From "sass-spec/spec/non_conformant/extend-tests/108_test_nested_extender_finds_common_selectors_around_child_selector.hrx"
#[test]
#[ignore] // unexpected error
fn t108_test_nested_extender_finds_common_selectors_around_child_selector() {
    assert_eq!(
        rsass(
            "a > b c .c1 {a: b}\
             \nb c .c2 {@extend .c1}\
             \n"
        )
        .unwrap(),
        "a > b c .c1, a > b c .c2 {\
         \n  a: b;\
         \n}\
         \n"
    );
}
// From "sass-spec/spec/non_conformant/extend-tests/109_test_nested_extender_finds_common_selectors_around_adjacent_sibling.hrx"
#[test]
#[ignore] // unexpected error
fn t109_test_nested_extender_finds_common_selectors_around_adjacent_sibling()
{
    assert_eq!(
        rsass(
            "a + b c .c1 {a: b}\
             \na c .c2 {@extend .c1}\
             \n"
        )
        .unwrap(),
        "a + b c .c1, a + b a c .c2, a a + b c .c2 {\
         \n  a: b;\
         \n}\
         \n"
    );
}
// From "sass-spec/spec/non_conformant/extend-tests/110_test_nested_extender_finds_common_selectors_around_adjacent_sibling.hrx"
#[test]
#[ignore] // unexpected error
fn t110_test_nested_extender_finds_common_selectors_around_adjacent_sibling()
{
    assert_eq!(
        rsass(
            "a + b c .c1 {a: b}\
             \na b .c2 {@extend .c1}\
             \n"
        )
        .unwrap(),
        "a + b c .c1, a a + b c .c2 {\
         \n  a: b;\
         \n}\
         \n"
    );
}
// From "sass-spec/spec/non_conformant/extend-tests/111_test_nested_extender_finds_common_selectors_around_adjacent_sibling.hrx"
#[test]
#[ignore] // unexpected error
fn t111_test_nested_extender_finds_common_selectors_around_adjacent_sibling()
{
    assert_eq!(
        rsass(
            "a + b c .c1 {a: b}\
             \nb c .c2 {@extend .c1}\
             \n"
        )
        .unwrap(),
        "a + b c .c1, a + b c .c2 {\
         \n  a: b;\
         \n}\
         \n"
    );
}
// From "sass-spec/spec/non_conformant/extend-tests/112_test_nested_extender_finds_common_selectors_around_sibling_selector.hrx"
#[test]
#[ignore] // unexpected error
fn t112_test_nested_extender_finds_common_selectors_around_sibling_selector()
{
    assert_eq!(
        rsass(
            "a ~ b c .c1 {a: b}\
             \na c .c2 {@extend .c1}\
             \n"
        )
        .unwrap(),
        "a ~ b c .c1, a ~ b a c .c2, a a ~ b c .c2 {\
         \n  a: b;\
         \n}\
         \n"
    );
}
// From "sass-spec/spec/non_conformant/extend-tests/113_test_nested_extender_finds_common_selectors_around_sibling_selector.hrx"
#[test]
#[ignore] // unexpected error
fn t113_test_nested_extender_finds_common_selectors_around_sibling_selector()
{
    assert_eq!(
        rsass(
            "a ~ b c .c1 {a: b}\
             \na b .c2 {@extend .c1}\
             \n"
        )
        .unwrap(),
        "a ~ b c .c1, a a ~ b c .c2 {\
         \n  a: b;\
         \n}\
         \n"
    );
}
// From "sass-spec/spec/non_conformant/extend-tests/114_test_nested_extender_finds_common_selectors_around_sibling_selector.hrx"
#[test]
#[ignore] // unexpected error
fn t114_test_nested_extender_finds_common_selectors_around_sibling_selector()
{
    assert_eq!(
        rsass(
            "a ~ b c .c1 {a: b}\
             \nb c .c2 {@extend .c1}\
             \n"
        )
        .unwrap(),
        "a ~ b c .c1, a ~ b c .c2 {\
         \n  a: b;\
         \n}\
         \n"
    );
}
// From "sass-spec/spec/non_conformant/extend-tests/118_test_nested_extender_with_early_child_selectors_doesnt_subseq_them.hrx"
#[test]
#[ignore] // unexpected error
fn t118_test_nested_extender_with_early_child_selectors_doesnt_subseq_them() {
    assert_eq!(
        rsass(
            ".bip > .bap .foo {a: b}\
             \n.grip > .bap .bar {@extend .foo}\
             \n"
        )
        .unwrap(),
        ".bip > .bap .foo, .bip > .bap .grip > .bap .bar, .grip > .bap .bip > .bap .bar {\
         \n  a: b;\
         \n}\
         \n"
    );
}
// From "sass-spec/spec/non_conformant/extend-tests/119_test_nested_extender_with_early_child_selectors_doesnt_subseq_them.hrx"
#[test]
#[ignore] // unexpected error
fn t119_test_nested_extender_with_early_child_selectors_doesnt_subseq_them() {
    assert_eq!(
        rsass(
            ".bap > .bip .foo {a: b}\
             \n.bap > .grip .bar {@extend .foo}\
             \n"
        )
        .unwrap(),
        ".bap > .bip .foo, .bap > .bip .bap > .grip .bar, .bap > .grip .bap > .bip .bar {\
         \n  a: b;\
         \n}\
         \n"
    );
}
// From "sass-spec/spec/non_conformant/extend-tests/120_test_nested_extender_with_child_selector_unifies.hrx"
#[test]
#[ignore] // unexpected error
fn t120_test_nested_extender_with_child_selector_unifies() {
    assert_eq!(
        rsass(
            ".baz.foo {a: b}\
             \nfoo > bar {@extend .foo}\
             \n"
        )
        .unwrap(),
        ".baz.foo, foo > bar.baz {\
         \n  a: b;\
         \n}\
         \n"
    );
}
// From "sass-spec/spec/non_conformant/extend-tests/121_test_nested_extender_with_child_selector_unifies.hrx"
#[test]
#[ignore] // unexpected error
fn t121_test_nested_extender_with_child_selector_unifies() {
    assert_eq!(
        rsass(
            ".baz > {\
             \n.foo {a: b}\
             \n.bar {@extend .foo}\
             \n}\
             \n"
        )
        .unwrap(),
        ".baz > .foo, .baz > .bar {\
         \n  a: b;\
         \n}\
         \n"
    );
}
// From "sass-spec/spec/non_conformant/extend-tests/122_test_nested_extender_with_child_selector_unifies.hrx"
#[test]
#[ignore] // unexpected error
fn t122_test_nested_extender_with_child_selector_unifies() {
    assert_eq!(
        rsass(
            ".foo {\
             \n.bar {a: b}\
             \n> .baz {@extend .bar}\
             \n}\
             \n"
        )
        .unwrap(),
        ".foo .bar, .foo > .baz {\
         \n  a: b;\
         \n}\
         \n"
    );
}
// From "sass-spec/spec/non_conformant/extend-tests/123_test_nested_extender_with_early_child_selector.hrx"
#[test]
#[ignore] // unexpected error
fn t123_test_nested_extender_with_early_child_selector() {
    assert_eq!(
        rsass(
            ".foo {\
             \n.bar {a: b}\
             \n.bip > .baz {@extend .bar}\
             \n}\
             \n"
        )
        .unwrap(),
        ".foo .bar, .foo .bip > .baz {\
         \n  a: b;\
         \n}\
         \n"
    );
}
// From "sass-spec/spec/non_conformant/extend-tests/124_test_nested_extender_with_early_child_selector.hrx"
#[test]
#[ignore] // unexpected error
fn t124_test_nested_extender_with_early_child_selector() {
    assert_eq!(
        rsass(
            ".foo {\
             \n.bip .bar {a: b}\
             \n> .baz {@extend .bar}\
             \n}\
             \n"
        )
        .unwrap(),
        ".foo .bip .bar, .foo .bip .foo > .baz {\
         \n  a: b;\
         \n}\
         \n"
    );
}
// From "sass-spec/spec/non_conformant/extend-tests/125_test_nested_extender_with_early_child_selector.hrx"
#[test]
#[ignore] // unexpected error
fn t125_test_nested_extender_with_early_child_selector() {
    assert_eq!(
        rsass(
            ".foo > .bar {a: b}\
             \n.bip + .baz {@extend .bar}\
             \n"
        )
        .unwrap(),
        ".foo > .bar, .foo > .bip + .baz {\
         \n  a: b;\
         \n}\
         \n"
    );
}
// From "sass-spec/spec/non_conformant/extend-tests/126_test_nested_extender_with_early_child_selector.hrx"
#[test]
#[ignore] // unexpected error
fn t126_test_nested_extender_with_early_child_selector() {
    assert_eq!(
        rsass(
            ".foo + .bar {a: b}\
             \n.bip > .baz {@extend .bar}\
             \n"
        )
        .unwrap(),
        ".foo + .bar, .bip > .foo + .baz {\
         \n  a: b;\
         \n}\
         \n"
    );
}
// From "sass-spec/spec/non_conformant/extend-tests/127_test_nested_extender_with_early_child_selector.hrx"
#[test]
#[ignore] // unexpected error
fn t127_test_nested_extender_with_early_child_selector() {
    assert_eq!(
        rsass(
            ".foo > .bar {a: b}\
             \n.bip > .baz {@extend .bar}\
             \n"
        )
        .unwrap(),
        ".foo > .bar, .bip.foo > .baz {\
         \n  a: b;\
         \n}\
         \n"
    );
}
// From "sass-spec/spec/non_conformant/extend-tests/128_test_nested_extender_with_sibling_selector.hrx"
#[test]
#[ignore] // unexpected error
fn t128_test_nested_extender_with_sibling_selector() {
    assert_eq!(
        rsass(
            ".baz .foo {a: b}\
             \nfoo + bar {@extend .foo}\
             \n"
        )
        .unwrap(),
        ".baz .foo, .baz foo + bar {\
         \n  a: b;\
         \n}\
         \n"
    );
}
// From "sass-spec/spec/non_conformant/extend-tests/129_test_nested_extender_with_hacky_selector.hrx"
#[test]
#[ignore] // unexpected error
fn t129_test_nested_extender_with_hacky_selector() {
    assert_eq!(
        rsass(
            ".baz .foo {a: b}\
             \nfoo + > > + bar {@extend .foo}\
             \n"
        )
        .unwrap(),
        ".baz .foo, .baz foo + > > + bar, foo .baz + > > + bar {\
         \n  a: b;\
         \n}\
         \n"
    );
}
// From "sass-spec/spec/non_conformant/extend-tests/130_test_nested_extender_with_hacky_selector.hrx"
#[test]
#[ignore] // unexpected error
fn t130_test_nested_extender_with_hacky_selector() {
    assert_eq!(
        rsass(
            ".baz .foo {a: b}\
             \n> > bar {@extend .foo}\
             \n"
        )
        .unwrap(),
        ".baz .foo, > > .baz bar {\
         \n  a: b;\
         \n}\
         \n"
    );
}
// From "sass-spec/spec/non_conformant/extend-tests/131_test_nested_extender_merges_with_same_selector.hrx"
#[test]
#[ignore] // unexpected error
fn t131_test_nested_extender_merges_with_same_selector() {
    assert_eq!(
        rsass(
            ".foo {\
             \n.bar {a: b}\
             \n.baz {@extend .bar} }\
             \n"
        )
        .unwrap(),
        ".foo .bar, .foo .baz {\
         \n  a: b;\
         \n}\
         \n"
    );
}
// From "sass-spec/spec/non_conformant/extend-tests/132_test_nested_extender_with_child_selector_merges_with_same_selector.hrx"
#[test]
#[ignore] // unexpected error
fn t132_test_nested_extender_with_child_selector_merges_with_same_selector() {
    assert_eq!(
        rsass(
            ".foo > .bar .baz {a: b}\
             \n.foo > .bar .bang {@extend .baz}\
             \n"
        )
        .unwrap(),
        ".foo > .bar .baz, .foo > .bar .bang {\
         \n  a: b;\
         \n}\
         \n"
    );
}
// From "sass-spec/spec/non_conformant/extend-tests/133_test_combinator_unification_for_hacky_combinators.hrx"
#[test]
#[ignore] // unexpected error
fn t133_test_combinator_unification_for_hacky_combinators() {
    assert_eq!(
        rsass(
            ".a > + x {a: b}\
             \n.b y {@extend x}\
             \n"
        )
        .unwrap(),
        ".a > + x, .a .b > + y, .b .a > + y {\
         \n  a: b;\
         \n}\
         \n"
    );
}
// From "sass-spec/spec/non_conformant/extend-tests/134_test_combinator_unification_for_hacky_combinators.hrx"
#[test]
#[ignore] // unexpected error
fn t134_test_combinator_unification_for_hacky_combinators() {
    assert_eq!(
        rsass(
            ".a x {a: b}\
             \n.b > + y {@extend x}\
             \n"
        )
        .unwrap(),
        ".a x, .a .b > + y, .b .a > + y {\
         \n  a: b;\
         \n}\
         \n"
    );
}
// From "sass-spec/spec/non_conformant/extend-tests/135_test_combinator_unification_for_hacky_combinators.hrx"
#[test]
#[ignore] // unexpected error
fn t135_test_combinator_unification_for_hacky_combinators() {
    assert_eq!(
        rsass(
            ".a > + x {a: b}\
             \n.b > + y {@extend x}\
             \n"
        )
        .unwrap(),
        ".a > + x, .a .b > + y, .b .a > + y {\
         \n  a: b;\
         \n}\
         \n"
    );
}
// From "sass-spec/spec/non_conformant/extend-tests/136_test_combinator_unification_for_hacky_combinators.hrx"
#[test]
#[ignore] // unexpected error
fn t136_test_combinator_unification_for_hacky_combinators() {
assert_eq!(
rsass(
".a ~ > + x {a: b}\
\n.b > + y {@extend x}\
\n"
)
.unwrap(),
".a ~ > + x, .a .b ~ > + y, .b .a ~ > + y {\
\n a: b;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/137_test_combinator_unification_for_hacky_combinators.hrx"
#[test]
#[ignore] // unexpected error
fn t137_test_combinator_unification_for_hacky_combinators() {
assert_eq!(
rsass(
".a + > x {a: b}\
\n.b > + y {@extend x}\
\n"
)
.unwrap(),
".a + > x {\
\n a: b;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/138_test_combinator_unification_for_hacky_combinators.hrx"
#[test]
#[ignore] // unexpected error
fn t138_test_combinator_unification_for_hacky_combinators() {
assert_eq!(
rsass(
".a + > x {a: b}\
\n.b > + y {@extend x}\
\n"
)
.unwrap(),
".a + > x {\
\n a: b;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/139_test_combinator_unification_for_hacky_combinators.hrx"
#[test]
#[ignore] // unexpected error
fn t139_test_combinator_unification_for_hacky_combinators() {
assert_eq!(
rsass(
".a ~ > + .b > x {a: b}\
\n.c > + .d > y {@extend x}\
\n"
)
.unwrap(),
".a ~ > + .b > x, .a .c ~ > + .d.b > y, .c .a ~ > + .d.b > y {\
\n a: b;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/140_test_combinator_unification_double_tilde.hrx"
#[test]
#[ignore] // unexpected error
fn t140_test_combinator_unification_double_tilde() {
assert_eq!(
rsass(
".a.b ~ x {a: b}\
\n.a ~ y {@extend x}\
\n"
)
.unwrap(),
".a.b ~ x, .a.b ~ y {\
\n a: b;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/141_test_combinator_unification_double_tilde.hrx"
#[test]
#[ignore] // unexpected error
fn t141_test_combinator_unification_double_tilde() {
assert_eq!(
rsass(
".a ~ x {a: b}\
\n.a.b ~ y {@extend x}\
\n"
)
.unwrap(),
".a ~ x, .a.b ~ y {\
\n a: b;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/142_test_combinator_unification_double_tilde.hrx"
#[test]
#[ignore] // unexpected error
fn t142_test_combinator_unification_double_tilde() {
assert_eq!(
rsass(
".a ~ x {a: b}\
\n.b ~ y {@extend x}\
\n"
)
.unwrap(),
".a ~ x, .a ~ .b ~ y, .b ~ .a ~ y, .b.a ~ y {\
\n a: b;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/143_test_combinator_unification_double_tilde.hrx"
#[test]
#[ignore] // unexpected error
fn t143_test_combinator_unification_double_tilde() {
assert_eq!(
rsass(
"a.a ~ x {a: b}\
\nb.b ~ y {@extend x}\
\n"
)
.unwrap(),
"a.a ~ x, a.a ~ b.b ~ y, b.b ~ a.a ~ y {\
\n a: b;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/144_test_combinator_unification_tilde_plus.hrx"
#[test]
#[ignore] // unexpected error
fn t144_test_combinator_unification_tilde_plus() {
assert_eq!(
rsass(
".a.b + x {a: b}\
\n.a ~ y {@extend x}\
\n"
)
.unwrap(),
".a.b + x, .a.b + y {\
\n a: b;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/145_test_combinator_unification_tilde_plus.hrx"
#[test]
#[ignore] // unexpected error
fn t145_test_combinator_unification_tilde_plus() {
assert_eq!(
rsass(
".a + x {a: b}\
\n.a.b ~ y {@extend x}\
\n"
)
.unwrap(),
".a + x, .a.b ~ .a + y, .a.b + y {\
\n a: b;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/146_test_combinator_unification_tilde_plus.hrx"
#[test]
#[ignore] // unexpected error
fn t146_test_combinator_unification_tilde_plus() {
assert_eq!(
rsass(
".a + x {a: b}\
\n.b ~ y {@extend x}\
\n"
)
.unwrap(),
".a + x, .b ~ .a + y, .b.a + y {\
\n a: b;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/147_test_combinator_unification_tilde_plus.hrx"
#[test]
#[ignore] // unexpected error
fn t147_test_combinator_unification_tilde_plus() {
assert_eq!(
rsass(
"a.a + x {a: b}\
\nb.b ~ y {@extend x}\
\n"
)
.unwrap(),
"a.a + x, b.b ~ a.a + y {\
\n a: b;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/148_test_combinator_unification_tilde_plus.hrx"
#[test]
#[ignore] // unexpected error
fn t148_test_combinator_unification_tilde_plus() {
assert_eq!(
rsass(
".a.b ~ x {a: b}\
\n.a + y {@extend x}\
\n"
)
.unwrap(),
".a.b ~ x, .a.b ~ .a + y, .a.b + y {\
\n a: b;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/149_test_combinator_unification_tilde_plus.hrx"
#[test]
#[ignore] // unexpected error
fn t149_test_combinator_unification_tilde_plus() {
assert_eq!(
rsass(
".a ~ x {a: b}\
\n.a.b + y {@extend x}\
\n"
)
.unwrap(),
".a ~ x, .a.b + y {\
\n a: b;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/150_test_combinator_unification_tilde_plus.hrx"
#[test]
#[ignore] // unexpected error
fn t150_test_combinator_unification_tilde_plus() {
assert_eq!(
rsass(
".a ~ x {a: b}\
\n.b + y {@extend x}\
\n"
)
.unwrap(),
".a ~ x, .a ~ .b + y, .b.a + y {\
\n a: b;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/151_test_combinator_unification_tilde_plus.hrx"
#[test]
#[ignore] // unexpected error
fn t151_test_combinator_unification_tilde_plus() {
assert_eq!(
rsass(
"a.a ~ x {a: b}\
\nb.b + y {@extend x}\
\n"
)
.unwrap(),
"a.a ~ x, a.a ~ b.b + y {\
\n a: b;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/152_test_combinator_unification_angle_sibling.hrx"
#[test]
#[ignore] // unexpected error
fn t152_test_combinator_unification_angle_sibling() {
assert_eq!(
rsass(
".a > x {a: b}\
\n.b ~ y {@extend x}\
\n"
)
.unwrap(),
".a > x, .a > .b ~ y {\
\n a: b;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/153_test_combinator_unification_angle_sibling.hrx"
#[test]
#[ignore] // unexpected error
fn t153_test_combinator_unification_angle_sibling() {
assert_eq!(
rsass(
".a > x {a: b}\
\n.b + y {@extend x}\
\n"
)
.unwrap(),
".a > x, .a > .b + y {\
\n a: b;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/154_test_combinator_unification_angle_sibling.hrx"
#[test]
#[ignore] // unexpected error
fn t154_test_combinator_unification_angle_sibling() {
assert_eq!(
rsass(
".a ~ x {a: b}\
\n.b > y {@extend x}\
\n"
)
.unwrap(),
".a ~ x, .b > .a ~ y {\
\n a: b;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/155_test_combinator_unification_angle_sibling.hrx"
#[test]
#[ignore] // unexpected error
fn t155_test_combinator_unification_angle_sibling() {
assert_eq!(
rsass(
".a + x {a: b}\
\n.b > y {@extend x}\
\n"
)
.unwrap(),
".a + x, .b > .a + y {\
\n a: b;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/156_test_combinator_unification_double_angle.hrx"
#[test]
#[ignore] // unexpected error
fn t156_test_combinator_unification_double_angle() {
assert_eq!(
rsass(
".a.b > x {a: b}\
\n.b > y {@extend x}\
\n"
)
.unwrap(),
".a.b > x, .b.a > y {\
\n a: b;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/157_test_combinator_unification_double_angle.hrx"
#[test]
#[ignore] // unexpected error
fn t157_test_combinator_unification_double_angle() {
assert_eq!(
rsass(
".a > x {a: b}\
\n.a.b > y {@extend x}\
\n"
)
.unwrap(),
".a > x, .a.b > y {\
\n a: b;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/158_test_combinator_unification_double_angle.hrx"
#[test]
#[ignore] // unexpected error
fn t158_test_combinator_unification_double_angle() {
assert_eq!(
rsass(
".a > x {a: b}\
\n.b > y {@extend x}\
\n"
)
.unwrap(),
".a > x, .b.a > y {\
\n a: b;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/159_test_combinator_unification_double_angle.hrx"
#[test]
#[ignore] // unexpected error
fn t159_test_combinator_unification_double_angle() {
assert_eq!(
rsass(
"a.a > x {a: b}\
\nb.b > y {@extend x}\
\n"
)
.unwrap(),
"a.a > x {\
\n a: b;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/160_test_combinator_unification_double_plus.hrx"
#[test]
#[ignore] // unexpected error
fn t160_test_combinator_unification_double_plus() {
assert_eq!(
rsass(
".a.b + x {a: b}\
\n.b + y {@extend x}\
\n"
)
.unwrap(),
".a.b + x, .b.a + y {\
\n a: b;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/161_test_combinator_unification_double_plus.hrx"
#[test]
#[ignore] // unexpected error
fn t161_test_combinator_unification_double_plus() {
assert_eq!(
rsass(
".a + x {a: b}\
\n.a.b + y {@extend x}\
\n"
)
.unwrap(),
".a + x, .a.b + y {\
\n a: b;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/162_test_combinator_unification_double_plus.hrx"
#[test]
#[ignore] // unexpected error
fn t162_test_combinator_unification_double_plus() {
assert_eq!(
rsass(
".a + x {a: b}\
\n.b + y {@extend x}\
\n"
)
.unwrap(),
".a + x, .b.a + y {\
\n a: b;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/163_test_combinator_unification_double_plus.hrx"
#[test]
#[ignore] // unexpected error
fn t163_test_combinator_unification_double_plus() {
assert_eq!(
rsass(
"a.a + x {a: b}\
\nb.b + y {@extend x}\
\n"
)
.unwrap(),
"a.a + x {\
\n a: b;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/164_test_combinator_unification_angle_space.hrx"
#[test]
#[ignore] // unexpected error
fn t164_test_combinator_unification_angle_space() {
assert_eq!(
rsass(
".a.b > x {a: b}\
\n.a y {@extend x}\
\n"
)
.unwrap(),
".a.b > x, .a.b > y {\
\n a: b;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/165_test_combinator_unification_angle_space.hrx"
#[test]
#[ignore] // unexpected error
fn t165_test_combinator_unification_angle_space() {
assert_eq!(
rsass(
".a > x {a: b}\
\n.a.b y {@extend x}\
\n"
)
.unwrap(),
".a > x, .a.b .a > y {\
\n a: b;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/166_test_combinator_unification_angle_space.hrx"
#[test]
#[ignore] // unexpected error
fn t166_test_combinator_unification_angle_space() {
assert_eq!(
rsass(
".a > x {a: b}\
\n.b y {@extend x}\
\n"
)
.unwrap(),
".a > x, .b .a > y {\
\n a: b;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/167_test_combinator_unification_angle_space.hrx"
#[test]
#[ignore] // unexpected error
fn t167_test_combinator_unification_angle_space() {
assert_eq!(
rsass(
".a.b x {a: b}\
\n.a > y {@extend x}\
\n"
)
.unwrap(),
".a.b x, .a.b .a > y {\
\n a: b;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/168_test_combinator_unification_angle_space.hrx"
#[test]
#[ignore] // unexpected error
fn t168_test_combinator_unification_angle_space() {
assert_eq!(
rsass(
".a x {a: b}\
\n.a.b > y {@extend x}\
\n"
)
.unwrap(),
".a x, .a.b > y {\
\n a: b;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/169_test_combinator_unification_angle_space.hrx"
#[test]
#[ignore] // unexpected error
fn t169_test_combinator_unification_angle_space() {
assert_eq!(
rsass(
".a x {a: b}\
\n.b > y {@extend x}\
\n"
)
.unwrap(),
".a x, .a .b > y {\
\n a: b;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/170_test_combinator_unification_plus_space.hrx"
#[test]
#[ignore] // unexpected error
fn t170_test_combinator_unification_plus_space() {
assert_eq!(
rsass(
".a.b + x {a: b}\
\n.a y {@extend x}\
\n"
)
.unwrap(),
".a.b + x, .a .a.b + y {\
\n a: b;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/171_test_combinator_unification_plus_space.hrx"
#[test]
#[ignore] // unexpected error
fn t171_test_combinator_unification_plus_space() {
assert_eq!(
rsass(
".a + x {a: b}\
\n.a.b y {@extend x}\
\n"
)
.unwrap(),
".a + x, .a.b .a + y {\
\n a: b;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/172_test_combinator_unification_plus_space.hrx"
#[test]
#[ignore] // unexpected error
fn t172_test_combinator_unification_plus_space() {
assert_eq!(
rsass(
".a + x {a: b}\
\n.b y {@extend x}\
\n"
)
.unwrap(),
".a + x, .b .a + y {\
\n a: b;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/173_test_combinator_unification_plus_space.hrx"
#[test]
#[ignore] // unexpected error
fn t173_test_combinator_unification_plus_space() {
assert_eq!(
rsass(
".a.b x {a: b}\
\n.a + y {@extend x}\
\n"
)
.unwrap(),
".a.b x, .a.b .a + y {\
\n a: b;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/174_test_combinator_unification_plus_space.hrx"
#[test]
#[ignore] // unexpected error
fn t174_test_combinator_unification_plus_space() {
assert_eq!(
rsass(
".a x {a: b}\
\n.a.b + y {@extend x}\
\n"
)
.unwrap(),
".a x, .a .a.b + y {\
\n a: b;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/175_test_combinator_unification_plus_space.hrx"
#[test]
#[ignore] // unexpected error
fn t175_test_combinator_unification_plus_space() {
assert_eq!(
rsass(
".a x {a: b}\
\n.b + y {@extend x}\
\n"
)
.unwrap(),
".a x, .a .b + y {\
\n a: b;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/176_test_combinator_unification_nested.hrx"
#[test]
#[ignore] // unexpected error
fn t176_test_combinator_unification_nested() {
assert_eq!(
rsass(
".a > .b + x {a: b}\
\n.c > .d + y {@extend x}\
\n"
)
.unwrap(),
".a > .b + x, .c.a > .d.b + y {\
\n a: b;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/177_test_combinator_unification_nested.hrx"
#[test]
#[ignore] // unexpected error
fn t177_test_combinator_unification_nested() {
assert_eq!(
rsass(
".a > .b + x {a: b}\
\n.c > y {@extend x}\
\n"
)
.unwrap(),
".a > .b + x, .c.a > .b + y {\
\n a: b;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/178_test_combinator_unification_with_newlines.hrx"
#[test]
#[ignore] // unexpected error
fn t178_test_combinator_unification_with_newlines() {
assert_eq!(
rsass(
".a >\
\n.b\
\n+ x {a: b}\
\n.c\
\n> .d +\
\ny {@extend x}\
\n"
)
.unwrap(),
".a > .b + x, .c.a > .d.b + y {\
\n a: b;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/179_test_extend_self_loop.hrx"
#[test]
#[ignore] // unexpected error
fn t179_test_extend_self_loop() {
assert_eq!(
rsass(
".foo {a: b; @extend .foo}\
\n"
)
.unwrap(),
".foo {\
\n a: b;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/180_test_basic_extend_loop.hrx"
#[test]
#[ignore] // unexpected error
fn t180_test_basic_extend_loop() {
assert_eq!(
rsass(
".foo {a: b; @extend .bar}\
\n.bar {c: d; @extend .foo}\
\n"
)
.unwrap(),
".foo, .bar {\
\n a: b;\
\n}\
\n.bar, .foo {\
\n c: d;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/181_test_three_level_extend_loop.hrx"
#[test]
#[ignore] // unexpected error
fn t181_test_three_level_extend_loop() {
assert_eq!(
rsass(
".foo {a: b; @extend .bar}\
\n.bar {c: d; @extend .baz}\
\n.baz {e: f; @extend .foo}\
\n"
)
.unwrap(),
".foo, .baz, .bar {\
\n a: b;\
\n}\
\n.bar, .foo, .baz {\
\n c: d;\
\n}\
\n.baz, .bar, .foo {\
\n e: f;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/182_test_nested_extend_loop.hrx"
#[test]
#[ignore] // unexpected error
fn t182_test_nested_extend_loop() {
assert_eq!(
rsass(
".bar {\
\na: b;\
\n.foo {c: d; @extend .bar}\
\n}\
\n"
)
.unwrap(),
".bar, .bar .foo {\
\n a: b;\
\n}\
\n.bar .foo {\
\n c: d;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/183_test_multiple_extender_merges_with_superset_selector.hrx"
#[test]
#[ignore] // unexpected error
fn t183_test_multiple_extender_merges_with_superset_selector() {
assert_eq!(
rsass(
".foo {@extend .bar; @extend .baz}\
\na.bar.baz {a: b}\
\n"
)
.unwrap(),
"a.bar.baz, a.foo {\
\n a: b;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/184_test_control_flow_if.hrx"
#[test]
#[ignore] // wrong result
fn t184_test_control_flow_if() {
assert_eq!(
rsass(
".true { color: green; }\
\n.false { color: red; }\
\n.also-true {\
\n@if true { @extend .true; }\
\n@else { @extend .false; }\
\n}\
\n.also-false {\
\n@if false { @extend .true; }\
\n@else { @extend .false; }\
\n}\
\n"
)
.unwrap(),
".true, .also-true {\
\n color: green;\
\n}\
\n.false, .also-false {\
\n color: red;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/185_test_control_flow_for.hrx"
#[test]
#[ignore] // wrong result
fn t185_test_control_flow_for() {
assert_eq!(
rsass(
".base-0 { color: green; }\
\n.base-1 { display: block; }\
\n.base-2 { border: 1px solid blue; }\
\n.added {\
\n@for $i from 0 to 3 {\
\n @extend .base-#{$i};\
\n}\
\n}\
\n"
)
.unwrap(),
".base-0, .added {\
\n color: green;\
\n}\
\n.base-1, .added {\
\n display: block;\
\n}\
\n.base-2, .added {\
\n border: 1px solid blue;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/186_test_control_flow_while.hrx"
#[test]
#[ignore] // wrong result
fn t186_test_control_flow_while() {
assert_eq!(
rsass(
".base-0 { color: green; }\
\n.base-1 { display: block; }\
\n.base-2 { border: 1px solid blue; }\
\n.added {\
\n$i : 0;\
\n@while $i < 3 {\
\n @extend .base-#{$i};\
\n $i : $i + 1;\
\n}\
\n}\
\n"
)
.unwrap(),
".base-0, .added {\
\n color: green;\
\n}\
\n.base-1, .added {\
\n display: block;\
\n}\
\n.base-2, .added {\
\n border: 1px solid blue;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/187_test_basic_placeholder_selector.hrx"
#[test]
#[ignore] // unexpected error
fn t187_test_basic_placeholder_selector() {
assert_eq!(
rsass(
"%foo {a: b}\
\n.bar {@extend %foo}\
\n"
)
.unwrap(),
".bar {\
\n a: b;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/188_test_unused_placeholder_selector.hrx"
#[test]
#[ignore] // unexpected error
fn t188_test_unused_placeholder_selector() {
assert_eq!(
rsass(
"%foo {color: blue}\
\n%bar {color: red}\
\n.baz {@extend %foo}\
\n"
)
.unwrap(),
".baz {\
\n color: blue;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/189_test_placeholder_descendant_selector.hrx"
#[test]
#[ignore] // unexpected error
fn t189_test_placeholder_descendant_selector() {
assert_eq!(
rsass(
"#context %foo a {a: b}\
\n.bar {@extend %foo}\
\n"
)
.unwrap(),
"#context .bar a {\
\n a: b;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/190_test_semi_placeholder_selector.hrx"
#[test]
#[ignore] // wrong result
fn t190_test_semi_placeholder_selector() {
assert_eq!(
rsass(
"#context %foo, .bar .baz {color: blue}\
\n\
\n.bat {\
\n @extend %foo;\
\n}\
\n"
)
.unwrap(),
"#context .bat, .bar .baz {\
\n color: blue;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/191_test_placeholder_selector_with_multiple_extenders.hrx"
#[test]
#[ignore] // unexpected error
fn t191_test_placeholder_selector_with_multiple_extenders() {
assert_eq!(
rsass(
"%foo {color: blue}\
\n.bar {@extend %foo}\
\n.baz {@extend %foo}\
\n"
)
.unwrap(),
".baz, .bar {\
\n color: blue;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/192_test_placeholder_interpolation.hrx"
#[test]
#[ignore] // unexpected error
fn t192_test_placeholder_interpolation() {
assert_eq!(
rsass(
"$foo: foo;\
\n\
\n%#{$foo} {color: blue}\
\n.bar {@extend %foo}\
\n"
)
.unwrap(),
".bar {\
\n color: blue;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/193_test_media_in_placeholder_selector.hrx"
#[test]
#[ignore] // wrong result
fn t193_test_media_in_placeholder_selector() {
assert_eq!(
rsass(
"%foo {bar {@media screen {a {b: c}}}}\
\n.baz {c: d}\
\n"
)
.unwrap(),
".baz {\
\n c: d;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/194_test_extend_within_media.hrx"
#[test]
#[ignore] // unexpected error
fn t194_test_extend_within_media() {
assert_eq!(
rsass(
"@media screen {\
\n.foo {a: b}\
\n.bar {@extend .foo}\
\n}\
\n"
)
.unwrap(),
"@media screen {\
\n .foo, .bar {\
\n a: b;\
\n }\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/195_test_extend_within_unknown_directive.hrx"
#[test]
#[ignore] // unexpected error
fn t195_test_extend_within_unknown_directive() {
assert_eq!(
rsass(
"@flooblehoof {\
\n.foo {a: b}\
\n.bar {@extend .foo}\
\n}\
\n"
)
.unwrap(),
"@flooblehoof {\
\n .foo, .bar {\
\n a: b;\
\n }\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/196_test_extend_within_nested_directives.hrx"
#[test]
#[ignore] // unexpected error
fn t196_test_extend_within_nested_directives() {
assert_eq!(
rsass(
"@media screen {\
\n@flooblehoof {\
\n .foo {a: b}\
\n .bar {@extend .foo}\
\n}\
\n}\
\n"
)
.unwrap(),
"@media screen {\
\n @flooblehoof {\
\n .foo, .bar {\
\n a: b;\
\n }\
\n }\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/197_test_extend_within_disparate_media.hrx"
#[test]
#[ignore] // unexpected error
fn t197_test_extend_within_disparate_media() {
assert_eq!(
rsass(
"@media screen {.foo {a: b}}\
\n@media screen {.bar {@extend .foo}}\
\n"
)
.unwrap(),
"@media screen {\
\n .foo, .bar {\
\n a: b;\
\n }\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/198_test_extend_within_disparate_unknown_directive.hrx"
#[test]
#[ignore] // unexpected error
fn t198_test_extend_within_disparate_unknown_directive() {
assert_eq!(
rsass(
"@flooblehoof {.foo {a: b}}\
\n@flooblehoof {.bar {@extend .foo}}\
\n"
)
.unwrap(),
"@flooblehoof {\
\n .foo, .bar {\
\n a: b;\
\n }\
\n}\
\n@flooblehoof {}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/199_test_extend_within_disparate_nested_directives.hrx"
#[test]
#[ignore] // unexepected error
fn t199_test_extend_within_disparate_nested_directives() {
assert_eq!(
rsass(
"@media screen {@flooblehoof {.foo {a: b}}}\
\n@media screen {@flooblehoof {.bar {@extend .foo}}}\
\n"
)
.unwrap(),
"@media screen {\
\n @flooblehoof {\
\n .foo, .bar {\
\n a: b;\
\n }\
\n }\
\n}\
\n@media screen {\
\n @flooblehoof {}\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/206_test_extend_succeeds_when_one_extension_fails_but_others_dont.hrx"
#[test]
#[ignore] // unexpected error
fn t206_test_extend_succeeds_when_one_extension_fails_but_others_dont() {
assert_eq!(
rsass(
"a.bar {a: b}\
\n.bar {c: d}\
\nb.foo {@extend .bar}\
\n"
)
.unwrap(),
"a.bar {\
\n a: b;\
\n}\
\n.bar, b.foo {\
\n c: d;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/207_test_optional_extend_succeeds_when_extendee_doesnt_exist.hrx"
#[test]
#[ignore] // unexpected error
fn t207_test_optional_extend_succeeds_when_extendee_doesnt_exist() {
assert_eq!(
rsass(
".foo {@extend .bar !optional}\
\n"
)
.unwrap(),
""
);
}
// From "sass-spec/spec/non_conformant/extend-tests/208_test_optional_extend_succeeds_when_extension_fails.hrx"
#[test]
#[ignore] // unexpected error
fn t208_test_optional_extend_succeeds_when_extension_fails() {
assert_eq!(
rsass(
"a.bar {a: b}\
\nb.foo {@extend .bar !optional}\
\n"
)
.unwrap(),
"a.bar {\
\n a: b;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/209_test_pseudo_element_superselector.hrx"
#[test]
#[ignore] // unexpected error
fn t209_test_pseudo_element_superselector() {
assert_eq!(
rsass(
"%x#bar {a: b} // Add an id to make the results have high specificity\
\n%y, %y::fblthp {@extend %x}\
\na {@extend %y}\
\n"
)
.unwrap(),
"a#bar, a#bar::fblthp {\
\n a: b;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/210_test_pseudo_element_superselector.hrx"
#[test]
#[ignore] // unexpected error
fn t210_test_pseudo_element_superselector() {
assert_eq!(
rsass(
"%x#bar {a: b}\
\n%y, %y:fblthp {@extend %x}\
\na {@extend %y}\
\n"
)
.unwrap(),
"a#bar {\
\n a: b;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/211_test_pseudo_element_superselector.hrx"
#[test]
#[ignore] // unexpected error
fn t211_test_pseudo_element_superselector() {
assert_eq!(
rsass(
"%x#bar {a: b}\
\n%y, %y:first-line {@extend %x}\
\na {@extend %y}\
\n"
)
.unwrap(),
"a#bar, a#bar:first-line {\
\n a: b;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/212_test_pseudo_element_superselector.hrx"
#[test]
#[ignore] // unexpected error
fn t212_test_pseudo_element_superselector() {
assert_eq!(
rsass(
"%x#bar {a: b}\
\n%y, %y:first-letter {@extend %x}\
\na {@extend %y}\
\n"
)
.unwrap(),
"a#bar, a#bar:first-letter {\
\n a: b;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/213_test_pseudo_element_superselector.hrx"
#[test]
#[ignore] // unexpected error
fn t213_test_pseudo_element_superselector() {
assert_eq!(
rsass(
"%x#bar {a: b}\
\n%y, %y:before {@extend %x}\
\na {@extend %y}\
\n"
)
.unwrap(),
"a#bar, a#bar:before {\
\n a: b;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/214_test_pseudo_element_superselector.hrx"
#[test]
#[ignore] // unexpected error
fn t214_test_pseudo_element_superselector() {
assert_eq!(
rsass(
"%x#bar {a: b}\
\n%y, %y:after {@extend %x}\
\na {@extend %y}\
\n"
)
.unwrap(),
"a#bar, a#bar:after {\
\n a: b;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/215_test_multiple_source_redundancy_elimination.hrx"
#[test]
#[ignore] // unexpected error
fn t215_test_multiple_source_redundancy_elimination() {
assert_eq!(
rsass(
"%default-color {color: red}\
\n%alt-color {color: green}\
\n\
\n%default-style {\
\n@extend %default-color;\
\n&:hover {@extend %alt-color}\
\n&:active {@extend %default-color}\
\n}\
\n\
\n.test-case {@extend %default-style}\
\n"
)
.unwrap(),
".test-case:active, .test-case {\
\n color: red;\
\n}\
\n.test-case:hover {\
\n color: green;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/216_test_nested_sibling_extend.hrx"
#[test]
#[ignore] // unexpected error
fn t216_test_nested_sibling_extend() {
assert_eq!(
rsass(
".foo {@extend .bar}\
\n\
\n.parent {\
\n.bar {\
\n width: 2000px;\
\n}\
\n.foo {\
\n @extend .bar\
\n}\
\n}\
\n"
)
.unwrap(),
".parent .bar, .parent .foo {\
\n width: 2000px;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/217_test_parent_and_sibling_extend.hrx"
#[test]
#[ignore] // unexpected error
fn t217_test_parent_and_sibling_extend() {
assert_eq!(
rsass(
"%foo %bar%baz {c: d}\
\n\
\n.parent1 {\
\n@extend %foo;\
\n.child1 {@extend %bar}\
\n}\
\n\
\n.parent2 {\
\n@extend %foo;\
\n.child2 {@extend %baz}\
\n}\
\n"
)
.unwrap(),
".parent1 .parent2 .child1.child2, .parent2 .parent1 .child1.child2 {\
\n c: d;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/218_test_nested_extend_specificity.hrx"
#[test]
#[ignore] // unexpected error
fn t218_test_nested_extend_specificity() {
assert_eq!(
rsass(
"%foo {a: b}\
\n\
\na {\
\n:b {@extend %foo}\
\n:b:c {@extend %foo}\
\n}\
\n"
)
.unwrap(),
"a :b:c, a :b {\
\n a: b;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/219_test_nested_double_extend_optimization.hrx"
#[test]
#[ignore] // wrong result
fn t219_test_nested_double_extend_optimization() {
assert_eq!(
rsass(
"%foo %bar {\
\na: b;\
\n}\
\n\
\n.parent1 {\
\n@extend %foo;\
\n\
\n.child {\
\n @extend %bar;\
\n}\
\n}\
\n\
\n.parent2 {\
\n@extend %foo;\
\n}\
\n"
)
.unwrap(),
".parent1 .child {\
\n a: b;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/220_test_extend_in_double_nested_media_query.hrx"
#[test]
#[ignore] // unexpected error
fn t220_test_extend_in_double_nested_media_query() {
assert_eq!(
rsass(
"@media all {\
\n@media (orientation: landscape) {\
\n %foo {color: blue}\
\n .bar {@extend %foo}\
\n}\
\n}\
\n"
)
.unwrap(),
"@media (orientation: landscape) {\
\n .bar {\
\n color: blue;\
\n }\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/221_test_partially_failed_extend.hrx"
#[test]
#[ignore] // wrong result
fn t221_test_partially_failed_extend() {
assert_eq!(
rsass(
"test { @extend .rc; }\
\n.rc {color: white;}\
\n.prices span.pill span.rc {color: red;}\
\n"
)
.unwrap(),
".rc, test {\
\n color: white;\
\n}\
\n.prices span.pill span.rc {\
\n color: red;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/222_test_newline_near_combinator.hrx"
#[test]
#[ignore] // unexpected error
fn t222_test_newline_near_combinator() {
assert_eq!(
rsass(
".a +\
\n.b x {a: b}\
\n.c y {@extend x}\
\n"
)
.unwrap(),
".a +\
\n.b x, .a +\
\n.b .c y, .c .a +\
\n.b y {\
\n a: b;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/223_test_duplicated_selector_with_newlines.hrx"
#[test]
#[ignore] // unexpected error
fn t223_test_duplicated_selector_with_newlines() {
assert_eq!(
rsass(
".example-1-1,\
\n.example-1-2,\
\n.example-1-3 {\
\na: b;\
\n}\
\n\
\n.my-page-1 .my-module-1-1 {@extend .example-1-2}\
\n"
)
.unwrap(),
".example-1-1,\
\n.example-1-2,\
\n.my-page-1 .my-module-1-1,\
\n.example-1-3 {\
\n a: b;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/224_test_nested_selector_with_child_selector_hack_extendee.hrx"
#[test]
#[ignore] // unexpected error
fn t224_test_nested_selector_with_child_selector_hack_extendee() {
assert_eq!(
rsass(
"> .foo {a: b}\
\nfoo bar {@extend .foo}\
\n"
)
.unwrap(),
"> .foo, > foo bar {\
\n a: b;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/225_test_nested_selector_with_child_selector_hack_extender.hrx"
#[test]
#[ignore] // unexpected error
fn t225_test_nested_selector_with_child_selector_hack_extender() {
assert_eq!(
rsass(
".foo .bar {a: b}\
\n> foo bar {@extend .bar}\
\n"
)
.unwrap(),
".foo .bar, > .foo foo bar, > foo .foo bar {\
\n a: b;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/226_test_nested_selector_with_child_selector_hack_extender_and_extendee.hrx"
#[test]
#[ignore] // unexpected error
fn t226_test_nested_selector_with_child_selector_hack_extender_and_extendee()
{
assert_eq!(
rsass(
"> .foo {a: b}\
\n> foo bar {@extend .foo}\
\n"
)
.unwrap(),
"> .foo, > foo bar {\
\n a: b;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/227_test_nested_with_child_hack_extender_and_sibling_extendee.hrx"
#[test]
#[ignore] // unexpected error
fn t227_test_nested_with_child_hack_extender_and_sibling_extendee() {
assert_eq!(
rsass(
"~ .foo {a: b}\
\n> foo bar {@extend .foo}\
\n"
)
.unwrap(),
"~ .foo {\
\n a: b;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/228_test_nested_with_child_selector_hack_extender_extendee_newline.hrx"
#[test]
#[ignore] // unexpected error
fn t228_test_nested_with_child_selector_hack_extender_extendee_newline() {
assert_eq!(
rsass(
"> .foo {a: b}\
\nflip,\
\n> foo bar {@extend .foo}\
\n"
)
.unwrap(),
"> .foo, > flip,\
\n> foo bar {\
\n a: b;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/229_test_extended_parent_and_child_redundancy_elimination.hrx"
#[test]
#[ignore] // unexpected error
fn t229_test_extended_parent_and_child_redundancy_elimination() {
assert_eq!(
rsass(
"a {\
\nb {a: b}\
\nc {@extend b}\
\n}\
\nd {@extend a}\
\n"
)
.unwrap(),
"a b, d b, a c, d c {\
\n a: b;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/230_test_extend_redundancy_elimination_when_it_would_reduce_specificity.hrx"
#[test]
#[ignore] // unexpected error
fn t230_test_extend_redundancy_elimination_when_it_would_reduce_specificity()
{
assert_eq!(
rsass(
"a {a: b}\
\na.foo {@extend a}\
\n"
)
.unwrap(),
"a, a.foo {\
\n a: b;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/231_test_extend_redundancy_elimination_when_it_would_preserve_specificity.hrx"
#[test]
#[ignore] // unexpected error
fn t231_test_extend_redundancy_elimination_when_it_would_preserve_specificity(
) {
assert_eq!(
rsass(
".bar a {a: b}\
\na.foo {@extend a}\
\n"
)
.unwrap(),
".bar a {\
\n a: b;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/232_test_extend_redundancy_elimination_never_eliminates_base_selector.hrx"
#[test]
#[ignore] // unexpected error
fn t232_test_extend_redundancy_elimination_never_eliminates_base_selector() {
assert_eq!(
rsass(
"a.foo {a: b}\
\n.foo {@extend a}\
\n"
)
.unwrap(),
"a.foo, .foo {\
\n a: b;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/233_test_extend_cross_branch_redundancy_elimination.hrx"
#[test]
#[ignore] // unexpected error
fn t233_test_extend_cross_branch_redundancy_elimination() {
assert_eq!(
rsass(
"%x .c %y {a: b}\
\n.a, .b {@extend %x}\
\n.a .d {@extend %y}\
\n"
)
.unwrap(),
".a .c .d, .b .c .a .d {\
\n a: b;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/234_test_extend_cross_branch_redundancy_elimination.hrx"
#[test]
#[ignore] // unexepected error
fn t234_test_extend_cross_branch_redundancy_elimination() {
assert_eq!(
rsass(
".e %z {a: b}\
\n%x .c %y {@extend %z}\
\n.a, .b {@extend %x}\
\n.a .d {@extend %y}\
\n"
)
.unwrap(),
".e .a .c .d, .e .b .c .a .d, .a .e .b .c .d, .a .c .e .d, .b .c .e .a .d {\
\n a: b;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/235_extend_with_universal_selector.hrx"
#[test]
#[ignore] // unexepected error
fn t235_extend_with_universal_selector() {
assert_eq!(
rsass(
"%-a *.foo1 {a: b}\
\na {@extend .foo1}\
\n-a {@extend %-a}\
\n\
\n%-b *|*.foo2 {b: b}\
\nb {@extend .foo2}\
\n-b {@extend %-b}\
\n"
)
.unwrap(),
"-a *.foo1, -a a {\
\n a: b;\
\n}\
\n-b *|*.foo2, -b b {\
\n b: b;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/236_extend_with_universal_selector_empty_namespace.hrx"
#[test]
#[ignore] // unexepected error
fn t236_extend_with_universal_selector_empty_namespace() {
assert_eq!(
rsass(
"%-a |*.foo {a: b}\
\na {@extend .foo}\
\n-a {@extend %-a}\
\n"
)
.unwrap(),
"-a |*.foo {\
\n a: b;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/237_extend_with_universal_selector_different_namespace.hrx"
#[test]
#[ignore] // unexepected error
fn t237_extend_with_universal_selector_different_namespace() {
assert_eq!(
rsass(
"%-a ns|*.foo {a: b}\
\na {@extend .foo}\
\n-a {@extend %-a}\
\n"
)
.unwrap(),
"-a ns|*.foo {\
\n a: b;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/238_unify_root_pseudoelement.hrx"
#[test]
#[ignore] // unexepected error
fn t238_unify_root_pseudoelement() {
assert_eq!(
rsass(
"// We assume that by default classes don\'t apply to the :root unless marked explicitly.\
\n:root .foo-1 { test: 1; }\
\n.bar-1 .baz-1 { @extend .foo-1; }\
\n\
\n// We know the two classes must be the same :root element so we can combine them.\
\n.foo-2:root .bar-2 { test: 2; }\
\n.baz-2:root .bang-2 { @extend .bar-2; }\
\n\
\n// This extend should not apply because the :root elements are different.\
\nhtml:root .bar-3 { test: 3; }\
\nxml:root .bang-3 { @extend .bar-3}\
\n\
\n// We assume that direct descendant of the :root is not the same element as a descendant.\
\n.foo-4:root > .bar-4 .x-4 { test: 4; }\
\n.baz-4:root .bang-4 .y-4 {@extend .x-4}\
\n"
)
.unwrap(),
":root .foo-1, :root .bar-1 .baz-1 {\
\n test: 1;\
\n}\
\n.foo-2:root .bar-2, .baz-2.foo-2:root .bang-2 {\
\n test: 2;\
\n}\
\nhtml:root .bar-3 {\
\n test: 3;\
\n}\
\n.foo-4:root > .bar-4 .x-4, .baz-4.foo-4:root > .bar-4 .bang-4 .y-4 {\
\n test: 4;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/compound-unification-in-not.hrx"
#[test]
#[ignore] // unexepected error
fn compound_unification_in_not() {
assert_eq!(
rsass(
"// Make sure compound selectors are unified when two :not()s are extended.\
\n// :not() is special here because it\'s the only selector that\'s extended by\
\n// adding to the compound selector, rather than creating a new selector list.\
\n.a {@extend .c}\
\n.b {@extend .d}\
\n:not(.c):not(.d) {x: y}\
\n"
)
.unwrap(),
":not(.c):not(.a):not(.d):not(.b) {\
\n x: y;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/does_not_move_page_block_in_media.hrx"
#[test]
#[ignore] // wrong result
fn does_not_move_page_block_in_media() {
assert_eq!(
rsass(
"@media screen {\
\n a { x:y; }\
\n @page {}\
\n}"
)
.unwrap(),
"@media screen {\
\n a {\
\n x: y;\
\n }\
\n @page {}\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/escaped_selector.hrx"
#[test]
#[ignore] // unexepected error
fn escaped_selector() {
assert_eq!(
rsass(
"// Escapes in selectors\' identifiers should be normalized before `@extend` is\
\n// applied.\
\n.foo {escape: none}\
\n\\.foo {escape: slash dot}\
\n\\2E foo {escape: hex}\
\n\
\n.bar {@extend \\02e foo}\
\n"
)
.unwrap(),
".foo {\
\n escape: none;\
\n}\
\n\\.foo, .bar {\
\n escape: slash dot;\
\n}\
\n\\.foo, .bar {\
\n escape: hex;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/extend-extender.hrx"
#[test]
#[ignore] // unexepected error
fn extend_extender() {
assert_eq!(
rsass(
"// For implementations like Dart Sass that process extensions as they occur,\
\n// extending rules that contain their own extends needs special handling.\
\n.b {@extend .a}\
\n.c {@extend .b}\
\n.a {x: y}\
\n"
)
.unwrap(),
".a, .b, .c {\
\n x: y;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/extend-loop.hrx"
#[test]
#[ignore] // unexepected error
fn extend_loop() {
assert_eq!(
rsass(
"// Make sure extend loops are handled correctly. Test in all different orderings\
\n// so we can be sure this works for implementations like Dart Sass where extend\
\n// order matters.\
\n\
\n@media order1 {\
\n .x1.y1.a1 {x: y; @extend .b1}\
\n .z1.b1 {x: y; @extend .c1}\
\n .c1 {x: y; @extend .a1}\
\n}\
\n\
\n@media order2 {\
\n .x2.y2.a2 {x: y; @extend .b2}\
\n .c2 {x: y; @extend .a2}\
\n .z2.b2 {x: y; @extend .c2}\
\n}\
\n\
\n@media order3 {\
\n .z3.b3 {x: y; @extend .c3}\
\n .x3.y3.a3 {x: y; @extend .b3}\
\n .c3 {x: y; @extend .a3}\
\n}\
\n\
\n@media order4 {\
\n .z4.b4 {x: y; @extend .c4}\
\n .c4 {x: y; @extend .a4}\
\n .x4.y4.a4 {x: y; @extend .b4}\
\n}\
\n\
\n@media order5 {\
\n .c5 {x: y; @extend .a5}\
\n .z5.b5 {x: y; @extend .c5}\
\n .x5.y5.a5 {x: y; @extend .b5}\
\n}\
\n\
\n@media order6 {\
\n .c6 {x: y; @extend .a6}\
\n .x6.y6.a6 {x: y; @extend .b6}\
\n .z6.b6 {x: y; @extend .c6}\
\n}\
\n"
)
.unwrap(),
"@media order1 {\
\n .x1.y1.a1, .x1.y1.c1, .x1.y1.z1.b1 {\
\n x: y;\
\n }\
\n .z1.b1, .z1.x1.y1.a1, .z1.x1.y1.c1, .z1.x1.y1.b1 {\
\n x: y;\
\n }\
\n .c1, .z1.b1, .z1.x1.y1.a1, .z1.x1.y1.c1, .z1.x1.y1.b1 {\
\n x: y;\
\n }\
\n}\
\n@media order2 {\
\n .x2.y2.a2, .x2.y2.c2, .x2.y2.z2.b2 {\
\n x: y;\
\n }\
\n .c2, .z2.b2, .z2.x2.y2.a2, .z2.x2.y2.c2, .z2.x2.y2.b2 {\
\n x: y;\
\n }\
\n .z2.b2, .z2.x2.y2.a2, .z2.x2.y2.c2, .z2.x2.y2.b2 {\
\n x: y;\
\n }\
\n}\
\n@media order3 {\
\n .z3.b3, .z3.x3.y3.a3, .z3.x3.y3.c3, .z3.x3.y3.b3 {\
\n x: y;\
\n }\
\n .x3.y3.a3, .x3.y3.c3, .x3.y3.z3.b3 {\
\n x: y;\
\n }\
\n .c3, .z3.b3, .z3.x3.y3.a3, .z3.x3.y3.c3, .z3.x3.y3.b3 {\
\n x: y;\
\n }\
\n}\
\n@media order4 {\
\n .z4.b4, .z4.x4.y4.a4, .z4.x4.y4.c4, .z4.x4.y4.b4 {\
\n x: y;\
\n }\
\n .c4, .z4.b4, .z4.x4.y4.a4, .z4.x4.y4.c4, .z4.x4.y4.b4 {\
\n x: y;\
\n }\
\n .x4.y4.a4, .x4.y4.c4, .x4.y4.z4.b4 {\
\n x: y;\
\n }\
\n}\
\n@media order5 {\
\n .c5, .z5.b5, .z5.x5.y5.a5, .z5.x5.y5.c5, .z5.x5.y5.b5 {\
\n x: y;\
\n }\
\n .z5.b5, .z5.x5.y5.a5, .z5.x5.y5.c5, .z5.x5.y5.b5 {\
\n x: y;\
\n }\
\n .x5.y5.a5, .x5.y5.c5, .x5.y5.z5.b5 {\
\n x: y;\
\n }\
\n}\
\n@media order6 {\
\n .c6, .z6.b6, .z6.x6.y6.a6, .z6.x6.y6.c6, .z6.x6.y6.b6 {\
\n x: y;\
\n }\
\n .x6.y6.a6, .x6.y6.c6, .x6.y6.z6.b6 {\
\n x: y;\
\n }\
\n .z6.b6, .z6.x6.y6.a6, .z6.x6.y6.c6, .z6.x6.y6.b6 {\
\n x: y;\
\n }\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/extend-result-of-extend.hrx"
#[test]
#[ignore] // unexepected error
fn extend_result_of_extend() {
assert_eq!(
rsass(
"// The result of :not(.c) being extended should itself be extendable.\
\n.a {@extend :not(.b)}\
\n.b {@extend .c}\
\n:not(.c) {x: y}\
\n"
)
.unwrap(),
":not(.c):not(.b), .a:not(.c) {\
\n x: y;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/extend-self.hrx"
#[test]
#[ignore] // unexepected error
fn extend_self() {
assert_eq!(
rsass(
"// This shouldn\'t change the selector.\
\n.c, .a .b .c, .a .c .b {x: y; @extend .c}\
\n"
)
.unwrap(),
".c, .a .b .c, .a .c .b {\
\n x: y;\
\n}\
\n"
);
}
mod fake_pseudo_element_order;
// From "sass-spec/spec/non_conformant/extend-tests/issue_146.hrx"
#[test]
#[ignore] // wrong result
fn issue_146() {
assert_eq!(
rsass(
"%btn-style-default {\
\n background: green;\
\n &:hover{\
\n background: black;\
\n }\
\n}\
\n\
\nbutton {\
\n @extend %btn-style-default;\
\n}"
)
.unwrap(),
"button {\
\n background: green;\
\n}\
\nbutton:hover {\
\n background: black;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/nested-compound-unification.hrx"
#[test]
#[ignore] // unexepected error
fn nested_compound_unification() {
assert_eq!(
rsass(
"// Make sure compound unification properly handles weaving together parent\
\n// selectors.\
\n.a .b {@extend .e}\
\n.c .d {@extend .f}\
\n.e.f {x: y}\
\n"
)
.unwrap(),
".e.f, .a .f.b, .c .e.d, .a .c .b.d, .c .a .b.d {\
\n x: y;\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/extend-tests/not-into-not-not.hrx"
// Extending `.x`, which only occurs inside a doubly-nested `:not()`, must
// leave the `:not(:not(.x))` selector unchanged (dart-sass#191 regression).
#[test]
#[ignore] // unexpected error
fn not_into_not_not() {
    assert_eq!(
        rsass(
            "// Regression test for dart-sass#191.\
\n:not(:not(.x)) {a: b}\
\n:not(.y) {@extend .x}\
\n"
        )
        .unwrap(),
        ":not(:not(.x)) {\
\n a: b;\
\n}\
\n"
    );
}
// From "sass-spec/spec/non_conformant/extend-tests/selector_list.hrx"
// `@extend a, b` must behave like two separate `@extend`s, and the target
// list must be parsed only after interpolation (`#{","}`) is resolved.
#[test]
#[ignore] // unexpected error
fn selector_list() {
    assert_eq!(
        rsass(
            ".foo {a: b}\
\n.bar {x: y}\
\n\
\n// Extending a selector list is equivalent to writing two @extends.\
\n.baz {@extend .foo, .bar}\
\n\
\n// The selector list should be parsed after interpolation is resolved.\
\n.bang {@extend .foo #{\",\"} .bar}\
\n"
        )
        .unwrap(),
        ".foo, .bang, .baz {\
\n a: b;\
\n}\
\n.bar, .bang, .baz {\
\n x: y;\
\n}\
\n"
    );
}
mod subject_operator;
|
#[macro_use]
use derive_serialize::Serialize;
#[derive(Serialize)]
pub struct Apu {
// TODO: implement
}
|
use std::io::{self, Read};
use std::fmt;
use std::fs::File;
use std::path::Path;
use std::cell::RefCell;
use memchr::memchr;
use {Span, TokenStream, LexError};
use lex::lex_str;
/// Gap (in bytes) left between the global spans of consecutive files so the
/// end offset of one file can never equal the start offset of the next.
const FILE_PADDING_BYTES: usize = 1;
/// On-disk location (file, line and column) of a span of code.
///
/// Produced by `SourceMap::locinfo`. `line` is 1-indexed and `col` is
/// 0-indexed, matching `offset_line_col`.
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
pub struct SourceLoc<'a> {
    /// Path of the source file this location points into.
    pub path: &'a Path,
    /// 1-indexed line number.
    pub line: usize,
    /// 0-indexed column number.
    pub col: usize,
}

impl<'a> fmt::Display for SourceLoc<'a> {
    /// Renders the location in the conventional `path:line:col` form.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let SourceLoc { path, line, col } = *self;
        write!(f, "{}:{}:{}", path.display(), line, col)
    }
}
/// Per-file bookkeeping stored in the thread-local `CODEMAP`.
struct FileInfo {
    /// Global (codemap-wide) byte range this file occupies.
    span: Span,
    /// Display name — the path the file was read from, or whatever name
    /// the caller passed to `string_to_span`.
    name: String,
    /// Full source text of the file.
    src: String,
    /// Byte offset of the start of every line in `src`; first entry is 0.
    lines: Vec<usize>,
}
/// All registered files plus the next free global byte offset.
struct CodeMap {
    files: Vec<FileInfo>,
    /// Next unused global offset; advanced by `string_to_span` for each new
    /// file (plus `FILE_PADDING_BYTES`) so spans of distinct files never touch.
    offset: usize,
}

// One codemap per thread: spans minted on one thread are resolved against
// that thread's map only.
thread_local! {
    static CODEMAP: RefCell<CodeMap> = RefCell::new(CodeMap {
        files: Vec::new(),
        offset: 0,
    });
}
/// Translates a byte offset within one file into a `(line, col)` pair.
///
/// `lines` is the sorted table of line-start offsets built by
/// `lines_offsets` (its first element is always 0), so a binary search
/// either hits a line start exactly (column 0) or lands just past the line
/// start that contains `off`.
///
/// NOTE: This produces line and column. Line is 1-indexed, column is 0-indexed
///
/// Reviewed: parameter loosened from `&Vec<usize>` to `&[usize]` (clippy
/// `ptr_arg`); existing `&fi.lines` call sites coerce unchanged.
fn offset_line_col(lines: &[usize], off: usize) -> (usize, usize) {
    match lines.binary_search(&off) {
        // `off` is exactly the start of 0-based line `found`.
        Ok(found) => (found + 1, 0),
        // `off` falls inside the line starting at `lines[idx - 1]`.
        // `idx` is never 0 because `lines[0] == 0 <= off`.
        Err(idx) => (idx, off - lines[idx - 1]),
    }
}
/// Computes the byte offset of the start of each line in `s`.
///
/// The first entry is always 0 (start of the buffer); every subsequent
/// entry is the offset of the byte immediately after a `'\n'`.
fn lines_offsets(s: &[u8]) -> Vec<usize> {
    let mut lines = vec![0];
    let mut prev = 0;
    // `memchr` returns the '\n' position relative to `prev`; step past it
    // and record the start of the following line.
    while let Some(len) = memchr(b'\n', &s[prev..]) {
        prev += len + 1;
        lines.push(prev);
    }
    lines
}
/// Resolves `span` against the thread-local codemap and invokes `f` with
/// the owning file, the span rebased to that file's local coordinates, and
/// the source text the span covers.
///
/// Errors when the span is inverted (`lo > hi`), straddles two registered
/// files, or belongs to no registered file.
fn with_fileinfo<F, R>(span: Span, f: F) -> Result<R, LexError>
where F: FnOnce(&FileInfo, Span, &str) -> Result<R, LexError> {
    CODEMAP.with(|cm| {
        let cm = cm.borrow();
        if span.lo > span.hi {
            return Err(LexError("invalid span object with negative length".to_owned()));
        }
        // Linear scan: find the file whose global range contains `span.lo`.
        for fi in &cm.files {
            if span.lo >= fi.span.lo && span.lo <= fi.span.hi {
                // The start is inside this file, so the end must be too.
                if span.hi < fi.span.lo || span.hi > fi.span.hi {
                    return Err(LexError("span spans multiple input files".to_owned()));
                }
                // Get the local span and the source string
                let loc_span = Span {
                    lo: span.lo - fi.span.lo,
                    hi: span.hi - fi.span.lo,
                };
                let src = &fi.src[loc_span.lo..loc_span.hi];
                // Set the path
                return f(fi, loc_span, src);
            }
        }
        Err(LexError("span is not part of any input file".to_owned()))
    })
}
/// Reads the file at `path`, registers it in the thread-local codemap under
/// its (lossy UTF-8) path string, and returns the global span covering it.
pub fn file_to_span<P: AsRef<Path>>(path: P) -> io::Result<Span> {
    let path = path.as_ref();
    let mut source = String::new();
    File::open(path)?.read_to_string(&mut source)?;
    let name = path.to_string_lossy().into_owned();
    Ok(string_to_span(name, source))
}
/// Registers `code` under `name` in the thread-local codemap and returns
/// the global span that covers it.
///
/// Consecutive files are separated by `FILE_PADDING_BYTES` so that the end
/// of one file can never alias the start of the next.
pub fn string_to_span(name: String, code: String) -> Span {
    CODEMAP.with(|cm| {
        let mut cm = cm.borrow_mut();
        let offset = cm.offset;
        cm.offset += code.len() + FILE_PADDING_BYTES;
        let span = Span {
            lo: offset,
            hi: offset + code.len(),
        };
        // Register the read-in file in the SourceMap
        cm.files.push(FileInfo {
            span: span,
            name: name,
            // The line table must be built before `code` moves into `src`.
            lines: lines_offsets(code.as_bytes()),
            src: code,
        });
        span
    })
}
/// Re-lexes the source text covered by `span` into a token stream, with
/// token positions rebased to the span's global start offset.
pub fn span_to_stream(span: Span) -> Result<TokenStream, LexError> {
    with_fileinfo(span, |_, _, text| lex_str(text, span.lo))
}

/// Returns the registered name of the file that `span` belongs to.
pub fn span_to_name(span: Span) -> Result<String, LexError> {
    with_fileinfo(span, |fi, _, _| Ok(fi.name.clone()))
}

/// Returns the `(line, col)` of the start of `span` within its file.
/// Line is 1-indexed, column is 0-indexed (see `offset_line_col`).
pub fn span_to_line_col(span: Span) -> Result<(usize, usize), LexError> {
    with_fileinfo(span, |fi, loc_span, _| {
        Ok(offset_line_col(&fi.lines, loc_span.lo))
    })
}
|
use sdl2::pixels::Color;
use sdl2::render::{Texture, WindowCanvas};
use specs::prelude::*;
use crate::ecs::components::*;
/// Component storages read by the renderer: every entity that has both a
/// `Position` and a `Sprite` gets drawn.
pub type SystemData<'a> = (ReadStorage<'a, Position>, ReadStorage<'a, Sprite>);

/// Clears the canvas to `background`, draws each sprite centered on its
/// entity's position, then presents the finished frame.
///
/// Returns the SDL error string if any blit fails.
pub fn render(
    canvas: &mut WindowCanvas,
    background: Color,
    textures: &[Texture],
    data: SystemData,
) -> Result<(), String> {
    canvas.set_draw_color(background);
    canvas.clear();

    let (positions, sprites) = data;
    for (pos, sprite) in (&positions, &sprites).join() {
        // Offset by half the sprite size so the position is its center.
        let top_left_x = pos.x() as i32 - sprite.size.x / 2;
        let top_left_y = pos.y() as i32 - sprite.size.y / 2;
        let dest = sdl2::rect::Rect::new(
            top_left_x,
            top_left_y,
            sprite.size.x as u32,
            sprite.size.y as u32,
        );
        canvas.copy(&textures[sprite.spritesheet], sprite.src_rect, dest)?;
    }

    canvas.present();
    Ok(())
}
|
// Copyright 2017 PingCAP, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// See the License for the specific language governing permissions and
// limitations under the License.
use crocksdb_ffi::DBTableProperties;
use libc::{c_uchar, c_void};
use table_properties::TableProperties;
/// User-supplied predicate deciding whether an SST table is worth scanning.
pub trait TableFilter {
    /// A callback to determine whether relevant keys for this scan exist in a
    /// given table based on the table's properties. The callback is passed the
    /// properties of each table during iteration. If the callback returns false,
    /// the table will not be scanned. This option only affects Iterators and has
    /// no impact on point lookups.
    fn table_filter(&self, props: &TableProperties) -> bool;
}
/// C-compatible trampoline handed to RocksDB: recovers the concrete
/// `TableFilter` from the opaque context pointer and forwards the call,
/// converting the `bool` result to a C `uchar`.
pub extern "C" fn table_filter<T: TableFilter>(
    ctx: *mut c_void,
    props: *const DBTableProperties,
) -> c_uchar {
    // SAFETY: `ctx` must be the `T` pointer registered alongside this
    // trampoline (see `destroy_table_filter`), and `props` a valid pointer
    // supplied by RocksDB for the duration of the call. The cast from
    // `DBTableProperties` to `TableProperties` assumes `TableProperties`
    // is a transparent wrapper over the FFI type — TODO confirm its repr.
    unsafe {
        let filter = &*(ctx as *mut T);
        let props = &*(props as *const TableProperties);
        filter.table_filter(props) as c_uchar
    }
}
/// Destructor counterpart to `table_filter`: reclaims ownership of the
/// boxed filter so it is dropped exactly once when RocksDB releases it.
pub extern "C" fn destroy_table_filter<T: TableFilter>(filter: *mut c_void) {
    // SAFETY: `filter` must originate from `Box::<T>::into_raw` and must
    // not be used again after this call; dropping the rebuilt box frees it.
    unsafe {
        let _ = Box::from_raw(filter as *mut T);
    }
}
|
#[doc = "Reader of register IDACA"]
pub type R = crate::R<u32, super::IDACA>;
#[doc = "Writer for register IDACA"]
pub type W = crate::W<u32, super::IDACA>;
#[doc = "Register IDACA `reset()`'s with value 0"]
// svd2rust-generated: the hardware reset value of IDACA is all-zero, so
// `reset()` writes 0 to the whole register.
impl crate::ResetValue for super::IDACA {
    type Type = u32;
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0
    }
}
#[doc = "Reader of field `VAL`"]
pub type VAL_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `VAL`"]
pub struct VAL_W<'a> {
w: &'a mut W,
}
impl<'a> VAL_W<'a> {
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, value: u8) -> &'a mut W {
self.w.bits = (self.w.bits & !0x7f) | ((value as u32) & 0x7f);
self.w
}
}
#[doc = "N/A\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum POL_DYN_A {
#[doc = "0: N/A"]
STATIC,
#[doc = "1: N/A"]
DYNAMIC,
}
impl From<POL_DYN_A> for bool {
#[inline(always)]
fn from(variant: POL_DYN_A) -> Self {
match variant {
POL_DYN_A::STATIC => false,
POL_DYN_A::DYNAMIC => true,
}
}
}
#[doc = "Reader of field `POL_DYN`"]
pub type POL_DYN_R = crate::R<bool, POL_DYN_A>;
impl POL_DYN_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> POL_DYN_A {
match self.bits {
false => POL_DYN_A::STATIC,
true => POL_DYN_A::DYNAMIC,
}
}
#[doc = "Checks if the value of the field is `STATIC`"]
#[inline(always)]
pub fn is_static_(&self) -> bool {
*self == POL_DYN_A::STATIC
}
#[doc = "Checks if the value of the field is `DYNAMIC`"]
#[inline(always)]
pub fn is_dynamic(&self) -> bool {
*self == POL_DYN_A::DYNAMIC
}
}
#[doc = "Write proxy for field `POL_DYN`"]
pub struct POL_DYN_W<'a> {
w: &'a mut W,
}
impl<'a> POL_DYN_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: POL_DYN_A) -> &'a mut W {
{
self.bit(variant.into())
}
}
#[doc = "N/A"]
#[inline(always)]
pub fn static_(self) -> &'a mut W {
self.variant(POL_DYN_A::STATIC)
}
#[doc = "N/A"]
#[inline(always)]
pub fn dynamic(self) -> &'a mut W {
self.variant(POL_DYN_A::DYNAMIC)
}
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 7)) | (((value as u32) & 0x01) << 7);
self.w
}
}
#[doc = "N/A\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum POLARITY_A {
#[doc = "0: Normal: sensor switching between Vssio and Cmod. For non-CSD application, IDAC1 will source current."]
VSSA_SRC,
#[doc = "1: Inverted: sensor switch between Vddio and Cmod. For non-CSD application, IDAC1 will sink current."]
VDDA_SNK,
#[doc = "2: N/A"]
SENSE,
#[doc = "3: N/A"]
SENSE_INV,
}
impl From<POLARITY_A> for u8 {
#[inline(always)]
fn from(variant: POLARITY_A) -> Self {
match variant {
POLARITY_A::VSSA_SRC => 0,
POLARITY_A::VDDA_SNK => 1,
POLARITY_A::SENSE => 2,
POLARITY_A::SENSE_INV => 3,
}
}
}
#[doc = "Reader of field `POLARITY`"]
pub type POLARITY_R = crate::R<u8, POLARITY_A>;
impl POLARITY_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> POLARITY_A {
match self.bits {
0 => POLARITY_A::VSSA_SRC,
1 => POLARITY_A::VDDA_SNK,
2 => POLARITY_A::SENSE,
3 => POLARITY_A::SENSE_INV,
_ => unreachable!(),
}
}
#[doc = "Checks if the value of the field is `VSSA_SRC`"]
#[inline(always)]
pub fn is_vssa_src(&self) -> bool {
*self == POLARITY_A::VSSA_SRC
}
#[doc = "Checks if the value of the field is `VDDA_SNK`"]
#[inline(always)]
pub fn is_vdda_snk(&self) -> bool {
*self == POLARITY_A::VDDA_SNK
}
#[doc = "Checks if the value of the field is `SENSE`"]
#[inline(always)]
pub fn is_sense(&self) -> bool {
*self == POLARITY_A::SENSE
}
#[doc = "Checks if the value of the field is `SENSE_INV`"]
#[inline(always)]
pub fn is_sense_inv(&self) -> bool {
*self == POLARITY_A::SENSE_INV
}
}
#[doc = "Write proxy for field `POLARITY`"]
pub struct POLARITY_W<'a> {
w: &'a mut W,
}
impl<'a> POLARITY_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: POLARITY_A) -> &'a mut W {
{
self.bits(variant.into())
}
}
#[doc = "Normal: sensor switching between Vssio and Cmod. For non-CSD application, IDAC1 will source current."]
#[inline(always)]
pub fn vssa_src(self) -> &'a mut W {
self.variant(POLARITY_A::VSSA_SRC)
}
#[doc = "Inverted: sensor switch between Vddio and Cmod. For non-CSD application, IDAC1 will sink current."]
#[inline(always)]
pub fn vdda_snk(self) -> &'a mut W {
self.variant(POLARITY_A::VDDA_SNK)
}
#[doc = "N/A"]
#[inline(always)]
pub fn sense(self) -> &'a mut W {
self.variant(POLARITY_A::SENSE)
}
#[doc = "N/A"]
#[inline(always)]
pub fn sense_inv(self) -> &'a mut W {
self.variant(POLARITY_A::SENSE_INV)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bits(self, value: u8) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x03 << 8)) | (((value as u32) & 0x03) << 8);
self.w
}
}
#[doc = "N/A\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum BAL_MODE_A {
#[doc = "0: N/A"]
FULL,
#[doc = "1: N/A"]
PHI1,
#[doc = "2: N/A"]
PHI2,
#[doc = "3: N/A"]
PHI1_2,
}
impl From<BAL_MODE_A> for u8 {
#[inline(always)]
fn from(variant: BAL_MODE_A) -> Self {
match variant {
BAL_MODE_A::FULL => 0,
BAL_MODE_A::PHI1 => 1,
BAL_MODE_A::PHI2 => 2,
BAL_MODE_A::PHI1_2 => 3,
}
}
}
#[doc = "Reader of field `BAL_MODE`"]
pub type BAL_MODE_R = crate::R<u8, BAL_MODE_A>;
impl BAL_MODE_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> BAL_MODE_A {
match self.bits {
0 => BAL_MODE_A::FULL,
1 => BAL_MODE_A::PHI1,
2 => BAL_MODE_A::PHI2,
3 => BAL_MODE_A::PHI1_2,
_ => unreachable!(),
}
}
#[doc = "Checks if the value of the field is `FULL`"]
#[inline(always)]
pub fn is_full(&self) -> bool {
*self == BAL_MODE_A::FULL
}
#[doc = "Checks if the value of the field is `PHI1`"]
#[inline(always)]
pub fn is_phi1(&self) -> bool {
*self == BAL_MODE_A::PHI1
}
#[doc = "Checks if the value of the field is `PHI2`"]
#[inline(always)]
pub fn is_phi2(&self) -> bool {
*self == BAL_MODE_A::PHI2
}
#[doc = "Checks if the value of the field is `PHI1_2`"]
#[inline(always)]
pub fn is_phi1_2(&self) -> bool {
*self == BAL_MODE_A::PHI1_2
}
}
#[doc = "Write proxy for field `BAL_MODE`"]
pub struct BAL_MODE_W<'a> {
w: &'a mut W,
}
impl<'a> BAL_MODE_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: BAL_MODE_A) -> &'a mut W {
{
self.bits(variant.into())
}
}
#[doc = "N/A"]
#[inline(always)]
pub fn full(self) -> &'a mut W {
self.variant(BAL_MODE_A::FULL)
}
#[doc = "N/A"]
#[inline(always)]
pub fn phi1(self) -> &'a mut W {
self.variant(BAL_MODE_A::PHI1)
}
#[doc = "N/A"]
#[inline(always)]
pub fn phi2(self) -> &'a mut W {
self.variant(BAL_MODE_A::PHI2)
}
#[doc = "N/A"]
#[inline(always)]
pub fn phi1_2(self) -> &'a mut W {
self.variant(BAL_MODE_A::PHI1_2)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bits(self, value: u8) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x03 << 10)) | (((value as u32) & 0x03) << 10);
self.w
}
}
#[doc = "N/A\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum LEG1_MODE_A {
#[doc = "0: N/A"]
GP_STATIC,
#[doc = "1: N/A"]
GP,
#[doc = "2: N/A"]
CSD_STATIC,
#[doc = "3: N/A"]
CSD,
}
impl From<LEG1_MODE_A> for u8 {
#[inline(always)]
fn from(variant: LEG1_MODE_A) -> Self {
match variant {
LEG1_MODE_A::GP_STATIC => 0,
LEG1_MODE_A::GP => 1,
LEG1_MODE_A::CSD_STATIC => 2,
LEG1_MODE_A::CSD => 3,
}
}
}
#[doc = "Reader of field `LEG1_MODE`"]
pub type LEG1_MODE_R = crate::R<u8, LEG1_MODE_A>;
impl LEG1_MODE_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> LEG1_MODE_A {
match self.bits {
0 => LEG1_MODE_A::GP_STATIC,
1 => LEG1_MODE_A::GP,
2 => LEG1_MODE_A::CSD_STATIC,
3 => LEG1_MODE_A::CSD,
_ => unreachable!(),
}
}
#[doc = "Checks if the value of the field is `GP_STATIC`"]
#[inline(always)]
pub fn is_gp_static(&self) -> bool {
*self == LEG1_MODE_A::GP_STATIC
}
#[doc = "Checks if the value of the field is `GP`"]
#[inline(always)]
pub fn is_gp(&self) -> bool {
*self == LEG1_MODE_A::GP
}
#[doc = "Checks if the value of the field is `CSD_STATIC`"]
#[inline(always)]
pub fn is_csd_static(&self) -> bool {
*self == LEG1_MODE_A::CSD_STATIC
}
#[doc = "Checks if the value of the field is `CSD`"]
#[inline(always)]
pub fn is_csd(&self) -> bool {
*self == LEG1_MODE_A::CSD
}
}
#[doc = "Write proxy for field `LEG1_MODE`"]
pub struct LEG1_MODE_W<'a> {
w: &'a mut W,
}
impl<'a> LEG1_MODE_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: LEG1_MODE_A) -> &'a mut W {
{
self.bits(variant.into())
}
}
#[doc = "N/A"]
#[inline(always)]
pub fn gp_static(self) -> &'a mut W {
self.variant(LEG1_MODE_A::GP_STATIC)
}
#[doc = "N/A"]
#[inline(always)]
pub fn gp(self) -> &'a mut W {
self.variant(LEG1_MODE_A::GP)
}
#[doc = "N/A"]
#[inline(always)]
pub fn csd_static(self) -> &'a mut W {
self.variant(LEG1_MODE_A::CSD_STATIC)
}
#[doc = "N/A"]
#[inline(always)]
pub fn csd(self) -> &'a mut W {
self.variant(LEG1_MODE_A::CSD)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bits(self, value: u8) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x03 << 16)) | (((value as u32) & 0x03) << 16);
self.w
}
}
#[doc = "N/A\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum LEG2_MODE_A {
#[doc = "0: N/A"]
GP_STATIC,
#[doc = "1: N/A"]
GP,
#[doc = "2: N/A"]
CSD_STATIC,
#[doc = "3: N/A"]
CSD,
}
impl From<LEG2_MODE_A> for u8 {
#[inline(always)]
fn from(variant: LEG2_MODE_A) -> Self {
match variant {
LEG2_MODE_A::GP_STATIC => 0,
LEG2_MODE_A::GP => 1,
LEG2_MODE_A::CSD_STATIC => 2,
LEG2_MODE_A::CSD => 3,
}
}
}
#[doc = "Reader of field `LEG2_MODE`"]
pub type LEG2_MODE_R = crate::R<u8, LEG2_MODE_A>;
impl LEG2_MODE_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> LEG2_MODE_A {
match self.bits {
0 => LEG2_MODE_A::GP_STATIC,
1 => LEG2_MODE_A::GP,
2 => LEG2_MODE_A::CSD_STATIC,
3 => LEG2_MODE_A::CSD,
_ => unreachable!(),
}
}
#[doc = "Checks if the value of the field is `GP_STATIC`"]
#[inline(always)]
pub fn is_gp_static(&self) -> bool {
*self == LEG2_MODE_A::GP_STATIC
}
#[doc = "Checks if the value of the field is `GP`"]
#[inline(always)]
pub fn is_gp(&self) -> bool {
*self == LEG2_MODE_A::GP
}
#[doc = "Checks if the value of the field is `CSD_STATIC`"]
#[inline(always)]
pub fn is_csd_static(&self) -> bool {
*self == LEG2_MODE_A::CSD_STATIC
}
#[doc = "Checks if the value of the field is `CSD`"]
#[inline(always)]
pub fn is_csd(&self) -> bool {
*self == LEG2_MODE_A::CSD
}
}
#[doc = "Write proxy for field `LEG2_MODE`"]
pub struct LEG2_MODE_W<'a> {
w: &'a mut W,
}
impl<'a> LEG2_MODE_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: LEG2_MODE_A) -> &'a mut W {
{
self.bits(variant.into())
}
}
#[doc = "N/A"]
#[inline(always)]
pub fn gp_static(self) -> &'a mut W {
self.variant(LEG2_MODE_A::GP_STATIC)
}
#[doc = "N/A"]
#[inline(always)]
pub fn gp(self) -> &'a mut W {
self.variant(LEG2_MODE_A::GP)
}
#[doc = "N/A"]
#[inline(always)]
pub fn csd_static(self) -> &'a mut W {
self.variant(LEG2_MODE_A::CSD_STATIC)
}
#[doc = "N/A"]
#[inline(always)]
pub fn csd(self) -> &'a mut W {
self.variant(LEG2_MODE_A::CSD)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bits(self, value: u8) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x03 << 18)) | (((value as u32) & 0x03) << 18);
self.w
}
}
#[doc = "Reader of field `DSI_CTRL_EN`"]
pub type DSI_CTRL_EN_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `DSI_CTRL_EN`"]
pub struct DSI_CTRL_EN_W<'a> {
w: &'a mut W,
}
impl<'a> DSI_CTRL_EN_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 21)) | (((value as u32) & 0x01) << 21);
self.w
}
}
#[doc = "N/A\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum RANGE_A {
#[doc = "0: N/A"]
IDAC_LO,
#[doc = "1: N/A"]
IDAC_MED,
#[doc = "2: N/A"]
IDAC_HI,
}
impl From<RANGE_A> for u8 {
#[inline(always)]
fn from(variant: RANGE_A) -> Self {
match variant {
RANGE_A::IDAC_LO => 0,
RANGE_A::IDAC_MED => 1,
RANGE_A::IDAC_HI => 2,
}
}
}
#[doc = "Reader of field `RANGE`"]
pub type RANGE_R = crate::R<u8, RANGE_A>;
impl RANGE_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> crate::Variant<u8, RANGE_A> {
use crate::Variant::*;
match self.bits {
0 => Val(RANGE_A::IDAC_LO),
1 => Val(RANGE_A::IDAC_MED),
2 => Val(RANGE_A::IDAC_HI),
i => Res(i),
}
}
#[doc = "Checks if the value of the field is `IDAC_LO`"]
#[inline(always)]
pub fn is_idac_lo(&self) -> bool {
*self == RANGE_A::IDAC_LO
}
#[doc = "Checks if the value of the field is `IDAC_MED`"]
#[inline(always)]
pub fn is_idac_med(&self) -> bool {
*self == RANGE_A::IDAC_MED
}
#[doc = "Checks if the value of the field is `IDAC_HI`"]
#[inline(always)]
pub fn is_idac_hi(&self) -> bool {
*self == RANGE_A::IDAC_HI
}
}
#[doc = "Write proxy for field `RANGE`"]
pub struct RANGE_W<'a> {
w: &'a mut W,
}
impl<'a> RANGE_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: RANGE_A) -> &'a mut W {
unsafe { self.bits(variant.into()) }
}
#[doc = "N/A"]
#[inline(always)]
pub fn idac_lo(self) -> &'a mut W {
self.variant(RANGE_A::IDAC_LO)
}
#[doc = "N/A"]
#[inline(always)]
pub fn idac_med(self) -> &'a mut W {
self.variant(RANGE_A::IDAC_MED)
}
#[doc = "N/A"]
#[inline(always)]
pub fn idac_hi(self) -> &'a mut W {
self.variant(RANGE_A::IDAC_HI)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, value: u8) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x03 << 22)) | (((value as u32) & 0x03) << 22);
self.w
}
}
#[doc = "Reader of field `LEG1_EN`"]
pub type LEG1_EN_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `LEG1_EN`"]
pub struct LEG1_EN_W<'a> {
w: &'a mut W,
}
impl<'a> LEG1_EN_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 24)) | (((value as u32) & 0x01) << 24);
self.w
}
}
#[doc = "Reader of field `LEG2_EN`"]
pub type LEG2_EN_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `LEG2_EN`"]
pub struct LEG2_EN_W<'a> {
w: &'a mut W,
}
impl<'a> LEG2_EN_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 25)) | (((value as u32) & 0x01) << 25);
self.w
}
}
impl R {
#[doc = "Bits 0:6 - N/A"]
#[inline(always)]
pub fn val(&self) -> VAL_R {
VAL_R::new((self.bits & 0x7f) as u8)
}
#[doc = "Bit 7 - N/A"]
#[inline(always)]
pub fn pol_dyn(&self) -> POL_DYN_R {
POL_DYN_R::new(((self.bits >> 7) & 0x01) != 0)
}
#[doc = "Bits 8:9 - N/A"]
#[inline(always)]
pub fn polarity(&self) -> POLARITY_R {
POLARITY_R::new(((self.bits >> 8) & 0x03) as u8)
}
#[doc = "Bits 10:11 - N/A"]
#[inline(always)]
pub fn bal_mode(&self) -> BAL_MODE_R {
BAL_MODE_R::new(((self.bits >> 10) & 0x03) as u8)
}
#[doc = "Bits 16:17 - N/A"]
#[inline(always)]
pub fn leg1_mode(&self) -> LEG1_MODE_R {
LEG1_MODE_R::new(((self.bits >> 16) & 0x03) as u8)
}
#[doc = "Bits 18:19 - N/A"]
#[inline(always)]
pub fn leg2_mode(&self) -> LEG2_MODE_R {
LEG2_MODE_R::new(((self.bits >> 18) & 0x03) as u8)
}
#[doc = "Bit 21 - N/A"]
#[inline(always)]
pub fn dsi_ctrl_en(&self) -> DSI_CTRL_EN_R {
DSI_CTRL_EN_R::new(((self.bits >> 21) & 0x01) != 0)
}
#[doc = "Bits 22:23 - N/A"]
#[inline(always)]
pub fn range(&self) -> RANGE_R {
RANGE_R::new(((self.bits >> 22) & 0x03) as u8)
}
#[doc = "Bit 24 - N/A"]
#[inline(always)]
pub fn leg1_en(&self) -> LEG1_EN_R {
LEG1_EN_R::new(((self.bits >> 24) & 0x01) != 0)
}
#[doc = "Bit 25 - N/A"]
#[inline(always)]
pub fn leg2_en(&self) -> LEG2_EN_R {
LEG2_EN_R::new(((self.bits >> 25) & 0x01) != 0)
}
}
// Write accessors for the register: each method hands out a field-specific
// write proxy borrowing `self`, whose `bits()`/`bit()` method merges the new
// field value into `self.bits` at the matching offset.
impl W {
    #[doc = "Bits 0:6 - N/A"]
    #[inline(always)]
    pub fn val(&mut self) -> VAL_W {
        VAL_W { w: self }
    }
    #[doc = "Bit 7 - N/A"]
    #[inline(always)]
    pub fn pol_dyn(&mut self) -> POL_DYN_W {
        POL_DYN_W { w: self }
    }
    #[doc = "Bits 8:9 - N/A"]
    #[inline(always)]
    pub fn polarity(&mut self) -> POLARITY_W {
        POLARITY_W { w: self }
    }
    #[doc = "Bits 10:11 - N/A"]
    #[inline(always)]
    pub fn bal_mode(&mut self) -> BAL_MODE_W {
        BAL_MODE_W { w: self }
    }
    #[doc = "Bits 16:17 - N/A"]
    #[inline(always)]
    pub fn leg1_mode(&mut self) -> LEG1_MODE_W {
        LEG1_MODE_W { w: self }
    }
    #[doc = "Bits 18:19 - N/A"]
    #[inline(always)]
    pub fn leg2_mode(&mut self) -> LEG2_MODE_W {
        LEG2_MODE_W { w: self }
    }
    #[doc = "Bit 21 - N/A"]
    #[inline(always)]
    pub fn dsi_ctrl_en(&mut self) -> DSI_CTRL_EN_W {
        DSI_CTRL_EN_W { w: self }
    }
    #[doc = "Bits 22:23 - N/A"]
    #[inline(always)]
    pub fn range(&mut self) -> RANGE_W {
        RANGE_W { w: self }
    }
    #[doc = "Bit 24 - N/A"]
    #[inline(always)]
    pub fn leg1_en(&mut self) -> LEG1_EN_W {
        LEG1_EN_W { w: self }
    }
    #[doc = "Bit 25 - N/A"]
    #[inline(always)]
    pub fn leg2_en(&mut self) -> LEG2_EN_W {
        LEG2_EN_W { w: self }
    }
}
|
pub use self::context::OpenCLContext;
pub use self::device::OpenCLDevice;
pub use self::framework::OpenCL;
pub use self::memory::{OpenCLBuf, OpenCLMemory};
mod context;
mod device;
mod error;
mod framework;
mod memory; |
#![doc = "generated by AutoRust 0.1.0"]
#![allow(non_camel_case_types)]
#![allow(unused_imports)]
use serde::{Deserialize, Serialize};
// --- Edge module models (AutoRust-generated API shapes; do not hand-edit field names:
// the serde renames define the wire contract). ---
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct EdgeModuleProperties {
    #[serde(rename = "edgeModuleId", default, skip_serializing_if = "Option::is_none")]
    pub edge_module_id: Option<String>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ListProvisioningTokenInput {
    // Required on the wire: no `default`/`skip_serializing_if`, so it must be present.
    #[serde(rename = "expirationDate")]
    pub expiration_date: String,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct EdgeModuleProvisioningToken {
    #[serde(rename = "expirationDate", default, skip_serializing_if = "Option::is_none")]
    pub expiration_date: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub token: Option<String>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct EdgeModuleEntity {
    // `flatten` inlines the common ARM proxy-resource fields at this level.
    #[serde(flatten)]
    pub proxy_resource: ProxyResource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<EdgeModuleProperties>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct EdgeModuleEntityCollection {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<EdgeModuleEntity>,
    #[serde(rename = "@nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
// SKU: name is required, tier optional.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Sku {
    pub name: sku::Name,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tier: Option<sku::Tier>,
}
pub mod sku {
    use super::*;
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Name {
        // Renames map Rust variants to the service's "Live_S1"/"Batch_S1" strings.
        #[serde(rename = "Live_S1")]
        LiveS1,
        #[serde(rename = "Batch_S1")]
        BatchS1,
    }
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Tier {
        Standard,
    }
}
// Pipeline topology: sources/sinks are required in the full shape, while the
// `...Update` variant makes every collection optional for PATCH-style requests.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PipelineTopologyProperties {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub description: Option<String>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub parameters: Vec<ParameterDeclaration>,
    pub sources: Vec<SourceNodeBase>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub processors: Vec<ProcessorNodeBase>,
    pub sinks: Vec<SinkNodeBase>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PipelineTopologyPropertiesUpdate {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub description: Option<String>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub parameters: Vec<ParameterDeclaration>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub sources: Vec<SourceNodeBase>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub processors: Vec<ProcessorNodeBase>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub sinks: Vec<SinkNodeBase>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ParameterDeclaration {
    pub name: String,
    // `type` is a Rust keyword, hence the `type_` field + serde rename.
    #[serde(rename = "type")]
    pub type_: parameter_declaration::Type,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub description: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub default: Option<String>,
}
pub mod parameter_declaration {
    use super::*;
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Type {
        String,
        SecretString,
        Int,
        Double,
        Bool,
    }
}
// Node base shapes. "@type" is the polymorphic discriminator used by the
// service; derived node types flatten the base and repeat the discriminator.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct NodeBase {
    #[serde(rename = "@type")]
    pub type_: String,
    pub name: String,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SourceNodeBase {
    #[serde(flatten)]
    pub node_base: NodeBase,
    #[serde(rename = "@type")]
    pub type_: String,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ProcessorNodeBase {
    #[serde(flatten)]
    pub node_base: NodeBase,
    #[serde(rename = "@type")]
    pub type_: String,
    pub inputs: Vec<NodeInput>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SinkNodeBase {
    #[serde(flatten)]
    pub node_base: NodeBase,
    #[serde(rename = "@type")]
    pub type_: String,
    pub inputs: Vec<NodeInput>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct NodeInput {
    #[serde(rename = "nodeName")]
    pub node_name: String,
}
// RTSP source node and the endpoint/credential/tunnel base shapes it references.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct RtspSource {
    #[serde(flatten)]
    pub source_node_base: SourceNodeBase,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub transport: Option<rtsp_source::Transport>,
    pub endpoint: EndpointBase,
}
pub mod rtsp_source {
    use super::*;
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Transport {
        Http,
        Tcp,
    }
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct EndpointBase {
    #[serde(rename = "@type")]
    pub type_: String,
    pub credentials: CredentialsBase,
    pub url: String,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tunnel: Option<TunnelBase>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct CredentialsBase {
    #[serde(rename = "@type")]
    pub type_: String,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct TunnelBase {
    #[serde(rename = "@type")]
    pub type_: String,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct UsernamePasswordCredentials {
    #[serde(flatten)]
    pub credentials_base: CredentialsBase,
    pub username: String,
    pub password: String,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SecureIotDeviceRemoteTunnel {
    #[serde(flatten)]
    pub tunnel_base: TunnelBase,
    #[serde(rename = "iotHubName")]
    pub iot_hub_name: String,
    #[serde(rename = "deviceId")]
    pub device_id: String,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct UnsecuredEndpoint {
    #[serde(flatten)]
    pub endpoint_base: EndpointBase,
}
// TLS endpoint configuration and video source shapes.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct TlsEndpoint {
    #[serde(flatten)]
    pub endpoint_base: EndpointBase,
    #[serde(rename = "trustedCertificates", default, skip_serializing_if = "Option::is_none")]
    pub trusted_certificates: Option<CertificateSource>,
    #[serde(rename = "validationOptions", default, skip_serializing_if = "Option::is_none")]
    pub validation_options: Option<TlsValidationOptions>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct CertificateSource {
    #[serde(rename = "@type")]
    pub type_: String,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct TlsValidationOptions {
    // NOTE(review): these look boolean-valued but the API models them as strings.
    #[serde(rename = "ignoreHostname", default, skip_serializing_if = "Option::is_none")]
    pub ignore_hostname: Option<String>,
    #[serde(rename = "ignoreSignature", default, skip_serializing_if = "Option::is_none")]
    pub ignore_signature: Option<String>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PemCertificateList {
    #[serde(flatten)]
    pub certificate_source: CertificateSource,
    pub certificates: Vec<String>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VideoSource {
    #[serde(flatten)]
    pub source_node_base: SourceNodeBase,
    #[serde(rename = "videoName")]
    pub video_name: String,
    #[serde(rename = "timeSequences")]
    pub time_sequences: TimeSequenceBase,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct TimeSequenceBase {
    #[serde(rename = "@type")]
    pub type_: String,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VideoSequenceAbsoluteTimeMarkers {
    #[serde(flatten)]
    pub time_sequence_base: TimeSequenceBase,
    pub ranges: String,
}
// Encoder processor node, its preset hierarchy, and the audio/video encoder shapes.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct EncoderProcessor {
    #[serde(flatten)]
    pub processor_node_base: ProcessorNodeBase,
    pub preset: EncoderPresetBase,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct EncoderPresetBase {
    #[serde(rename = "@type")]
    pub type_: String,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct EncoderSystemPreset {
    #[serde(flatten)]
    pub encoder_preset_base: EncoderPresetBase,
    pub name: encoder_system_preset::Name,
}
pub mod encoder_system_preset {
    use super::*;
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Name {
        #[serde(rename = "SingleLayer_540p_H264_AAC")]
        SingleLayer540pH264Aac,
        #[serde(rename = "SingleLayer_720p_H264_AAC")]
        SingleLayer720pH264Aac,
        #[serde(rename = "SingleLayer_1080p_H264_AAC")]
        SingleLayer1080pH264Aac,
        #[serde(rename = "SingleLayer_2160p_H264_AAC")]
        SingleLayer2160pH264Aac,
    }
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct EncoderCustomPreset {
    #[serde(flatten)]
    pub encoder_preset_base: EncoderPresetBase,
    #[serde(rename = "audioEncoder", default, skip_serializing_if = "Option::is_none")]
    pub audio_encoder: Option<AudioEncoderBase>,
    #[serde(rename = "videoEncoder", default, skip_serializing_if = "Option::is_none")]
    pub video_encoder: Option<VideoEncoderBase>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct AudioEncoderBase {
    #[serde(rename = "@type")]
    pub type_: String,
    // Numeric-looking values modelled as strings by the API.
    #[serde(rename = "bitrateKbps", default, skip_serializing_if = "Option::is_none")]
    pub bitrate_kbps: Option<String>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VideoEncoderBase {
    #[serde(rename = "@type")]
    pub type_: String,
    #[serde(rename = "bitrateKbps", default, skip_serializing_if = "Option::is_none")]
    pub bitrate_kbps: Option<String>,
    #[serde(rename = "frameRate", default, skip_serializing_if = "Option::is_none")]
    pub frame_rate: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub scale: Option<VideoScale>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct AudioEncoderAac {
    #[serde(flatten)]
    pub audio_encoder_base: AudioEncoderBase,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VideoScale {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub height: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub width: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub mode: Option<video_scale::Mode>,
}
pub mod video_scale {
    use super::*;
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Mode {
        Pad,
        PreserveAspectRatio,
        Stretch,
    }
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VideoEncoderH264 {
    #[serde(flatten)]
    pub video_encoder_base: VideoEncoderBase,
}
// Video sink node and its creation/publishing option shapes.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VideoSink {
    #[serde(flatten)]
    pub sink_node_base: SinkNodeBase,
    #[serde(rename = "videoName")]
    pub video_name: String,
    #[serde(rename = "videoCreationProperties", default, skip_serializing_if = "Option::is_none")]
    pub video_creation_properties: Option<VideoCreationProperties>,
    #[serde(rename = "videoPublishingOptions", default, skip_serializing_if = "Option::is_none")]
    pub video_publishing_options: Option<VideoPublishingOptions>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VideoCreationProperties {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub title: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub description: Option<String>,
    #[serde(rename = "segmentLength", default, skip_serializing_if = "Option::is_none")]
    pub segment_length: Option<String>,
    #[serde(rename = "retentionPeriod", default, skip_serializing_if = "Option::is_none")]
    pub retention_period: Option<String>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VideoPublishingOptions {
    #[serde(rename = "disableArchive", default, skip_serializing_if = "Option::is_none")]
    pub disable_archive: Option<String>,
    #[serde(rename = "disableRtspPublishing", default, skip_serializing_if = "Option::is_none")]
    pub disable_rtsp_publishing: Option<String>,
}
// Pipeline topology resource (full and PATCH-update variants). The two Kind
// enums are duplicated per module by the generator, one per containing type.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ParameterDefinition {
    pub name: String,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub value: Option<String>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PipelineTopology {
    #[serde(flatten)]
    pub proxy_resource: ProxyResource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<PipelineTopologyProperties>,
    pub kind: pipeline_topology::Kind,
    pub sku: Sku,
}
pub mod pipeline_topology {
    use super::*;
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Kind {
        Live,
        Batch,
    }
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PipelineTopologyUpdate {
    #[serde(flatten)]
    pub proxy_resource: ProxyResource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<PipelineTopologyPropertiesUpdate>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub kind: Option<pipeline_topology_update::Kind>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub sku: Option<Sku>,
}
pub mod pipeline_topology_update {
    use super::*;
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Kind {
        Live,
        Batch,
    }
}
// Live pipeline resource family: properties, PATCH-update properties, and the
// resource wrappers. State enums are duplicated per module by the generator.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct LivePipelineProperties {
    #[serde(rename = "topologyName")]
    pub topology_name: String,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub description: Option<String>,
    // Required here; optional in the Update variant below.
    #[serde(rename = "bitrateKbps")]
    pub bitrate_kbps: i32,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub state: Option<live_pipeline_properties::State>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub parameters: Vec<ParameterDefinition>,
}
pub mod live_pipeline_properties {
    use super::*;
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum State {
        Inactive,
        Activating,
        Active,
        Deactivating,
    }
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct LivePipelinePropertiesUpdate {
    #[serde(rename = "topologyName", default, skip_serializing_if = "Option::is_none")]
    pub topology_name: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub description: Option<String>,
    #[serde(rename = "bitrateKbps", default, skip_serializing_if = "Option::is_none")]
    pub bitrate_kbps: Option<i32>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub state: Option<live_pipeline_properties_update::State>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub parameters: Vec<ParameterDefinition>,
}
pub mod live_pipeline_properties_update {
    use super::*;
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum State {
        Inactive,
        Activating,
        Active,
        Deactivating,
    }
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct LivePipeline {
    #[serde(flatten)]
    pub proxy_resource: ProxyResource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<LivePipelineProperties>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct LivePipelineUpdate {
    #[serde(flatten)]
    pub proxy_resource: ProxyResource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<LivePipelinePropertiesUpdate>,
}
// Pipeline job resource family: mirrors the live-pipeline shapes but with a
// job State enum and an attached PipelineJobError.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PipelineJobProperties {
    #[serde(rename = "topologyName")]
    pub topology_name: String,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub description: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub state: Option<pipeline_job_properties::State>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub expiration: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub error: Option<PipelineJobError>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub parameters: Vec<ParameterDefinition>,
}
pub mod pipeline_job_properties {
    use super::*;
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum State {
        Processing,
        Canceled,
        Completed,
        Failed,
    }
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PipelineJobPropertiesUpdate {
    #[serde(rename = "topologyName", default, skip_serializing_if = "Option::is_none")]
    pub topology_name: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub description: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub state: Option<pipeline_job_properties_update::State>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub expiration: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub error: Option<PipelineJobError>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub parameters: Vec<ParameterDefinition>,
}
pub mod pipeline_job_properties_update {
    use super::*;
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum State {
        Processing,
        Canceled,
        Completed,
        Failed,
    }
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PipelineJobError {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub code: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub message: Option<String>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PipelineJob {
    #[serde(flatten)]
    pub proxy_resource: ProxyResource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<PipelineJobProperties>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PipelineJobUpdate {
    #[serde(flatten)]
    pub proxy_resource: ProxyResource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<PipelineJobPropertiesUpdate>,
}
// Async-operation status shapes and paged collection wrappers
// ("@nextLink" carries the continuation URL).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct LivePipelineOperationStatus {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub status: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub error: Option<ErrorDetail>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PipelineJobOperationStatus {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub status: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub error: Option<ErrorDetail>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PipelineTopologyCollection {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<PipelineTopology>,
    #[serde(rename = "@nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct LivePipelineCollection {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<LivePipeline>,
    #[serde(rename = "@nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PipelineJobCollection {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<PipelineJob>,
    #[serde(rename = "@nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
// Operation display metadata and metric specification shapes (ARM operations API).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct OperationDisplay {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub provider: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub resource: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub operation: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub description: Option<String>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct MetricDimension {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(rename = "displayName", default, skip_serializing_if = "Option::is_none")]
    pub display_name: Option<String>,
    #[serde(rename = "toBeExportedForShoebox", default, skip_serializing_if = "Option::is_none")]
    pub to_be_exported_for_shoebox: Option<bool>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct MetricSpecification {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(rename = "displayName", default, skip_serializing_if = "Option::is_none")]
    pub display_name: Option<String>,
    #[serde(rename = "displayDescription", default, skip_serializing_if = "Option::is_none")]
    pub display_description: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub unit: Option<metric_specification::Unit>,
    #[serde(rename = "aggregationType", default, skip_serializing_if = "Option::is_none")]
    pub aggregation_type: Option<metric_specification::AggregationType>,
    #[serde(rename = "lockAggregationType", default, skip_serializing_if = "Option::is_none")]
    pub lock_aggregation_type: Option<metric_specification::LockAggregationType>,
    #[serde(rename = "supportedAggregationTypes", default, skip_serializing_if = "Vec::is_empty")]
    pub supported_aggregation_types: Vec<String>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub dimensions: Vec<MetricDimension>,
    #[serde(rename = "enableRegionalMdmAccount", default, skip_serializing_if = "Option::is_none")]
    pub enable_regional_mdm_account: Option<bool>,
    #[serde(rename = "sourceMdmAccount", default, skip_serializing_if = "Option::is_none")]
    pub source_mdm_account: Option<String>,
    #[serde(rename = "sourceMdmNamespace", default, skip_serializing_if = "Option::is_none")]
    pub source_mdm_namespace: Option<String>,
    #[serde(rename = "supportedTimeGrainTypes", default, skip_serializing_if = "Vec::is_empty")]
    pub supported_time_grain_types: Vec<String>,
}
pub mod metric_specification {
    use super::*;
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Unit {
        Bytes,
        Count,
        Milliseconds,
    }
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum AggregationType {
        Average,
        Count,
        Total,
    }
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum LockAggregationType {
        Average,
        Count,
        Total,
    }
}
// Service specification and the ARM Operation model.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Properties {
    #[serde(rename = "serviceSpecification", default, skip_serializing_if = "Option::is_none")]
    pub service_specification: Option<ServiceSpecification>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ServiceSpecification {
    #[serde(rename = "logSpecifications", default, skip_serializing_if = "Vec::is_empty")]
    pub log_specifications: Vec<LogSpecification>,
    #[serde(rename = "metricSpecifications", default, skip_serializing_if = "Vec::is_empty")]
    pub metric_specifications: Vec<MetricSpecification>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct LogSpecification {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(rename = "displayName", default, skip_serializing_if = "Option::is_none")]
    pub display_name: Option<String>,
    #[serde(rename = "blobDuration", default, skip_serializing_if = "Option::is_none")]
    pub blob_duration: Option<String>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Operation {
    // The operation name is the only required field on this model.
    pub name: String,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub display: Option<OperationDisplay>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub origin: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<Properties>,
    #[serde(rename = "isDataAction", default, skip_serializing_if = "Option::is_none")]
    pub is_data_action: Option<bool>,
    #[serde(rename = "actionType", default, skip_serializing_if = "Option::is_none")]
    pub action_type: Option<operation::ActionType>,
}
pub mod operation {
    use super::*;
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum ActionType {
        Internal,
    }
}
// Video Analyzer account properties (full and PATCH-update variants).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct StorageAccount {
    pub id: String,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub identity: Option<ResourceIdentity>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub status: Option<String>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VideoAnalyzerProperties {
    // Required here; optional (defaulted) in the Update variant below.
    #[serde(rename = "storageAccounts")]
    pub storage_accounts: Vec<StorageAccount>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub endpoints: Vec<Endpoint>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub encryption: Option<AccountEncryption>,
    #[serde(rename = "iotHubs", default, skip_serializing_if = "Vec::is_empty")]
    pub iot_hubs: Vec<IotHub>,
    #[serde(rename = "publicNetworkAccess", default, skip_serializing_if = "Option::is_none")]
    pub public_network_access: Option<video_analyzer_properties::PublicNetworkAccess>,
    #[serde(rename = "networkAccessControl", default, skip_serializing_if = "Option::is_none")]
    pub network_access_control: Option<NetworkAccessControl>,
    #[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")]
    pub provisioning_state: Option<video_analyzer_properties::ProvisioningState>,
    #[serde(rename = "privateEndpointConnections", default, skip_serializing_if = "Vec::is_empty")]
    pub private_endpoint_connections: Vec<PrivateEndpointConnection>,
}
pub mod video_analyzer_properties {
    use super::*;
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum PublicNetworkAccess {
        Enabled,
        Disabled,
    }
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum ProvisioningState {
        Failed,
        InProgress,
        Succeeded,
    }
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VideoAnalyzerPropertiesUpdate {
    #[serde(rename = "storageAccounts", default, skip_serializing_if = "Vec::is_empty")]
    pub storage_accounts: Vec<StorageAccount>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub endpoints: Vec<Endpoint>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub encryption: Option<AccountEncryption>,
    #[serde(rename = "iotHubs", default, skip_serializing_if = "Vec::is_empty")]
    pub iot_hubs: Vec<IotHub>,
    #[serde(rename = "publicNetworkAccess", default, skip_serializing_if = "Option::is_none")]
    pub public_network_access: Option<video_analyzer_properties_update::PublicNetworkAccess>,
    #[serde(rename = "networkAccessControl", default, skip_serializing_if = "Option::is_none")]
    pub network_access_control: Option<NetworkAccessControl>,
    #[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")]
    pub provisioning_state: Option<video_analyzer_properties_update::ProvisioningState>,
    #[serde(rename = "privateEndpointConnections", default, skip_serializing_if = "Vec::is_empty")]
    pub private_endpoint_connections: Vec<PrivateEndpointConnection>,
}
pub mod video_analyzer_properties_update {
    use super::*;
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum PublicNetworkAccess {
        Enabled,
        Disabled,
    }
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum ProvisioningState {
        Failed,
        InProgress,
        Succeeded,
    }
}
// Video Analyzer account resource, endpoint, and managed-identity shapes.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VideoAnalyzer {
    #[serde(flatten)]
    pub tracked_resource: TrackedResource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<VideoAnalyzerProperties>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub identity: Option<VideoAnalyzerIdentity>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VideoAnalyzerUpdate {
    // Free-form tag bag; modelled as arbitrary JSON by the generator.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tags: Option<serde_json::Value>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<VideoAnalyzerPropertiesUpdate>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub identity: Option<VideoAnalyzerIdentity>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Endpoint {
    #[serde(rename = "endpointUrl", default, skip_serializing_if = "Option::is_none")]
    pub endpoint_url: Option<String>,
    #[serde(rename = "type")]
    pub type_: endpoint::Type,
}
pub mod endpoint {
    use super::*;
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Type {
        ClientApi,
    }
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct UserAssignedManagedIdentity {
    #[serde(rename = "clientId", default, skip_serializing_if = "Option::is_none")]
    pub client_id: Option<String>,
    #[serde(rename = "principalId", default, skip_serializing_if = "Option::is_none")]
    pub principal_id: Option<String>,
}
// Empty marker type: presumably stands in for a map of identity-id -> identity
// in the API spec — confirm against the service definition.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct UserAssignedManagedIdentities {}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VideoAnalyzerIdentity {
    #[serde(rename = "type")]
    pub type_: String,
    #[serde(rename = "userAssignedIdentities", default, skip_serializing_if = "Option::is_none")]
    pub user_assigned_identities: Option<UserAssignedManagedIdentities>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ResourceIdentity {
    #[serde(rename = "userAssignedIdentity")]
    pub user_assigned_identity: String,
}
// Encryption, IoT Hub, and network access-control shapes.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct KeyVaultProperties {
    #[serde(rename = "keyIdentifier")]
    pub key_identifier: String,
    #[serde(rename = "currentKeyIdentifier", default, skip_serializing_if = "Option::is_none")]
    pub current_key_identifier: Option<String>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct AccountEncryption {
    #[serde(rename = "type")]
    pub type_: account_encryption::Type,
    #[serde(rename = "keyVaultProperties", default, skip_serializing_if = "Option::is_none")]
    pub key_vault_properties: Option<KeyVaultProperties>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub identity: Option<ResourceIdentity>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub status: Option<String>,
}
pub mod account_encryption {
    use super::*;
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Type {
        SystemKey,
        CustomerKey,
    }
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct IotHub {
    pub id: String,
    pub identity: ResourceIdentity,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub status: Option<String>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct GroupLevelAccessControl {
    #[serde(rename = "publicNetworkAccess", default, skip_serializing_if = "Option::is_none")]
    pub public_network_access: Option<group_level_access_control::PublicNetworkAccess>,
}
pub mod group_level_access_control {
    use super::*;
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum PublicNetworkAccess {
        Enabled,
        Disabled,
    }
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct NetworkAccessControl {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub integration: Option<GroupLevelAccessControl>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub ingestion: Option<GroupLevelAccessControl>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub consumption: Option<GroupLevelAccessControl>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VideoAnalyzerPrivateEndpointConnectionOperationStatus {
pub name: String,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub id: Option<String>,
#[serde(rename = "startTime", default, skip_serializing_if = "Option::is_none")]
pub start_time: Option<String>,
#[serde(rename = "endTime", default, skip_serializing_if = "Option::is_none")]
pub end_time: Option<String>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub status: Option<String>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub error: Option<ErrorDetail>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VideoAnalyzerOperationStatus {
pub name: String,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub id: Option<String>,
#[serde(rename = "startTime", default, skip_serializing_if = "Option::is_none")]
pub start_time: Option<String>,
#[serde(rename = "endTime", default, skip_serializing_if = "Option::is_none")]
pub end_time: Option<String>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub status: Option<String>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub error: Option<ErrorDetail>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct OperationCollection {
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub value: Vec<Operation>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VideoAnalyzerCollection {
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub value: Vec<VideoAnalyzer>,
}
/// JSON model: URLs for consuming a video's content in various forms.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VideoContentUrls {
    #[serde(rename = "downloadUrl", default, skip_serializing_if = "Option::is_none")]
    pub download_url: Option<String>,
    #[serde(rename = "archiveBaseUrl", default, skip_serializing_if = "Option::is_none")]
    pub archive_base_url: Option<String>,
    #[serde(rename = "rtspTunnelUrl", default, skip_serializing_if = "Option::is_none")]
    pub rtsp_tunnel_url: Option<String>,
    #[serde(rename = "previewImageUrls", default, skip_serializing_if = "Option::is_none")]
    pub preview_image_urls: Option<VideoPreviewImageUrls>,
}
/// JSON model: preview image URLs in three sizes.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VideoPreviewImageUrls {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub small: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub medium: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub large: Option<String>,
}
/// JSON model: capability/state flags for a video; all three are required.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VideoFlags {
    #[serde(rename = "canStream")]
    pub can_stream: bool,
    #[serde(rename = "hasData")]
    pub has_data: bool,
    #[serde(rename = "isInUse")]
    pub is_in_use: bool,
}
/// JSON model: media information for a video.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VideoMediaInfo {
    #[serde(rename = "segmentLength", default, skip_serializing_if = "Option::is_none")]
    pub segment_length: Option<String>,
}
/// JSON model: a content-access token and its expiration.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VideoContentToken {
    #[serde(rename = "expirationDate", default, skip_serializing_if = "Option::is_none")]
    pub expiration_date: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub token: Option<String>,
}
/// JSON model: properties of a video resource; every field is optional.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VideoProperties {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub title: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub description: Option<String>,
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<video_properties::Type>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub flags: Option<VideoFlags>,
    #[serde(rename = "contentUrls", default, skip_serializing_if = "Option::is_none")]
    pub content_urls: Option<VideoContentUrls>,
    #[serde(rename = "mediaInfo", default, skip_serializing_if = "Option::is_none")]
    pub media_info: Option<VideoMediaInfo>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub archival: Option<VideoArchival>,
}
/// Enum namespace for [`VideoProperties::type_`].
pub mod video_properties {
    use super::*;
    /// The kind of video resource.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Type {
        Archive,
        File,
    }
}
/// JSON model: archival settings for a video.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VideoArchival {
    #[serde(rename = "retentionPeriod", default, skip_serializing_if = "Option::is_none")]
    pub retention_period: Option<String>,
}
/// JSON model: base for authentication payloads; `@type` is the discriminator field.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct AuthenticationBase {
    #[serde(rename = "@type")]
    pub type_: String,
}
/// JSON model: a single name/value token claim.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct TokenClaim {
    pub name: String,
    pub value: String,
}
/// JSON model: JWT-based authentication; the base discriminator is flattened
/// into the same JSON object.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct JwtAuthentication {
    #[serde(flatten)]
    pub authentication_base: AuthenticationBase,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub issuers: Vec<String>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub audiences: Vec<String>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub claims: Vec<TokenClaim>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub keys: Vec<TokenKey>,
}
/// JSON model: base token key with discriminator `@type` and key id `kid`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct TokenKey {
    #[serde(rename = "@type")]
    pub type_: String,
    pub kid: String,
}
/// JSON model: RSA key (modulus `n`, exponent `e`) extending [`TokenKey`].
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct RsaTokenKey {
    #[serde(flatten)]
    pub token_key: TokenKey,
    pub alg: rsa_token_key::Alg,
    pub n: String,
    pub e: String,
}
/// Enum namespace for [`RsaTokenKey::alg`].
pub mod rsa_token_key {
    use super::*;
    /// RSA signing algorithm; serialized in its canonical upper-case form.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Alg {
        #[serde(rename = "RS256")]
        Rs256,
        #[serde(rename = "RS384")]
        Rs384,
        #[serde(rename = "RS512")]
        Rs512,
    }
}
/// JSON model: elliptic-curve key (coordinates `x`, `y`) extending [`TokenKey`].
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct EccTokenKey {
    #[serde(flatten)]
    pub token_key: TokenKey,
    pub alg: ecc_token_key::Alg,
    pub x: String,
    pub y: String,
}
/// Enum namespace for [`EccTokenKey::alg`].
pub mod ecc_token_key {
    use super::*;
    /// ECDSA signing algorithm; serialized in its canonical upper-case form.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Alg {
        #[serde(rename = "ES256")]
        Es256,
        #[serde(rename = "ES384")]
        Es384,
        #[serde(rename = "ES512")]
        Es512,
    }
}
/// JSON model: access-policy payload (role plus authentication settings).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct AccessPolicyProperties {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub role: Option<access_policy_properties::Role>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub authentication: Option<AuthenticationBase>,
}
/// Enum namespace for [`AccessPolicyProperties::role`].
pub mod access_policy_properties {
    use super::*;
    /// The only role currently modeled.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Role {
        Reader,
    }
}
/// JSON model: a video resource — proxy-resource envelope plus optional properties.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VideoEntity {
    #[serde(flatten)]
    pub proxy_resource: ProxyResource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<VideoProperties>,
}
/// JSON model: an access-policy resource — proxy-resource envelope plus optional properties.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct AccessPolicyEntity {
    #[serde(flatten)]
    pub proxy_resource: ProxyResource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<AccessPolicyProperties>,
}
/// Paged collection of [`VideoEntity`]; `@nextLink` points at the next page, if any.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VideoEntityCollection {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<VideoEntity>,
    #[serde(rename = "@nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
/// Paged collection of [`AccessPolicyEntity`]; `@nextLink` points at the next page, if any.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct AccessPolicyEntityCollection {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<AccessPolicyEntity>,
    #[serde(rename = "@nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
/// JSON model: top-level error envelope wrapping an [`ErrorDetail`].
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ErrorResponse {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub error: Option<ErrorDetail>,
}
/// JSON model: error details; `details` nests further errors recursively.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ErrorDetail {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub code: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub message: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub target: Option<String>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub details: Vec<ErrorDetail>,
    #[serde(rename = "additionalInfo", default, skip_serializing_if = "Vec::is_empty")]
    pub additional_info: Vec<ErrorAdditionalInfo>,
}
/// JSON model: extra error info; `info` is arbitrary JSON.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ErrorAdditionalInfo {
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub info: Option<serde_json::Value>,
}
/// JSON model: a resource without location/tags — just the common [`Resource`] envelope.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ProxyResource {
    #[serde(flatten)]
    pub resource: Resource,
}
/// JSON model: common resource envelope (id, name, type, system metadata).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Resource {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
    #[serde(rename = "systemData", default, skip_serializing_if = "Option::is_none")]
    pub system_data: Option<SystemData>,
}
/// JSON model: creation/modification audit metadata.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SystemData {
    #[serde(rename = "createdBy", default, skip_serializing_if = "Option::is_none")]
    pub created_by: Option<String>,
    #[serde(rename = "createdByType", default, skip_serializing_if = "Option::is_none")]
    pub created_by_type: Option<system_data::CreatedByType>,
    #[serde(rename = "createdAt", default, skip_serializing_if = "Option::is_none")]
    pub created_at: Option<String>,
    #[serde(rename = "lastModifiedBy", default, skip_serializing_if = "Option::is_none")]
    pub last_modified_by: Option<String>,
    #[serde(rename = "lastModifiedByType", default, skip_serializing_if = "Option::is_none")]
    pub last_modified_by_type: Option<system_data::LastModifiedByType>,
    #[serde(rename = "lastModifiedAt", default, skip_serializing_if = "Option::is_none")]
    pub last_modified_at: Option<String>,
}
/// Enum namespace for [`SystemData`] actor-kind fields.
pub mod system_data {
    use super::*;
    /// Kind of actor that created the resource.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum CreatedByType {
        User,
        Application,
        ManagedIdentity,
        Key,
    }
    /// Kind of actor that last modified the resource (same variants as above,
    /// kept separate because the schema declares two distinct enums).
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum LastModifiedByType {
        User,
        Application,
        ManagedIdentity,
        Key,
    }
}
/// Collection wrapper; `value` is omitted from JSON when empty.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PrivateLinkResourceListResult {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<PrivateLinkResource>,
}
/// JSON model: a private-link resource — envelope plus optional properties.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PrivateLinkResource {
    #[serde(flatten)]
    pub resource: Resource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<PrivateLinkResourceProperties>,
}
/// JSON model: private-link group id plus required member/zone names.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PrivateLinkResourceProperties {
    #[serde(rename = "groupId", default, skip_serializing_if = "Option::is_none")]
    pub group_id: Option<String>,
    #[serde(rename = "requiredMembers", default, skip_serializing_if = "Vec::is_empty")]
    pub required_members: Vec<String>,
    #[serde(rename = "requiredZoneNames", default, skip_serializing_if = "Vec::is_empty")]
    pub required_zone_names: Vec<String>,
}
/// Collection wrapper; `value` is omitted from JSON when empty.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PrivateEndpointConnectionListResult {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<PrivateEndpointConnection>,
}
/// JSON model: a private-endpoint connection — envelope plus optional properties.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PrivateEndpointConnection {
    #[serde(flatten)]
    pub resource: Resource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<PrivateEndpointConnectionProperties>,
}
/// JSON model: connection state; only `privateLinkServiceConnectionState` is required.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PrivateEndpointConnectionProperties {
    #[serde(rename = "privateEndpoint", default, skip_serializing_if = "Option::is_none")]
    pub private_endpoint: Option<PrivateEndpoint>,
    #[serde(rename = "privateLinkServiceConnectionState")]
    pub private_link_service_connection_state: PrivateLinkServiceConnectionState,
    #[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")]
    pub provisioning_state: Option<PrivateEndpointConnectionProvisioningState>,
}
/// JSON model: reference to a private endpoint by resource id.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PrivateEndpoint {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
}
/// JSON model: approval state of a private-link service connection.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PrivateLinkServiceConnectionState {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub status: Option<PrivateEndpointServiceConnectionStatus>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub description: Option<String>,
    #[serde(rename = "actionsRequired", default, skip_serializing_if = "Option::is_none")]
    pub actions_required: Option<String>,
}
/// Approval status of a private-endpoint service connection.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum PrivateEndpointServiceConnectionStatus {
    Pending,
    Approved,
    Rejected,
}
/// Provisioning lifecycle state of a private-endpoint connection.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum PrivateEndpointConnectionProvisioningState {
    Succeeded,
    Creating,
    Deleting,
    Failed,
}
/// JSON model: name-availability check request (name plus resource type).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct CheckNameAvailabilityRequest {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
}
/// JSON model: name-availability check response.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct CheckNameAvailabilityResponse {
    #[serde(rename = "nameAvailable", default, skip_serializing_if = "Option::is_none")]
    pub name_available: Option<bool>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub reason: Option<check_name_availability_response::Reason>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub message: Option<String>,
}
/// Enum namespace for [`CheckNameAvailabilityResponse::reason`].
pub mod check_name_availability_response {
    use super::*;
    /// Why a name is unavailable.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Reason {
        Invalid,
        AlreadyExists,
    }
}
/// JSON model: a resource that has a required `location` and optional tags.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct TrackedResource {
    #[serde(flatten)]
    pub resource: Resource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tags: Option<serde_json::Value>,
    pub location: String,
}
|
use crate::parser::{Lexer, Loc, ParserError};
/// Error produced when decoding a string literal fails.
#[derive(Debug)]
pub enum StrLitDecodeError {
    Error,
}

impl From<ParserError> for StrLitDecodeError {
    // Any lexer error during decoding collapses into the single `Error` variant.
    fn from(_: ParserError) -> Self {
        StrLitDecodeError::Error
    }
}

/// Result alias for string-literal decoding operations.
pub type StrLitDecodeResult<T> = Result<T, StrLitDecodeError>;

/// String literal, both `string` and `bytes`.
#[derive(Clone, Eq, PartialEq, Debug)]
pub struct StrLit {
    // Literal body in escaped form; decode it with `StrLit::decode_utf8`.
    pub escaped: String,
}
impl StrLit {
    /// Decodes the escaped literal body into a `String`.
    ///
    /// May fail if not valid UTF8.
    pub fn decode_utf8(&self) -> StrLitDecodeResult<String> {
        // Run a fresh lexer over the escaped text and collect each decoded char.
        let mut lexer = Lexer {
            input: &self.escaped,
            pos: 0,
            loc: Loc::start(),
        };
        let mut decoded = String::new();
        while !lexer.eof() {
            decoded.push(lexer.next_char_value()?);
        }
        Ok(decoded)
    }

    /// Returns the (still escaped) literal wrapped in double quotes.
    pub fn quoted(&self) -> String {
        let mut out = String::with_capacity(self.escaped.len() + 2);
        out.push('"');
        out.push_str(&self.escaped);
        out.push('"');
        out
    }
}
|
use bio::pattern_matching::shift_and;
use std::fs;
/// Reads a two-line input file (DNA on line 1, pattern on line 2) and prints
/// every 1-based occurrence of the pattern in the DNA, separated by spaces.
pub fn run(filename: &str) {
    let contents = fs::read(filename).expect("Something went wrong reading the file");
    let mut input_lines = contents.split(|&byte| byte == b'\n');
    let dna = input_lines.next().expect("Missing dna");
    let pattern = input_lines.next().expect("Missing pattern");
    // Shift-And bit-parallel matcher over the pattern bytes.
    let matcher = shift_and::ShiftAnd::new(pattern);
    for position in matcher.find_all(dna) {
        // Occurrences are reported 1-based.
        print!("{} ", position + 1);
    }
    println!();
}
|
/// A source of byte chunks consumed one chunk at a time via a callback.
///
/// NOTE(review): this code predates modern Rust and does not compile on
/// current editions: `chunk` declares an unnamed parameter (`F`), the provided
/// methods return `Fused<Self>` by value without a `Self: Sized` bound, and
/// the `None` arm of `Fused::chunk` hands a callback expecting `Option<C>` to
/// an inner `chunk` that supplies `Option<C::Next>` — confirm the intended
/// semantics before modernizing.
pub trait Chunks {
    // Continuation type yielded alongside each chunk.
    type Next: Chunks;
    // Consume `self`, invoking the callback with one chunk and an optional continuation.
    fn chunk<F>(self, F) where F: for<'a> FnOnce(&'a [u8], Option<Self::Next>) + 'static;
    // Prepend `data` so it is yielded before this source's own chunks.
    fn attach(self, data: &[u8]) -> Fused<Self> { Fused(self, Some(data)) }
    // Wrap this source with no prepended data.
    fn fuse<'a>(self) -> Fused<'a, Self> { Fused(self, None) }
}

/// A chunk source paired with an optional prepended byte slice.
pub struct Fused<'a, C>(C, Option<&'a [u8]>) where C: Chunks;

impl<'a, C: Chunks> Chunks for Fused<'a, C> {
    type Next = C;
    fn chunk<F>(self, cb: F)
    where F: FnOnce(&[u8], Option<C>) + 'static {
        match self.1 {
            // Yield the prepended slice first; the inner source is the continuation.
            Some(data) => cb(data, Some(self.0)),
            // No prepended data: delegate directly to the inner source.
            None => self.0.chunk(cb)
        }
    }
}
|
use std::collections::{HashMap, HashSet};
use std::env;
use std::path::PathBuf;
use std::sync::mpsc::channel;
use std::sync::{Arc, Mutex};
use std::thread;
use std::time::Duration;
use actix_web::web;
use chrono::Local;
use notify::{DebouncedEvent, RecommendedWatcher, RecursiveMode, Watcher};
use crate::db;
/// Spawns a background thread that watches the current working directory and
/// regenerates the API-doc data whenever a file changes.
///
/// Fix: the original called `.watch(¤t_dir, …)` / `update_api_data(f,
/// ¤t_dir, …)` — an HTML-entity corruption of `&current_dir` that does
/// not compile; restored the intended `&current_dir` references.
pub fn watch_api_docs_change(data: web::Data<Mutex<db::Database>>) {
    let current_dir = env::current_dir().expect("Failed to determine current directory");
    let ignore_file_path = current_dir.join(".gitignore");
    let current_dir = current_dir.to_str().unwrap().to_string();
    thread::spawn(move || {
        let (tx, rx) = channel();
        // Debounce events for 2 seconds so a burst of saves triggers one reload.
        let mut watcher: RecommendedWatcher = Watcher::new(tx, Duration::from_secs(2)).unwrap();
        watcher
            .watch(&current_dir, RecursiveMode::Recursive)
            .unwrap();
        loop {
            match rx.recv() {
                Ok(event) => match event {
                    // Writes, creations and removals are handled identically.
                    DebouncedEvent::NoticeWrite(f)
                    | DebouncedEvent::Create(f)
                    | DebouncedEvent::NoticeRemove(f) => {
                        update_api_data(f, &current_dir, &ignore_file_path, data.clone());
                    }
                    // For renames, only the new path matters.
                    DebouncedEvent::Rename(_f1, f2) => {
                        update_api_data(f2, &current_dir, &ignore_file_path, data.clone());
                    }
                    _ => {}
                },
                Err(e) => println!("watch error: {:?}", e),
            }
        }
    });
}
/// 发生文件改动/新增时,更新接口文档数据
/// README.md, json数据
fn update_api_data(
filepath: PathBuf,
current_dir: &str,
ignore_file_path: &PathBuf,
data: web::Data<Mutex<db::Database>>,
) {
if let Ok(ignore) = gitignore::File::new(ignore_file_path) {
let is_ignore = ignore.is_excluded(&filepath).unwrap();
if is_ignore {
return;
}
}
let filepath = filepath.to_str().unwrap();
let mut api_docs: HashMap<String, db::ApiDoc> = HashMap::new();
let mut api_data: HashMap<String, Vec<Arc<Mutex<db::ApiData>>>> = HashMap::new();
let mut fileindex_data: HashMap<String, HashSet<String>> = HashMap::new();
let websocket_api = Arc::new(Mutex::new(db::ApiData::default()));
let mut data = data.lock().unwrap();
let filename = filepath.trim_start_matches(&format!("{}/", current_dir));
// 暂时全部使用全局重新加载: 这里需要改进
*data = db::Database::load();
println!("{} data update done. {}", filename, Local::now());
return;
let mut delete_files: Vec<String> = Vec::new();
let mut parse_error_code = 0;
let mut menus: HashMap<String, db::Menu> = HashMap::new();
if filename == "README.md" {
let (basic_data, settings_value) = db::load_basic_data();
data.basic_data = basic_data;
data.settings = settings_value;
} else if filepath.ends_with(".md") {
// 全局重新加载
*data = db::Database::load();
println!("{} data update done. {}", filename, Local::now());
return;
} else if !filepath.ends_with(".json5") {
return;
} else if filename == "_settings.json5" {
// 全局重新加载
*data = db::Database::load();
println!("{} data update done. {}", filename, Local::now());
return;
} else if filename == "_auth.json5" {
// 加载auth
let auth_data = db::load_auth_data(&data.api_docs);
data.auth_doc = auth_data;
println!("{} data update done. {}", filename, Local::now());
return;
} else if filename.contains("_data/") {
// 如果修改的是_data里面的文件,需要通过fileindex_datal来找到对应文件更新
if let Some(ref_files) = data.fileindex_data.get(filename) {
// 把找到的文件全部重新load一遍
for ref_file in ref_files {
parse_error_code = db::Database::load_a_api_json_file(
ref_file,
&data.basic_data,
&mut api_data,
&mut api_docs,
websocket_api.clone(),
&mut fileindex_data,
&mut menus,
);
if parse_error_code == -2 {
delete_files.push(ref_file.to_string());
}
}
}
} else {
parse_error_code = db::Database::load_a_api_json_file(
filename,
&data.basic_data,
&mut api_data,
&mut api_docs,
websocket_api.clone(),
&mut fileindex_data,
&mut menus,
);
if parse_error_code == -2 {
delete_files.push(filename.to_string());
}
}
if parse_error_code < 0 {
// 没有解析错误,才会打印解析完成
for delete_file in &delete_files {
// 发生删除文件,要分别删除api_docs和api_data中的数据
let mut urls: Vec<String> = Vec::new();
if let Some(api_doc) = &data.api_docs.get(delete_file) {
// 删除 api_data中 api_doc包含的url
for api in api_doc.apis.iter() {
let url = &api.lock().unwrap().url;
urls.push(url.to_string());
}
}
// 删除api_doc
data.api_docs.remove(delete_file);
// 删除api_data中的url
for url in &urls {
data.api_data.remove(url);
}
if parse_error_code == -2 {
println!("deleted file {} {}", filename, Local::now());
}
}
}
for (k, v) in api_data {
data.api_data.insert(k, v);
}
for (k, v) in api_docs {
data.api_docs.insert(k, v);
}
data.websocket_api = websocket_api;
for (ref_file, doc_files) in fileindex_data {
if &ref_file != "" {
match data.fileindex_data.get_mut(&ref_file) {
Some(x) => {
for f in doc_files {
x.insert(f);
}
}
None => {
data.fileindex_data.insert(ref_file, doc_files);
}
}
}
}
if parse_error_code == 1 {
println!("{} data update done. {}", filename, Local::now());
}
}
|
use super::{Trader, Order, Action};
use crate::indicators::{Value, Indicator};
use crate::economy::Monetary;
/// Trader combinator that only acts when both inner traders agree
/// (see the `Trader` impl below for the exact combination rule).
pub struct And<T1, T2>
where
    T1: Trader,
    T2: Trader
{
    trader1: T1,
    trader2: T2
}
impl<T1, T2> Trader for And<T1, T2>
where
    T1: Trader,
    T2: Trader
{
    // The combined indicator set is simply the pair of both traders' indicator sets.
    type Indicators = (T1::Indicators, T2::Indicators);

    /// Initializes both inner traders for the same base/quote market pair.
    fn initialize(base: &str, quote: &str) -> And<T1, T2> {
        And {
            trader1: T1::initialize(base, quote),
            trader2: T2::initialize(base, quote),
        }
    }

    /// Emits a limit order only when BOTH traders propose a limit order with
    /// the same action (buy/buy or sell/sell); the emitted quantity and price
    /// are the arithmetic mean of the two proposals. Any other combination
    /// (disagreement, non-limit order, or no order) yields `None`.
    fn evaluate(&mut self, (output1, output2): <Self::Indicators as Indicator>::Output) -> Option<Order> {
        match (self.trader1.evaluate(output1), self.trader2.evaluate(output2)) {
            (Some(Order::Limit(Action::Buy, quantity1, value1)), Some(Order::Limit(Action::Buy, quantity2, value2))) => {
                Some(Order::Limit(Action::Buy, (quantity1 + quantity2) / 2.0, (value1 + value2) / 2.0))
            },
            (Some(Order::Limit(Action::Sell, quantity1, value1)), Some(Order::Limit(Action::Sell, quantity2, value2))) => {
                Some(Order::Limit(Action::Sell, (quantity1 + quantity2) / 2.0, (value1 + value2) / 2.0))
            },
            (_, _) => {
                None
            }
        }
    }
}
use std::{
ffi::c_int,
io::{self, Read, Write},
mem::size_of,
os::{
fd::{AsRawFd, RawFd},
unix::net::UnixStream,
},
};
use crate::exec::signal_fmt;
use crate::system::interface::ProcessId;
use super::CommandStatus;
// Wire format shared by both backchannels: each message is a one-byte prefix
// (variant discriminant) followed by a native-endian `c_int` payload.
type Prefix = u8;
type ParentData = c_int;
type MonitorData = c_int;
const PREFIX_LEN: usize = size_of::<Prefix>();
const PARENT_DATA_LEN: usize = size_of::<ParentData>();
const MONITOR_DATA_LEN: usize = size_of::<MonitorData>();
/// The two connected ends of the parent <-> monitor socket pair.
pub(super) struct BackchannelPair {
    pub(super) parent: ParentBackchannel,
    pub(super) monitor: MonitorBackchannel,
}

impl BackchannelPair {
    /// Creates a connected pair of backchannels from a fresh Unix socket pair.
    ///
    /// In debug builds both sockets are put into non-blocking mode so the
    /// would-block assertions in `send`/`recv` can fire; release builds keep
    /// the sockets blocking.
    pub(super) fn new() -> io::Result<Self> {
        let (sock1, sock2) = UnixStream::pair()?;
        #[cfg(debug_assertions)]
        {
            sock1.set_nonblocking(true)?;
            sock2.set_nonblocking(true)?;
        }
        Ok(Self {
            parent: ParentBackchannel {
                socket: sock1,
                // Assertions start disabled (see `set_nonblocking_asserts`).
                #[cfg(debug_assertions)]
                nonblocking_asserts: false,
            },
            monitor: MonitorBackchannel {
                socket: sock2,
                #[cfg(debug_assertions)]
                nonblocking_asserts: false,
            },
        })
    }
}
/// Messages the monitor sends to the parent process
/// (sent by `MonitorBackchannel::send`, received by `ParentBackchannel::recv`).
pub(super) enum ParentMessage {
    /// Raw OS error code from a failed I/O operation.
    IoError(c_int),
    /// The command's status changed (exit/term/stop).
    CommandStatus(CommandStatus),
    /// PID of the command.
    CommandPid(ProcessId),
    /// The read ended early (maps to `io::ErrorKind::UnexpectedEof`).
    ShortRead,
}
impl ParentMessage {
    /// Total encoded size: one prefix byte plus the payload integer.
    const LEN: usize = PREFIX_LEN + PARENT_DATA_LEN;

    // Wire discriminants for each variant.
    const IO_ERROR: Prefix = 0;
    const CMD_STAT_EXIT: Prefix = 1;
    const CMD_STAT_TERM: Prefix = 2;
    const CMD_STAT_STOP: Prefix = 3;
    const CMD_PID: Prefix = 4;
    const SHORT_READ: Prefix = 5;

    /// Decodes a message from its wire parts. Panics on an unknown prefix,
    /// which would indicate a corrupted or incompatible peer.
    fn from_parts(prefix: Prefix, data: ParentData) -> Self {
        match prefix {
            Self::IO_ERROR => Self::IoError(data),
            Self::CMD_STAT_EXIT => Self::CommandStatus(CommandStatus::Exit(data)),
            Self::CMD_STAT_TERM => Self::CommandStatus(CommandStatus::Term(data)),
            Self::CMD_STAT_STOP => Self::CommandStatus(CommandStatus::Stop(data)),
            Self::CMD_PID => Self::CommandPid(data),
            Self::SHORT_READ => Self::ShortRead,
            _ => unreachable!(),
        }
    }

    /// Encodes the message into its wire parts `(prefix, payload)`.
    fn to_parts(&self) -> (Prefix, ParentData) {
        match self {
            ParentMessage::IoError(data) => (Self::IO_ERROR, *data),
            ParentMessage::CommandStatus(CommandStatus::Exit(data)) => (Self::CMD_STAT_EXIT, *data),
            ParentMessage::CommandStatus(CommandStatus::Term(data)) => (Self::CMD_STAT_TERM, *data),
            ParentMessage::CommandStatus(CommandStatus::Stop(data)) => (Self::CMD_STAT_STOP, *data),
            ParentMessage::CommandPid(pid) => (Self::CMD_PID, *pid),
            ParentMessage::ShortRead => (Self::SHORT_READ, 0),
        }
    }
}
impl TryFrom<io::Error> for ParentMessage {
    type Error = io::Error;

    /// Converts an I/O error into a reportable message: a raw OS error code
    /// becomes `IoError`, an `UnexpectedEof` becomes `ShortRead`, and any
    /// other error is handed back unchanged as `Err`.
    fn try_from(err: io::Error) -> Result<Self, Self::Error> {
        err.raw_os_error()
            .map(Self::IoError)
            .or_else(|| (err.kind() == io::ErrorKind::UnexpectedEof).then_some(Self::ShortRead))
            .ok_or(err)
    }
}
impl From<CommandStatus> for ParentMessage {
    // Convenience wrapper so command statuses can be sent with `.into()`.
    fn from(status: CommandStatus) -> Self {
        Self::CommandStatus(status)
    }
}
/// A socket used for communication between the monitor and the parent process.
/// This is the parent's end of the pair.
pub(super) struct ParentBackchannel {
    socket: UnixStream,
    // When set (debug builds only), `send`/`recv` assert that the socket
    // never reports `WouldBlock`.
    #[cfg(debug_assertions)]
    nonblocking_asserts: bool,
}
impl ParentBackchannel {
    /// Send a [`MonitorMessage`].
    ///
    /// Calling this method will block until the socket is ready for writing.
    #[track_caller]
    pub(super) fn send(&mut self, event: &MonitorMessage) -> io::Result<()> {
        // Encode as [prefix byte | native-endian payload] into a fixed-size buffer.
        let mut buf = [0; MonitorMessage::LEN];
        let (prefix_buf, data_buf) = buf.split_at_mut(PREFIX_LEN);
        let (prefix, data) = event.to_parts();
        prefix_buf.copy_from_slice(&prefix.to_ne_bytes());
        data_buf.copy_from_slice(&data.to_ne_bytes());
        self.socket.write_all(&buf).map_err(|err| {
            // Debug-only: when assertions are enabled, a WouldBlock error is a bug.
            #[cfg(debug_assertions)]
            if self.nonblocking_asserts {
                assert_ne!(err.kind(), io::ErrorKind::WouldBlock);
            }
            err
        })
    }
    /// Receive a [`ParentMessage`].
    ///
    /// Calling this method will block until the socket is ready for reading.
    #[track_caller]
    pub(super) fn recv(&mut self) -> io::Result<ParentMessage> {
        let mut buf = [0; ParentMessage::LEN];
        self.socket.read_exact(&mut buf).map_err(|err| {
            #[cfg(debug_assertions)]
            if self.nonblocking_asserts {
                assert_ne!(err.kind(), io::ErrorKind::WouldBlock);
            }
            err
        })?;
        // Decode the [prefix | payload] layout written by the peer's `send`.
        let (prefix_buf, data_buf) = buf.split_at(PREFIX_LEN);
        let prefix = Prefix::from_ne_bytes(prefix_buf.try_into().unwrap());
        let data = ParentData::from_ne_bytes(data_buf.try_into().unwrap());
        Ok(ParentMessage::from_parts(prefix, data))
    }
    /// Enables/disables the debug-only would-block assertions; a no-op in
    /// release builds.
    pub(super) fn set_nonblocking_asserts(&mut self, _doit: bool) {
        #[cfg(debug_assertions)]
        {
            self.nonblocking_asserts = _doit;
        }
    }
}
impl AsRawFd for ParentBackchannel {
    // Expose the underlying socket fd (e.g. for event-loop registration).
    fn as_raw_fd(&self) -> RawFd {
        self.socket.as_raw_fd()
    }
}
/// Different messages exchanged between the monitor and the parent process using a [`ParentBackchannel`].
#[derive(PartialEq, Eq)]
pub(super) enum MonitorMessage {
    /// Request to execute the command (no payload; encoded payload is 0).
    ExecCommand,
    /// Carries a raw signal number (rendered via `signal_fmt` in `Debug`).
    Signal(c_int),
}
impl MonitorMessage {
    /// Total encoded size: one prefix byte plus the payload integer.
    const LEN: usize = PREFIX_LEN + MONITOR_DATA_LEN;

    // Wire discriminants for each variant.
    const EXEC_CMD: Prefix = 0;
    const SIGNAL: Prefix = 1;

    /// Decodes a message from its wire parts. Panics on an unknown prefix,
    /// which would indicate a corrupted or incompatible peer.
    fn from_parts(prefix: Prefix, data: MonitorData) -> Self {
        match prefix {
            Self::EXEC_CMD => Self::ExecCommand,
            Self::SIGNAL => Self::Signal(data),
            _ => unreachable!(),
        }
    }

    /// Encodes the message into its wire parts `(prefix, payload)`;
    /// `ExecCommand` carries a zero payload.
    fn to_parts(&self) -> (Prefix, MonitorData) {
        match *self {
            MonitorMessage::ExecCommand => (Self::EXEC_CMD, 0),
            MonitorMessage::Signal(signal) => (Self::SIGNAL, signal),
        }
    }
}
impl std::fmt::Debug for MonitorMessage {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
Self::ExecCommand => "ExecCommand".fmt(f),
&Self::Signal(signal) => write!(f, "Signal({})", signal_fmt(signal)),
}
}
}
/// A socket used for communication between the monitor and the parent process.
/// This is the monitor's end of the pair.
pub(super) struct MonitorBackchannel {
    socket: UnixStream,
    // When set (debug builds only), `send`/`recv` assert that the socket
    // never reports `WouldBlock`.
    #[cfg(debug_assertions)]
    nonblocking_asserts: bool,
}
impl MonitorBackchannel {
    /// Send a [`ParentMessage`].
    ///
    /// Calling this method will block until the socket is ready for writing.
    #[track_caller]
    pub(super) fn send(&mut self, event: &ParentMessage) -> io::Result<()> {
        // Encode as [prefix byte | native-endian payload] into a fixed-size buffer.
        let mut buf = [0; ParentMessage::LEN];
        let (prefix_buf, data_buf) = buf.split_at_mut(PREFIX_LEN);
        let (prefix, data) = event.to_parts();
        prefix_buf.copy_from_slice(&prefix.to_ne_bytes());
        data_buf.copy_from_slice(&data.to_ne_bytes());
        self.socket.write_all(&buf).map_err(|err| {
            // Debug-only: when assertions are enabled, a WouldBlock error is a bug.
            #[cfg(debug_assertions)]
            if self.nonblocking_asserts {
                assert_ne!(err.kind(), io::ErrorKind::WouldBlock);
            }
            err
        })
    }
    /// Receive a [`MonitorMessage`].
    ///
    /// Calling this method will block until the socket is ready for reading.
    #[track_caller]
    pub(super) fn recv(&mut self) -> io::Result<MonitorMessage> {
        let mut buf = [0; MonitorMessage::LEN];
        self.socket.read_exact(&mut buf).map_err(|err| {
            #[cfg(debug_assertions)]
            if self.nonblocking_asserts {
                assert_ne!(err.kind(), io::ErrorKind::WouldBlock);
            }
            err
        })?;
        // Decode the [prefix | payload] layout written by the peer's `send`.
        let (prefix_buf, data_buf) = buf.split_at(PREFIX_LEN);
        let prefix = Prefix::from_ne_bytes(prefix_buf.try_into().unwrap());
        let data = MonitorData::from_ne_bytes(data_buf.try_into().unwrap());
        Ok(MonitorMessage::from_parts(prefix, data))
    }
    /// Enables/disables the debug-only would-block assertions; a no-op in
    /// release builds.
    // NOTE(review): name differs from `ParentBackchannel::set_nonblocking_asserts`;
    // consider unifying (renaming would require updating callers).
    pub(super) fn set_nonblocking_assertions(&mut self, _doit: bool) {
        #[cfg(debug_assertions)]
        {
            self.nonblocking_asserts = _doit;
        }
    }
}
impl AsRawFd for MonitorBackchannel {
    // Expose the underlying socket fd (e.g. for event-loop registration).
    fn as_raw_fd(&self) -> RawFd {
        self.socket.as_raw_fd()
    }
}
|
// Historic pre-1.0 test modernized to current Rust: `alt` -> `match`,
// `int` -> `isize`, statement-form `assert (..)` -> `assert!(..)`, and the
// or-pattern arm `a | b { 42 }` -> `blah::a | blah::b => 42`. Identifiers are
// kept as written, so the non-camel-case lint is silenced.
#[allow(non_camel_case_types)]
enum blah {
    a,
    b,
}

/// Returns 42 for either variant via a single or-pattern arm.
fn or_alt(q: blah) -> isize {
    match q {
        blah::a | blah::b => 42,
    }
}

fn main() {
    assert!(or_alt(blah::a) == 42);
    assert!(or_alt(blah::b) == 42);
}
|
/// Status holds a single Status of a single Commit
///
/// Every field is optional: the API may omit any of them in a response.
#[derive(Debug, Default, Clone, Serialize, Deserialize)]
pub struct Status {
    pub context: Option<String>,
    pub created_at: Option<String>,
    pub creator: Option<crate::user::User>,
    pub description: Option<String>,
    pub id: Option<i64>,
    pub status: Option<String>,
    pub target_url: Option<String>,
    pub updated_at: Option<String>,
    pub url: Option<String>,
}
impl Status {
    /// Create a builder for this object.
    #[inline]
    pub fn builder() -> StatusBuilder {
        StatusBuilder {
            body: Default::default(),
        }
    }
    /// Typestate builder for a `GET` request; the `Missing*` marker types
    /// force `owner`, `repo` and `ref` to be set before the request can be sent.
    #[inline]
    pub fn repo_get_combined_status_by_ref() -> StatusGetBuilder<crate::generics::MissingOwner, crate::generics::MissingRepo, crate::generics::MissingRef> {
        StatusGetBuilder {
            inner: Default::default(),
            _param_owner: core::marker::PhantomData,
            _param_repo: core::marker::PhantomData,
            _param_ref: core::marker::PhantomData,
        }
    }
    /// Typestate builder for listing statuses; requires `owner`, `repo` and
    /// `sha` before the request can be sent.
    #[inline]
    pub fn repo_list_statuses() -> StatusGetBuilder1<crate::generics::MissingOwner, crate::generics::MissingRepo, crate::generics::MissingSha> {
        StatusGetBuilder1 {
            inner: Default::default(),
            _param_owner: core::marker::PhantomData,
            _param_repo: core::marker::PhantomData,
            _param_sha: core::marker::PhantomData,
        }
    }
}
/// Idiom fix: implement `From` instead of `Into` — the standard blanket impl
/// derives `Into<Status> for StatusBuilder` automatically, so existing
/// `.into()` call sites keep working, and `Status::from(builder)` now works too.
impl From<StatusBuilder> for Status {
    fn from(builder: StatusBuilder) -> Self {
        builder.body
    }
}
/// Builder for [`Status`](./struct.Status.html) object.
#[derive(Debug, Clone)]
pub struct StatusBuilder {
    // The Status being built; extracted when the builder is converted.
    body: self::Status,
}
impl StatusBuilder {
    /// Set the `context` field.
    #[inline]
    pub fn context(mut self, value: impl Into<String>) -> Self {
        self.body.context = Some(value.into());
        self
    }
    /// Set the `created_at` field.
    #[inline]
    pub fn created_at(mut self, value: impl Into<String>) -> Self {
        self.body.created_at = Some(value.into());
        self
    }
    /// Set the `creator` field.
    #[inline]
    pub fn creator(mut self, value: crate::user::User) -> Self {
        self.body.creator = Some(value.into());
        self
    }
    /// Set the `description` field.
    #[inline]
    pub fn description(mut self, value: impl Into<String>) -> Self {
        self.body.description = Some(value.into());
        self
    }
    /// Set the `id` field.
    #[inline]
    pub fn id(mut self, value: impl Into<i64>) -> Self {
        self.body.id = Some(value.into());
        self
    }
    /// Set the `status` field.
    #[inline]
    pub fn status(mut self, value: impl Into<String>) -> Self {
        self.body.status = Some(value.into());
        self
    }
    /// Set the `target_url` field.
    #[inline]
    pub fn target_url(mut self, value: impl Into<String>) -> Self {
        self.body.target_url = Some(value.into());
        self
    }
    /// Set the `updated_at` field.
    #[inline]
    pub fn updated_at(mut self, value: impl Into<String>) -> Self {
        self.body.updated_at = Some(value.into());
        self
    }
    /// Set the `url` field.
    #[inline]
    pub fn url(mut self, value: impl Into<String>) -> Self {
        self.body.url = Some(value.into());
        self
    }
}
/// Builder created by [`Status::repo_get_combined_status_by_ref`](./struct.Status.html#method.repo_get_combined_status_by_ref) method for a `GET` operation associated with `Status`.
// `repr(transparent)` guarantees this wrapper has exactly the container's
// layout; the typestate setters rely on that when they `transmute` between
// marker instantiations.
#[repr(transparent)]
#[derive(Debug, Clone)]
pub struct StatusGetBuilder<Owner, Repo, Ref> {
    inner: StatusGetBuilderContainer,
    // Zero-sized typestate markers tracking which required params are set.
    _param_owner: core::marker::PhantomData<Owner>,
    _param_repo: core::marker::PhantomData<Repo>,
    _param_ref: core::marker::PhantomData<Ref>,
}
/// Untyped parameter storage shared by every marker instantiation of
/// [`StatusGetBuilder`]; presence of the required params is tracked by the
/// typestate markers, not here.
#[derive(Debug, Default, Clone)]
struct StatusGetBuilderContainer {
    param_owner: Option<String>,
    param_repo: Option<String>,
    param_ref: Option<String>,
    param_page: Option<i64>,
}
impl<Owner, Repo, Ref> StatusGetBuilder<Owner, Repo, Ref> {
    /// owner of the repo
    #[inline]
    pub fn owner(mut self, value: impl Into<String>) -> StatusGetBuilder<crate::generics::OwnerExists, Repo, Ref> {
        self.inner.param_owner = Some(value.into());
        // SAFETY: only the phantom marker type parameters change; the struct
        // is `#[repr(transparent)]` over `StatusGetBuilderContainer`, so
        // source and target have identical layout.
        unsafe { std::mem::transmute(self) }
    }
    /// name of the repo
    #[inline]
    pub fn repo(mut self, value: impl Into<String>) -> StatusGetBuilder<Owner, crate::generics::RepoExists, Ref> {
        self.inner.param_repo = Some(value.into());
        // SAFETY: see `owner` — marker-only type change, identical layout.
        unsafe { std::mem::transmute(self) }
    }
    /// name of branch/tag/commit
    #[inline]
    pub fn ref_(mut self, value: impl Into<String>) -> StatusGetBuilder<Owner, Repo, crate::generics::RefExists> {
        self.inner.param_ref = Some(value.into());
        // SAFETY: see `owner` — marker-only type change, identical layout.
        unsafe { std::mem::transmute(self) }
    }
    /// page number of results
    #[inline]
    pub fn page(mut self, value: impl Into<i64>) -> Self {
        // Optional parameter: does not advance the typestate.
        self.inner.param_page = Some(value.into());
        self
    }
}
/// The request becomes sendable only once owner, repo and ref are all set.
impl<Client: crate::client::ApiClient + Sync + 'static> crate::client::Sendable<Client> for StatusGetBuilder<crate::generics::OwnerExists, crate::generics::RepoExists, crate::generics::RefExists> {
    type Output = Status;
    const METHOD: http::Method = http::Method::GET;
    fn rel_path(&self) -> std::borrow::Cow<'static, str> {
        // Positional format arguments: `ref` is a Rust keyword and is not a
        // valid named `format!` argument, so `ref = …` does not compile.
        // The `expect`s encode the typestate invariant: the `*Exists`
        // markers guarantee these parameters were set.
        format!(
            "/repos/{}/{}/commits/{}/statuses",
            self.inner.param_owner.as_ref().expect("missing parameter owner?"),
            self.inner.param_repo.as_ref().expect("missing parameter repo?"),
            self.inner.param_ref.as_ref().expect("missing parameter ref?")
        ).into()
    }
    fn modify(&self, req: Client::Request) -> Result<Client::Request, crate::client::ApiError<Client::Response>> {
        use crate::client::Request;
        // Attach the optional `page` query parameter (skipped when `None`).
        Ok(req
        .query(&[
            ("page", self.inner.param_page.as_ref().map(std::string::ToString::to_string))
        ]))
    }
}
impl crate::client::ResponseWrapper<Status, StatusGetBuilder<crate::generics::OwnerExists, crate::generics::RepoExists, crate::generics::RefExists>> {
    /// Value of the `message` response header, if present (decoded lossily
    /// from UTF-8).
    #[inline]
    pub fn message(&self) -> Option<String> {
        let raw = self.headers.get("message")?;
        Some(String::from_utf8_lossy(raw.as_ref()).into_owned())
    }
    /// Value of the `url` response header, if present (decoded lossily
    /// from UTF-8).
    #[inline]
    pub fn url(&self) -> Option<String> {
        let raw = self.headers.get("url")?;
        Some(String::from_utf8_lossy(raw.as_ref()).into_owned())
    }
}
/// Builder created by [`Status::repo_list_statuses`](./struct.Status.html#method.repo_list_statuses) method for a `GET` operation associated with `Status`.
// `repr(transparent)` guarantees this wrapper has exactly the container's
// layout; the typestate setters rely on that when they `transmute` between
// marker instantiations.
#[repr(transparent)]
#[derive(Debug, Clone)]
pub struct StatusGetBuilder1<Owner, Repo, Sha> {
    inner: StatusGetBuilder1Container,
    // Zero-sized typestate markers tracking which required params are set.
    _param_owner: core::marker::PhantomData<Owner>,
    _param_repo: core::marker::PhantomData<Repo>,
    _param_sha: core::marker::PhantomData<Sha>,
}
/// Untyped parameter storage shared by every marker instantiation of
/// [`StatusGetBuilder1`]; the first three are required (typestate-tracked),
/// the rest are optional query parameters.
#[derive(Debug, Default, Clone)]
struct StatusGetBuilder1Container {
    param_owner: Option<String>,
    param_repo: Option<String>,
    param_sha: Option<String>,
    param_sort: Option<String>,
    param_state: Option<String>,
    param_page: Option<i64>,
    param_limit: Option<i64>,
}
impl<Owner, Repo, Sha> StatusGetBuilder1<Owner, Repo, Sha> {
    /// owner of the repo
    #[inline]
    pub fn owner(mut self, value: impl Into<String>) -> StatusGetBuilder1<crate::generics::OwnerExists, Repo, Sha> {
        self.inner.param_owner = Some(value.into());
        // SAFETY: only the phantom marker type parameters change; the struct
        // is `#[repr(transparent)]` over `StatusGetBuilder1Container`, so
        // source and target have identical layout.
        unsafe { std::mem::transmute(self) }
    }
    /// name of the repo
    #[inline]
    pub fn repo(mut self, value: impl Into<String>) -> StatusGetBuilder1<Owner, crate::generics::RepoExists, Sha> {
        self.inner.param_repo = Some(value.into());
        // SAFETY: see `owner` — marker-only type change, identical layout.
        unsafe { std::mem::transmute(self) }
    }
    /// sha of the commit
    #[inline]
    pub fn sha(mut self, value: impl Into<String>) -> StatusGetBuilder1<Owner, Repo, crate::generics::ShaExists> {
        self.inner.param_sha = Some(value.into());
        // SAFETY: see `owner` — marker-only type change, identical layout.
        unsafe { std::mem::transmute(self) }
    }
    /// type of sort
    #[inline]
    pub fn sort(mut self, value: impl Into<String>) -> Self {
        // Optional parameter: does not advance the typestate.
        self.inner.param_sort = Some(value.into());
        self
    }
    /// type of state
    #[inline]
    pub fn state(mut self, value: impl Into<String>) -> Self {
        self.inner.param_state = Some(value.into());
        self
    }
    /// page number of results to return (1-based)
    #[inline]
    pub fn page(mut self, value: impl Into<i64>) -> Self {
        self.inner.param_page = Some(value.into());
        self
    }
    /// page size of results
    #[inline]
    pub fn limit(mut self, value: impl Into<i64>) -> Self {
        self.inner.param_limit = Some(value.into());
        self
    }
}
/// The request becomes sendable only once owner, repo and sha are all set.
impl<Client: crate::client::ApiClient + Sync + 'static> crate::client::Sendable<Client> for StatusGetBuilder1<crate::generics::OwnerExists, crate::generics::RepoExists, crate::generics::ShaExists> {
    type Output = Vec<Status>;
    const METHOD: http::Method = http::Method::GET;
    fn rel_path(&self) -> std::borrow::Cow<'static, str> {
        // The `expect`s encode the typestate invariant: the `*Exists`
        // markers guarantee these parameters were set.
        let owner = self.inner.param_owner.as_ref().expect("missing parameter owner?");
        let repo = self.inner.param_repo.as_ref().expect("missing parameter repo?");
        let sha = self.inner.param_sha.as_ref().expect("missing parameter sha?");
        format!("/repos/{}/{}/statuses/{}", owner, repo, sha).into()
    }
    fn modify(&self, req: Client::Request) -> Result<Client::Request, crate::client::ApiError<Client::Response>> {
        use crate::client::Request;
        // Attach the optional query parameters (each skipped when `None`).
        let query = [
            ("sort", self.inner.param_sort.as_ref().map(std::string::ToString::to_string)),
            ("state", self.inner.param_state.as_ref().map(std::string::ToString::to_string)),
            ("page", self.inner.param_page.as_ref().map(std::string::ToString::to_string)),
            ("limit", self.inner.param_limit.as_ref().map(std::string::ToString::to_string)),
        ];
        Ok(req.query(&query))
    }
}
impl crate::client::ResponseWrapper<Vec<Status>, StatusGetBuilder1<crate::generics::OwnerExists, crate::generics::RepoExists, crate::generics::ShaExists>> {
    /// Value of the `message` response header, if present (decoded lossily
    /// from UTF-8).
    #[inline]
    pub fn message(&self) -> Option<String> {
        let raw = self.headers.get("message")?;
        Some(String::from_utf8_lossy(raw.as_ref()).into_owned())
    }
    /// Value of the `url` response header, if present (decoded lossily
    /// from UTF-8).
    #[inline]
    pub fn url(&self) -> Option<String> {
        let raw = self.headers.get("url")?;
        Some(String::from_utf8_lossy(raw.as_ref()).into_owned())
    }
}
|
use std::cell::RefCell;
use std::collections::BTreeMap;
use std::rc::Rc;
use std::sync::{Arc, Mutex};
use console_error_panic_hook::set_once as set_panic_hook;
use specs::prelude::*;
use wasm_bindgen::{prelude::*, JsCast};
use web_sys::{
console, Document, Event, EventTarget, HtmlInputElement, InputEvent, MouseEvent, Node,
};
use oak::browser::events::*;
use oak::markup::VirtualNode;
use oak::shrev::EventChannel;
use oak::state::State;
/// Application state for this TodoMVC example.
#[derive(Debug)]
struct Model {
    // Current contents of the "new todo" text input.
    field: String,
    // Presumably the next task id to hand out — the two default tasks use
    // ids 1 and 2 and `uid` defaults to 3; confirm once `Msg::Add` is
    // implemented (it is currently a no-op).
    uid: u16,
    tasks: Vec<Task>,
}
impl Default for Model {
fn default() -> Self {
Self {
field: "".to_owned(),
uid: 3,
tasks: vec![
Task {
description: "Hello World".to_owned(),
completed: false,
edits: None,
id: 1,
},
Task {
description: "Foo Bar".to_owned(),
completed: true,
edits: None,
id: 2,
},
],
}
}
}
/// A single todo entry.
#[derive(Default, Debug)]
struct Task {
    description: String,
    completed: bool,
    // Looks like in-progress edit text (None when not being edited) —
    // nothing in this chunk writes it; confirm against the edit handlers.
    edits: Option<String>,
    id: u16,
}
/// Messages produced by DOM event listeners and folded into the [`Model`].
#[derive(Debug)]
enum Msg {
    // New value of the "new todo" input field.
    UpdateField(String),
    // Emitted to add the current field as a task.
    Add,
}
impl State<Msg> for Model {
    /// Folds one message into the model.
    fn update(&mut self, msg: &Msg) {
        match msg {
            Msg::UpdateField(field) => self.field = field.clone(),
            // Currently a no-op — the example does not implement adding yet.
            Msg::Add => (),
        }
    }
}
/// WASM entry point: builds the ECS world, constructs the static TodoMVC
/// DOM tree as entities, then wires browser `click`/`input` events into the
/// ECS event channels.
#[wasm_bindgen]
pub fn main() {
    // Route Rust panics to the browser console instead of a silent abort.
    set_panic_hook();
    let mut world = World::new();
    // Systems run in registration order (judging by the `oak` type names:
    // browser-event translation, logging, state update, stateful render,
    // then DOM create/mount/update — confirm in the `oak` crates).
    let mut dispatcher = DispatcherBuilder::new()
        .with_thread_local(oak::browser::events::BrowserEventSystem {
            reader_id: None,
            receiver: None,
            phantom: std::marker::PhantomData::<(MouseEvent, Msg)>,
        })
        .with_thread_local(oak::browser::events::BrowserEventSystem {
            reader_id: None,
            receiver: None,
            phantom: std::marker::PhantomData::<(InputEvent, Msg)>,
        })
        .with_thread_local(oak::browser::events::EventLogger {
            reader_id: None,
            phantom: std::marker::PhantomData::<Msg>,
        })
        .with_thread_local(oak::state::StateUpdater {
            reader_id: None,
            phantom: std::marker::PhantomData::<(Msg, Model)>,
        })
        .with_thread_local(oak::state::StatefulSystem {
            phantom: std::marker::PhantomData::<(Msg, Model, VirtualNode)>,
        })
        .with_thread_local(oak::browser::BrowserNodeCreator)
        .with_thread_local(oak::browser::BrowserNodeMounter)
        .with_thread_local(oak::browser::BrowserNodeUpdater::default())
        .build();
    dispatcher.setup(&mut world.res);
    let window = web_sys::window().expect("window");
    let document = window.document().expect("document");
    let body = document.body().unwrap();
    // Wrap the real <body> node as an entity so virtual nodes can parent to it.
    let body_entity = world
        .create_entity()
        .with(oak::browser::BrowserNode {
            node: body.into(),
            is_mounted: true,
        })
        .build();
    // Static DOM skeleton: <div.todomvc-wrapper><section.todoapp>…
    let container = oak::markup::create_element(&mut world, "div", &[("class", "todomvc-wrapper")])
        .with(oak::markup::VirtualNodeParent(body_entity))
        .build();
    let section = oak::markup::create_element(&mut world, "section", &[("class", "todoapp")])
        .with(oak::markup::VirtualNodeParent(container))
        .build();
    {
        // Header: title plus the "new todo" input with its `input` listener.
        let header = oak::markup::create_element(&mut world, "header", &[("class", "header")])
            .with(oak::markup::VirtualNodeParent(section))
            .build();
        let h1 = oak::markup::create_element(&mut world, "h1", &[])
            .with(oak::markup::VirtualNodeParent(header))
            .build();
        oak::markup::create_text(&mut world, "todos")
            .with(oak::markup::VirtualNodeParent(h1))
            .build();
        oak::markup::create_element(
            &mut world,
            "input",
            &[
                ("class", "new-todo"),
                ("placeholder", "What needs to be done?"),
                ("autofocus", "true"),
                ("name", "newTodo"),
            ],
        )
        .with(oak::markup::VirtualNodeParent(header))
        .with(oak::browser::events::EventListeners(vec![(
            "input".to_owned(),
            // Map each keystroke to a Msg carrying the input's full value.
            Box::new(|e: &InputEvent| {
                Msg::UpdateField(
                    e.target()
                        .unwrap()
                        .unchecked_into::<HtmlInputElement>()
                        .value(),
                )
            }),
        )]))
        .build();
    }
    // Render the default model's two sample tasks.
    build_task_list(&mut world, section, &Model::default());
    let footer = oak::markup::create_element(&mut world, "footer", &[("class", "info")])
        .with(oak::markup::VirtualNodeParent(container))
        .build();
    {
        let footer_p1 = oak::markup::create_element(&mut world, "p", &[])
            .with(oak::markup::VirtualNodeParent(footer))
            .build();
        oak::markup::create_text(&mut world, "Double-click to edit a todo")
            .with(oak::markup::VirtualNodeParent(footer_p1))
            .build();
    }
    {
        let footer_p2 = oak::markup::create_element(&mut world, "p", &[])
            .with(oak::markup::VirtualNodeParent(footer))
            .build();
        oak::markup::create_text(&mut world, "Written by ")
            .with(oak::markup::VirtualNodeParent(footer_p2))
            .build();
        let a = oak::markup::create_element(&mut world, "a", &[("href", "http://sagan.software")])
            .with(oak::markup::VirtualNodeParent(footer_p2))
            .build();
        oak::markup::create_text(&mut world, "sagan.software")
            .with(oak::markup::VirtualNodeParent(a))
            .build();
    }
    {
        let footer_p3 = oak::markup::create_element(&mut world, "p", &[])
            .with(oak::markup::VirtualNodeParent(footer))
            .build();
        oak::markup::create_text(&mut world, "Part of ")
            .with(oak::markup::VirtualNodeParent(footer_p3))
            .build();
        let a2 = oak::markup::create_element(&mut world, "a", &[("href", "http://todomvc.com")])
            .with(oak::markup::VirtualNodeParent(footer_p3))
            .build();
        oak::markup::create_text(&mut world, "TodoMVC")
            .with(oak::markup::VirtualNodeParent(a2))
            .build();
    }
    // Initial dispatch: create and mount all the DOM nodes built above.
    dispatcher.dispatch(&world.res);
    world.maintain();
    // World and dispatcher are shared with the JS closures below; Rc/RefCell
    // is fine since WASM here is single-threaded.
    let world_rc = Rc::new(RefCell::new(world));
    let dispatcher_rc = Rc::new(RefCell::new(dispatcher));
    let world_rc2 = world_rc.clone();
    let dispatcher_rc2 = dispatcher_rc.clone();
    // Document-level click listener: forward the event into the ECS channel
    // and run one frame of the pipeline.
    let cb = Closure::wrap(Box::new(move |event: MouseEvent| {
        let mut world = world_rc2.borrow_mut();
        world
            .write_resource::<EventChannel<BrowserEvent<MouseEvent>>>()
            .single_write(BrowserEvent {
                name: "click".to_owned(),
                event,
            });
        dispatcher_rc2.borrow_mut().dispatch(&world.res);
        world.maintain();
    }) as Box<dyn Fn(_)>);
    let et: &web_sys::EventTarget = &web_sys::window().unwrap().document().unwrap();
    et.add_event_listener_with_callback("click", cb.as_ref().unchecked_ref())
        .unwrap();
    // Leak the closure so it outlives `main` (standard wasm-bindgen pattern).
    cb.forget();
    let world_rc2 = world_rc.clone();
    let dispatcher_rc2 = dispatcher_rc.clone();
    // Same wiring for `input` events.
    let cb = Closure::wrap(Box::new(move |event: InputEvent| {
        let mut world = world_rc2.borrow_mut();
        world
            .write_resource::<EventChannel<BrowserEvent<InputEvent>>>()
            .single_write(BrowserEvent {
                name: "input".to_owned(),
                event,
            });
        dispatcher_rc2.borrow_mut().dispatch(&world.res);
        world.maintain();
    }) as Box<dyn Fn(_)>);
    let et: &web_sys::EventTarget = &web_sys::window().unwrap().document().unwrap();
    et.add_event_listener_with_callback("input", cb.as_ref().unchecked_ref())
        .unwrap();
    cb.forget();
}
/// Builds the `<section.main>` task-list DOM subtree under `parent`,
/// creating one `<li>` per task in `model.tasks`.
fn build_task_list(world: &mut World, parent: Entity, model: &Model) {
    let section = oak::markup::create_element(world, "section", &[("class", "main")])
        .with(oak::markup::VirtualNodeParent(parent))
        .build();
    // "Toggle all" checkbox (no listener wired up yet in this example).
    oak::markup::create_element(
        world,
        "input",
        &[
            ("class", "toggle-all"),
            ("id", "toggle-all"),
            ("type", "checkbox"),
            ("name", "toggle"),
        ],
    )
    .with(oak::markup::VirtualNodeParent(section))
    .build();
    {
        let label = oak::markup::create_element(world, "label", &[("for", "toggle-all")])
            .with(oak::markup::VirtualNodeParent(section))
            .build();
        oak::markup::create_text(world, "Mark all as completed")
            .with(oak::markup::VirtualNodeParent(label))
            .build();
    }
    let task_list = oak::markup::create_element(world, "ul", &[("class", "todo-list")])
        .with(oak::markup::VirtualNodeParent(section))
        .build();
    for task in &model.tasks {
        // NOTE(review): class is always "" — `task.completed` is not
        // reflected as a "completed" class here; confirm whether intended.
        let li = oak::markup::create_element(world, "li", &[("class", "")])
            .with(oak::markup::VirtualNodeParent(task_list))
            .build();
        let div = oak::markup::create_element(world, "div", &[("class", "view")])
            .with(oak::markup::VirtualNodeParent(li))
            .build();
        oak::markup::create_element(world, "input", &[("class", "toggle"), ("type", "checkbox")])
            .with(oak::markup::VirtualNodeParent(div))
            .build();
        {
            let label = oak::markup::create_element(world, "label", &[])
                .with(oak::markup::VirtualNodeParent(div))
                .build();
            oak::markup::create_text(world, &task.description)
                .with(oak::markup::VirtualNodeParent(label))
                .build();
        }
        oak::markup::create_element(world, "button", &[("class", "destroy")])
            .with(oak::markup::VirtualNodeParent(div))
            .build();
        // Hidden edit input, pre-filled with the task description.
        oak::markup::create_element(
            world,
            "input",
            &[
                ("class", "edit"),
                ("value", &task.description),
                ("name", "title"),
            ],
        )
        .with(oak::markup::VirtualNodeParent(li))
        .build();
    }
}
|
// Copyright 2019, 2020 Wingchain
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::sync::Arc;
use executor_macro::{call, module};
use executor_primitives::{
errors, errors::ApplicationError, Context, ContextEnv, EmptyParams, Module as ModuleT,
ModuleResult, OpaqueModuleResult, StorageValue, Util,
};
use primitives::codec::{Decode, Encode};
use primitives::{codec, Address, Call, Event};
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
/// Proof-of-authority (PoA) consensus module: stores the block interval, the
/// weighted admin group, the current authority address, and the pending
/// update proposals with their id counters.
pub struct Module<C, U>
where
    C: Context,
    U: Util,
{
    // Chain environment (used for the genesis check in `init`).
    env: Arc<ContextEnv>,
    #[allow(dead_code)]
    context: C,
    util: U,
    // Target block interval; `None` is a valid stored value.
    block_interval: StorageValue<Option<u64>, Self>,
    // Weighted admin group that votes on proposals.
    admin: StorageValue<Admin, Self>,
    // Address of the current block-producing authority.
    authority: StorageValue<Address, Self>,
    /// update admin proposal id
    update_admin_proposal_id: StorageValue<u32, Self>,
    /// update admin proposal
    update_admin_proposal: StorageValue<UpdateAdminProposal, Self>,
    /// update authority proposal id
    update_authority_proposal_id: StorageValue<u32, Self>,
    /// update authority proposal
    update_authority_proposal: StorageValue<UpdateAuthorityProposal, Self>,
}
#[module]
impl<C: Context, U: Util> Module<C, U> {
    const META_MODULE: bool = true;
    // Key prefix for all of this module's storage values.
    const STORAGE_KEY: &'static [u8] = b"poa";
    /// Wires each storage value to its key under the module prefix.
    fn new(context: C, util: U) -> Self {
        Self {
            env: context.env(),
            context: context.clone(),
            util,
            block_interval: StorageValue::new(context.clone(), b"block_interval"),
            admin: StorageValue::new(context.clone(), b"admin"),
            authority: StorageValue::new(context.clone(), b"authority"),
            update_admin_proposal_id: StorageValue::new(
                context.clone(),
                b"update_admin_proposal_id",
            ),
            update_admin_proposal: StorageValue::new(context.clone(), b"update_admin_proposal"),
            update_authority_proposal_id: StorageValue::new(
                context.clone(),
                b"update_authority_proposal_id",
            ),
            update_authority_proposal: StorageValue::new(context, b"update_authority_proposal"),
        }
    }
    /// Genesis-only initialization of interval, admin group and authority.
    #[call(write = true)]
    fn init(&self, _sender: Option<&Address>, params: InitParams) -> ModuleResult<()> {
        // Only callable in the genesis block.
        if self.env.number != 0 {
            return Err("Not genesis".into());
        }
        self.block_interval.set(&params.block_interval)?;
        self.admin.set(&params.admin)?;
        self.authority.set(&params.authority)?;
        Ok(())
    }
    /// Pre-validates `init` params: every member address and the authority
    /// address must be well-formed.
    fn validate_init(&self, _sender: Option<&Address>, params: InitParams) -> ModuleResult<()> {
        for (address, _) in &params.admin.members {
            self.util.validate_address(address)?;
        }
        self.util.validate_address(&params.authority)?;
        Ok(())
    }
    /// Read-only: returns the module meta (block interval).
    #[call]
    fn get_meta(&self, _sender: Option<&Address>, _params: EmptyParams) -> ModuleResult<Meta> {
        let block_interval = self.block_interval.get()?;
        // Storage must have been initialized at genesis.
        let block_interval = block_interval.ok_or("Unexpected none")?;
        let meta = Meta { block_interval };
        Ok(meta)
    }
    /// Read-only: returns the current authority address.
    #[call]
    fn get_authority(
        &self,
        _sender: Option<&Address>,
        _params: EmptyParams,
    ) -> ModuleResult<Address> {
        let authority = self.authority.get()?;
        let authority = authority.ok_or("Unexpected none")?;
        Ok(authority)
    }
    /// Read-only: returns the current admin group.
    #[call]
    fn get_admin(&self, _sender: Option<&Address>, _params: EmptyParams) -> ModuleResult<Admin> {
        let admin = self.admin.get()?;
        let admin = admin.ok_or("Unexpected none")?;
        Ok(admin)
    }
    /// Creates an update-admin proposal and immediately counts the sender's
    /// vote against the *current* admin group's threshold.
    #[call(write = true)]
    fn update_admin(
        &self,
        sender: Option<&Address>,
        params: UpdateAdminParams,
    ) -> ModuleResult<()> {
        let sender = sender.ok_or(ApplicationError::Unsigned)?;
        // Only a current admin member may propose; capture the current
        // threshold/weights before they can change.
        let (old_threshold, old_members) = self.verify_sender(sender)?;
        // create a proposal
        let new_admin = aggregate_admin(params.admin);
        let proposal_id = self.update_admin_proposal_id.get()?.unwrap_or(1u32);
        let mut proposal = UpdateAdminProposal {
            proposal_id,
            admin: new_admin,
            vote: vec![],
        };
        self.context.emit_event(Event::from_data(
            "UpdateAdminProposalCreated".to_string(),
            UpdateAdminProposalCreated {
                proposal: proposal.clone(),
            },
        )?)?;
        self.update_admin_vote_and_pass(sender, &mut proposal, old_threshold, &old_members)
    }
    /// Adds the sender's vote to the pending update-admin proposal.
    #[call(write = true)]
    fn update_admin_vote(
        &self,
        sender: Option<&Address>,
        params: UpdateAdminVoteParams,
    ) -> ModuleResult<()> {
        let sender = sender.ok_or(ApplicationError::Unsigned)?;
        let proposal = self.update_admin_proposal.get()?;
        let mut proposal = proposal.ok_or("Proposal not found")?;
        // The vote must reference the currently pending proposal.
        if proposal.proposal_id != params.proposal_id {
            return Err("Proposal id not match".into());
        }
        let (old_threshold, old_members) = self.verify_sender(sender)?;
        self.update_admin_vote_and_pass(sender, &mut proposal, old_threshold, &old_members)
    }
    /// Creates an update-authority proposal and immediately counts the
    /// sender's vote.
    #[call(write = true)]
    fn update_authority(
        &self,
        sender: Option<&Address>,
        params: UpdateAuthorityParams,
    ) -> ModuleResult<()> {
        let sender = sender.ok_or(ApplicationError::Unsigned)?;
        let (old_threshold, old_members) = self.verify_sender(sender)?;
        let authority = params.authority;
        // create a proposal
        let proposal_id = self.update_authority_proposal_id.get()?.unwrap_or(1u32);
        let mut proposal = UpdateAuthorityProposal {
            proposal_id,
            authority,
            vote: vec![],
        };
        self.context.emit_event(Event::from_data(
            "UpdateAuthorityProposalCreated".to_string(),
            UpdateAuthorityProposalCreated {
                proposal: proposal.clone(),
            },
        )?)?;
        self.update_authority_vote_and_pass(sender, &mut proposal, old_threshold, &old_members)
    }
    /// Adds the sender's vote to the pending update-authority proposal.
    #[call(write = true)]
    fn update_authority_vote(
        &self,
        sender: Option<&Address>,
        params: UpdateAuthorityVoteParams,
    ) -> ModuleResult<()> {
        let sender = sender.ok_or(ApplicationError::Unsigned)?;
        let proposal = self.update_authority_proposal.get()?;
        let mut proposal = proposal.ok_or("Proposal not found")?;
        if proposal.proposal_id != params.proposal_id {
            return Err("Proposal id not match".into());
        }
        let (old_threshold, old_members) = self.verify_sender(sender)?;
        self.update_authority_vote_and_pass(sender, &mut proposal, old_threshold, &old_members)
    }
    /// Checks the sender is a current admin member; returns the current
    /// threshold and a weight lookup map for tallying votes.
    fn verify_sender(&self, sender: &Address) -> ModuleResult<(u32, HashMap<Address, u32>)> {
        let admin = self.admin.get()?;
        let admin = admin.ok_or("Admin not found")?;
        let threshold = admin.threshold;
        let members = admin.members.into_iter().collect::<HashMap<_, _>>();
        if !members.contains_key(sender) {
            return Err("Not admin".into());
        }
        Ok((threshold, members))
    }
    /// Records the vote, emits events, and applies the new admin group once
    /// the summed vote weight reaches the old threshold.
    fn update_admin_vote_and_pass(
        &self,
        sender: &Address,
        proposal: &mut UpdateAdminProposal,
        old_threshold: u32,
        old_members: &HashMap<Address, u32>,
    ) -> ModuleResult<()> {
        // vote for the proposal (idempotent per sender)
        if !proposal.vote.contains(sender) {
            proposal.vote.push(sender.clone());
        }
        self.context.emit_event(Event::from_data(
            "UpdateAdminProposalVoted".to_string(),
            UpdateAdminProposalVoted {
                proposal: proposal.clone(),
            },
        )?)?;
        // pass a proposal: sum the voters' weights under the *old* admin set
        let sum = proposal
            .vote
            .iter()
            .fold(0u32, |x, v| x + *old_members.get(v).unwrap_or(&0u32));
        let mut pass = false;
        if sum >= old_threshold {
            self.admin.set(&proposal.admin)?;
            pass = true;
            self.context.emit_event(Event::from_data(
                "UpdateAdminProposalPassed".to_string(),
                UpdateAdminProposalPassed {
                    proposal: proposal.clone(),
                },
            )?)?;
        }
        if pass {
            self.update_admin_proposal.delete()?;
        } else {
            self.update_admin_proposal.set(&proposal)?;
        };
        // NOTE(review): the id counter advances on every vote, not only when
        // the proposal passes — confirm this is the intended numbering.
        self.update_admin_proposal_id
            .set(&(proposal.proposal_id + 1))?;
        Ok(())
    }
    /// Records the vote, emits events, and applies the new authority once
    /// the summed vote weight reaches the old threshold.
    fn update_authority_vote_and_pass(
        &self,
        sender: &Address,
        proposal: &mut UpdateAuthorityProposal,
        old_threshold: u32,
        old_members: &HashMap<Address, u32>,
    ) -> ModuleResult<()> {
        // vote for the proposal (idempotent per sender)
        if !proposal.vote.contains(sender) {
            proposal.vote.push(sender.clone());
        }
        self.context.emit_event(Event::from_data(
            "UpdateAuthorityProposalVoted".to_string(),
            UpdateAuthorityProposalVoted {
                proposal: proposal.clone(),
            },
        )?)?;
        // pass a proposal: sum the voters' weights under the *old* admin set
        let sum = proposal
            .vote
            .iter()
            .fold(0u32, |x, v| x + *old_members.get(v).unwrap_or(&0u32));
        let mut pass = false;
        if sum >= old_threshold {
            self.authority.set(&proposal.authority)?;
            pass = true;
            self.context.emit_event(Event::from_data(
                "UpdateAuthorityProposalPassed".to_string(),
                UpdateAuthorityProposalPassed {
                    proposal: proposal.clone(),
                },
            )?)?;
        }
        if pass {
            self.update_authority_proposal.delete()?;
        } else {
            self.update_authority_proposal.set(&proposal)?;
        };
        // NOTE(review): counter advances on every vote — see admin variant.
        self.update_authority_proposal_id
            .set(&(proposal.proposal_id + 1))?;
        Ok(())
    }
}
/// Normalizes an `Admin` group: drops zero-weight members and merges
/// duplicate addresses by summing their weights. The first-seen order of
/// addresses is preserved; the threshold is kept unchanged.
fn aggregate_admin(admin: Admin) -> Admin {
    let threshold = admin.threshold;
    let mut new_members = Vec::<(Address, u32)>::new();
    for (address, weight) in admin.members {
        // Zero-weight entries carry no voting power; drop them.
        if weight == 0 {
            continue;
        }
        // Single pass with `iter_mut().find` instead of the old
        // `position` + `get_mut` + `unwrap` double lookup.
        match new_members.iter_mut().find(|entry| entry.0 == address) {
            Some(entry) => entry.1 += weight,
            None => new_members.push((address, weight)),
        }
    }
    Admin {
        threshold,
        members: new_members,
    }
}
/// Admin group: weighted member addresses plus the total vote weight
/// (`threshold`) a proposal must reach to pass.
#[derive(Encode, Decode, Debug, PartialEq, Clone, Serialize, Deserialize)]
pub struct Admin {
    pub threshold: u32,
    pub members: Vec<(Address, u32)>,
}
/// Genesis parameters for the PoA module's `init` call.
#[derive(Encode, Decode, Debug, PartialEq, Deserialize)]
pub struct InitParams {
    pub block_interval: Option<u64>,
    pub admin: Admin,
    pub authority: Address,
}
/// Module metadata returned by `get_meta`.
#[derive(Encode, Decode, Debug, PartialEq, Clone, Serialize)]
pub struct Meta {
    pub block_interval: Option<u64>,
}
/// Pending proposal to replace the admin group; `vote` lists the member
/// addresses that have voted so far.
#[derive(Encode, Decode, Debug, PartialEq, Clone, Serialize, Deserialize)]
pub struct UpdateAdminProposal {
    pub proposal_id: u32,
    pub admin: Admin,
    pub vote: Vec<Address>,
}
/// Pending proposal to replace the authority; `vote` lists the member
/// addresses that have voted so far.
#[derive(Encode, Decode, Debug, PartialEq, Clone, Serialize, Deserialize)]
pub struct UpdateAuthorityProposal {
    pub proposal_id: u32,
    pub authority: Address,
    pub vote: Vec<Address>,
}
/// Params for `update_admin`: the proposed new admin group.
#[derive(Encode, Decode, Debug, PartialEq)]
pub struct UpdateAdminParams {
    pub admin: Admin,
}
/// Params for `update_admin_vote`: must match the pending proposal's id.
#[derive(Encode, Decode, Debug, PartialEq)]
pub struct UpdateAdminVoteParams {
    pub proposal_id: u32,
}
/// Params for `update_authority`: the proposed new authority address.
#[derive(Encode, Decode, Debug, PartialEq)]
pub struct UpdateAuthorityParams {
    pub authority: Address,
}
/// Params for `update_authority_vote`: must match the pending proposal's id.
#[derive(Encode, Decode, Debug, PartialEq)]
pub struct UpdateAuthorityVoteParams {
    pub proposal_id: u32,
}
/// Event payload emitted when an update-admin proposal is created.
#[derive(Serialize, Deserialize, Debug)]
pub struct UpdateAdminProposalCreated {
    pub proposal: UpdateAdminProposal,
}
/// Event payload emitted after each vote on an update-admin proposal.
#[derive(Serialize, Deserialize, Debug)]
pub struct UpdateAdminProposalVoted {
    pub proposal: UpdateAdminProposal,
}
/// Event payload emitted when an update-admin proposal reaches threshold.
#[derive(Serialize, Deserialize, Debug)]
pub struct UpdateAdminProposalPassed {
    pub proposal: UpdateAdminProposal,
}
/// Event payload emitted when an update-authority proposal is created.
#[derive(Serialize, Deserialize, Debug)]
pub struct UpdateAuthorityProposalCreated {
    pub proposal: UpdateAuthorityProposal,
}
/// Event payload emitted after each vote on an update-authority proposal.
#[derive(Serialize, Deserialize, Debug)]
pub struct UpdateAuthorityProposalVoted {
    pub proposal: UpdateAuthorityProposal,
}
/// Event payload emitted when an update-authority proposal reaches threshold.
#[derive(Serialize, Deserialize, Debug)]
pub struct UpdateAuthorityProposalPassed {
    pub proposal: UpdateAuthorityProposal,
}
|
//! Contains the various structs and methods for handling JSON objects.
pub mod award;
pub mod comment;
pub mod link;
// The `#[doc(hidden)]` modules below are implementation details kept out of
// the rendered docs but still publicly reachable.
#[doc(hidden)]
pub mod listing;
pub mod message;
pub mod misc;
#[doc(hidden)]
pub mod more;
pub mod prelude;
pub mod subreddit;
#[doc(hidden)]
pub mod thing;
pub mod user;
pub mod usersubreddit;
|
use std::{
error::Error,
path::{Path, PathBuf},
};
use teraron::Mode;
/// Grammar definition (RON) driving both generators, relative to the
/// project root.
pub const GRAMMAR: &str = "components/syntax/src/grammar.ron";
/// Tera template rendered into the syntax-kinds source file.
pub const SYNTAX_KINDS: &str = "components/syntax/src/syntax/generated.rs.tera";
/// Tera template rendered into the generated AST source file.
pub const AST: &str = "components/syntax/src/ast/generated.rs.tera";
/// Entry point: regenerates both sources, overwriting existing output files.
fn main() {
    generate(Mode::Overwrite).expect("Generate command failed");
}
/// Runs both teraron generators against [`GRAMMAR`].
///
/// # Errors
/// Propagates any template or I/O error from `teraron::generate`.
fn generate(mode: Mode) -> Result<(), Box<dyn Error>> {
    let grammar = project_root().join(GRAMMAR);
    let syntax_kinds = project_root().join(SYNTAX_KINDS);
    let ast = project_root().join(AST);
    eprintln!("Using {} to generate:", grammar.display());
    // Progress messages kept in the same form for both steps.
    eprintln!(" 1. Generating source of {}", syntax_kinds.display());
    teraron::generate(&syntax_kinds, &grammar, mode)?;
    eprintln!(" 2. Generating source of {}", ast.display());
    teraron::generate(&ast, &grammar, mode)?;
    Ok(())
}
/// Resolves the workspace root: two directory levels above this crate's
/// `CARGO_MANIFEST_DIR` (compile-time constant).
fn project_root() -> PathBuf {
    let manifest_dir = env!("CARGO_MANIFEST_DIR");
    Path::new(manifest_dir)
        .ancestors()
        .nth(2)
        .unwrap()
        .to_path_buf()
}
|
//! VapourSynth script-related things.
#[cfg(not(feature = "gte-vsscript-api-32"))]
use std::sync::Mutex;
use std::sync::Once;
use vapoursynth_sys as ffi;
#[cfg(not(feature = "gte-vsscript-api-32"))]
lazy_static! {
    // Serializes the non-thread-safe `vsscript_*` FFI calls (see the
    // `call_vsscript!` macro); compiled out when the API >= 3.2 feature
    // is enabled.
    static ref FFI_CALL_MUTEX: Mutex<()> = Mutex::new(());
}
// Some `vsscript_*` function calls have threading issues. Protect them with a mutex.
// https://github.com/vapoursynth/vapoursynth/issues/367
/// Wraps a `vsscript_*` FFI call, holding the global FFI mutex for the
/// duration of the call on API versions below 3.2.
macro_rules! call_vsscript {
    ($call:expr) => {{
        // Fixed in VSScript API 3.2.
        // TODO: also not needed when we're running API 3.2 even without a feature.
        #[cfg(not(feature = "gte-vsscript-api-32"))]
        let _lock = crate::vsscript::FFI_CALL_MUTEX.lock();
        // Note: `_lock` holds the whole `Result`; even a poisoned mutex
        // still serializes the call, since `PoisonError` owns the guard.
        $call
    }};
}
/// Ensures `vsscript_init()` has been called at least once.
// TODO: `vsscript_init()` is already thread-safe with `std::call_once()`, maybe this can be done
// differently to remove the thread protection on Rust's side? An idea is to have a special type
// which calls `vsscript_init()` in `new()` and `vsscript_finalize()` in `drop()` and have the rest
// of the API accessible through that type, however that could become somewhat unergonomic with
// having to store its lifetime everywhere and potentially pass it around the threads.
#[inline]
pub(crate) fn maybe_initialize() {
    // `Once` guarantees the init (and version check) runs exactly once per
    // process, regardless of how many threads race here.
    static ONCE: Once = Once::new();
    ONCE.call_once(|| unsafe {
        // SAFETY: plain FFI call with no arguments; per the comment above,
        // `vsscript_init()` is itself thread-safe, and `ONCE` serializes it
        // on our side anyway.
        ffi::vsscript_init();
        // Verify the VSScript API version.
        #[cfg(feature = "gte-vsscript-api-31")]
        {
            // Version word layout: major in the high 16 bits, minor in the
            // low 16 bits.
            fn split_version(version: i32) -> (i32, i32) {
                (version >> 16, version & 0xFFFF)
            }
            let vsscript_version = ffi::vsscript_getApiVersion();
            let (major, minor) = split_version(vsscript_version);
            let (my_major, my_minor) = split_version(ffi::VSSCRIPT_API_VERSION);
            // Major must match exactly; the runtime's minor must be at
            // least what we were built against.
            if my_major != major {
                panic!(
                    "Invalid VSScript major API version (expected: {}, got: {})",
                    my_major, major
                );
            } else if my_minor > minor {
                panic!(
                    "Invalid VSScript minor API version (expected: >= {}, got: {})",
                    my_minor, minor
                );
            }
        }
    });
}
mod errors;
pub use self::errors::{Error, VSScriptError};
mod environment;
pub use self::environment::{Environment, EvalFlags};
|
use rusqlite::Connection;
/// Opens (or creates) the SQLite database at path `p` and runs the schema
/// script `sql/create.sql`.
///
/// # Panics
/// Panics if the database cannot be opened or the create script fails.
pub fn conn(p: String) -> Connection {
    let c = Connection::open(p).unwrap();
    c.execute(include_str!("sql/create.sql"), &[])
        .expect("error: db create_table");
    c
}
/// Inserts every `(key, amount)` pair via `sql/insert.sql` inside a single
/// transaction, so the batch is all-or-nothing.
///
/// # Panics
/// Panics if the transaction cannot start, any insert fails, or the commit
/// fails.
pub fn insert_money(c: &mut Connection, vals: Vec<(String, u32)>) {
    let tx = c.transaction().expect("transaction failed");
    for (k,v) in vals {
        // Relies on rusqlite's `ToSql` impls for `String` and `u32`.
        tx.execute( include_str!("sql/insert.sql") , &[&k, &v]).expect("db tx insert error");
    }
    // Dropping `tx` without committing would roll the batch back.
    tx.commit().expect("commit failed");
}
extern crate wasm_bindgen;
use wasm_bindgen::prelude::*;
/// Binary-search-tree node borrowing its `data` for lifetime `'a`.
// NOTE(review): `#[wasm_bindgen]` does not support generic or
// lifetime-parameterized structs — confirm this attribute actually compiles
// for wasm targets, or whether it should be removed.
#[wasm_bindgen]
#[derive(Debug)]
pub struct Node<'a, T: PartialOrd> {
    data: &'a T,
    left: Option<Box<Node<'a, T>>>,
    right: Option<Box<Node<'a, T>>>,
}
impl<'a, T: PartialOrd> Node<'a, T> {
pub fn new(data: &'a T) -> Self {
return Node {
data,
left: None,
right: None,
};
}
pub fn insert(&mut self, data: &'a T) {
let target_node = if data < self.data {
&mut self.left
} else {
&mut self.right
};
match target_node {
&mut None => *target_node = Some(Box::new(Node::new(data))),
&mut Some(ref mut boxed_node) => boxed_node.insert(data),
}
}
}
|
use crate::{lib};
/// GPIO error type.
#[derive(Debug)]
pub enum Error {
    // Returned when the pin number is outside the port's 0..=31 range.
    MissingPin,
}
/// Whether a pin is configured as an output (`On`) or left as an input
/// (`Off`).
pub enum DigitalOutputMode {
    On,
    Off
}
/// Whether a pin's input sampling (INEN) is enabled (`On`) or disabled
/// (`Off`).
pub enum DigitalReadMode {
    On,
    Off
}
/// Configures the pin's direction: `On` makes it an output, `Off` reverts
/// it to an input.
///
/// Returns `Error::MissingPin` for pin numbers outside 0..=31.
pub fn digital_output_mode(pin: &lib::pin::PinGroup, mode: DigitalOutputMode) -> Result<(), Error>{
    // Port registers are 32 bits wide, so valid pin numbers are 0..=31;
    // pin 32 would overflow the `1 << pin.number` shift.
    if pin.number >= 32 {
        return Err(Error::MissingPin);
    }
    match mode {
        DigitalOutputMode::Off => {
            // DIRCLR (base + 0x04 per SAM D5x PORT map, 0x80 bytes/group):
            // writing a 1 bit clears that direction bit. A read-modify-write
            // on DIRSET cannot clear a bit, so the clear register is used.
            let dirclr = 0x41008004 + (0x80 * (pin.group as u32));
            // SAFETY: raw MMIO write to the PORT peripheral; only valid on
            // the target MCU where this address is memory-mapped.
            unsafe {
                *(dirclr as *mut u32) = 1 << pin.number;
            }
        },
        DigitalOutputMode::On => {
            // DIRSET (base + 0x08): writing a 1 bit sets that direction bit.
            let dirset = 0x41008008 + (0x80 * (pin.group as u32));
            // SAFETY: raw MMIO write to the PORT peripheral.
            unsafe {
                *(dirset as *mut u32) |= 1 << pin.number;
            }
        }
    }
    Ok(())
}
/// Drives an output-capable pin high.
///
/// Returns an `Error` when an invalid pin is specified.
/// ## Examples
/// To set pin PA15 high:
/// ```
/// digital_output_mode(Group::group1, 15, DigitalReadMode::On).unwrap();
/// digital_high(Group::group1, 15).unwrap()
/// ```
pub fn digital_high(pin: &lib::pin::PinGroup) -> Result<(), Error> {
    // Port registers are 32 bits wide, so valid pin numbers are 0..=31;
    // pin 32 would overflow the `1 << pin.number` shift.
    if pin.number >= 32 {
        return Err(Error::MissingPin);
    }
    // OUTSET register of the pin's port group (0x80 bytes per group).
    let outset = 0x41008018 + (0x80 * (pin.group as u32));
    // SAFETY: raw MMIO write to the PORT peripheral; only valid on the
    // target MCU where this address is memory-mapped.
    unsafe {
        *(outset as *mut u32) |= 1 << pin.number;
    }
    Ok(())
}
/// Drives an output-capable pin low.
///
/// Returns an `Error` when an invalid pin is specified.
/// ## Examples
/// To set pin PA15 low:
/// ```
/// digital_output_mode(Group::group1, 15, DigitalReadMode::On).unwrap();
/// digital_low(Group::group1, 15).unwrap()
/// ```
pub fn digital_low(pin: &lib::pin::PinGroup) -> Result<(), Error> {
    // Port registers are 32 bits wide, so valid pin numbers are 0..=31;
    // pin 32 would overflow the `1 << pin.number` shift.
    if pin.number >= 32 {
        return Err(Error::MissingPin);
    }
    // OUTCLR register of the pin's port group (0x80 bytes per group).
    let outclr = 0x41008014 + (0x80 * (pin.group as u32));
    // OUTCLR is write-one-to-clear: write only the target bit. A
    // read-modify-write (`|=`) would read back the OUT value and clear
    // every currently-high pin of the port, not just this one.
    // SAFETY: raw MMIO write to the PORT peripheral; only valid on the
    // target MCU where this address is memory-mapped.
    unsafe {
        *(outclr as *mut u32) = 1 << pin.number;
    }
    Ok(())
}
/// Enables or disables input sampling for a pin.
///
/// Returns an `Error` when an invalid pin is specified.
/// ## Examples
/// ```
/// digital_read_mode(Group::group3, 26, DigitalReadMode::On).unwrap();
/// digital_pin_read(Group::group3, 26).unwrap()
/// ```
pub fn digital_read_mode(pin: &lib::pin::PinGroup, mode: DigitalReadMode) -> Result<(), Error> {
    // Ports expose 32 pins, so valid pin numbers are 0..=31 (the PINCFG
    // block below has one byte per pin, indexed 0..=31).
    if pin.number >= 32 {
        return Err(Error::MissingPin);
    }
    // Per-pin PINCFG byte register (base + 0x40, one byte per pin).
    let pin_config = 0x41008040 + (0x80 * (pin.group as u32)) + (0x01 * pin.number);
    match mode {
        DigitalReadMode::On => {
            // SAFETY: raw MMIO byte access to this pin's PINCFG register.
            unsafe {
                // Set the INEN (Input Enable) bit.
                *(pin_config as *mut u8) |= 2;
            }
        },
        DigitalReadMode::Off => {
            unsafe {
                // Clear the INEN (Input Enable) bit.
                *(pin_config as *mut u8) &= !2;
            }
        }
    }
    Ok(())
}
/// ピン入力状態を取得
///
/// 入力状態のときtrue,未入力のときfalse
///
/// 無効なピンを指定されたときにはErrorを返却
///
/// ## Examples
/// ```
/// digital_read_mode(Group::group3, 26, DigitalReadMode::On).unwrap();
/// digital_pin_read(Group::group3, 26).unwrap()
/// ```
pub fn digital_pin_read(pin: &lib::pin::PinGroup) -> Result<bool, Error> {
if pin.number > 32 {
return Err(Error::MissingPin);
}
let pin_in = 0x41008020 + (0x80 * (pin.group as u32));
unsafe {
Ok(*(pin_in as *mut u32) & (1 << pin.number) == 0)
}
} |
// Reader-side types for the hardware feature 1 register (MACHWF1R):
// `R` wraps the raw 32-bit register value, and each `*_R` alias below is
// the typed reader for one field of it (generated svd2rust style —
// `FieldReader` for multi-bit fields, `BitReader` for single bits).
#[doc = "Register `MACHWF1R` reader"]
pub type R = crate::R<MACHWF1R_SPEC>;
#[doc = "Field `RXFIFOSIZE` reader - MTL Receive FIFO Size"]
pub type RXFIFOSIZE_R = crate::FieldReader;
#[doc = "Field `TXFIFOSIZE` reader - MTL Transmit FIFO Size"]
pub type TXFIFOSIZE_R = crate::FieldReader;
#[doc = "Field `OSTEN` reader - One-Step Timestamping Enable"]
pub type OSTEN_R = crate::BitReader;
#[doc = "Field `PTOEN` reader - PTP Offload Enable"]
pub type PTOEN_R = crate::BitReader;
#[doc = "Field `ADVTHWORD` reader - IEEE 1588 High Word Register Enable"]
pub type ADVTHWORD_R = crate::BitReader;
#[doc = "Field `ADDR64` reader - Address width"]
pub type ADDR64_R = crate::FieldReader;
#[doc = "Field `DCBEN` reader - DCB Feature Enable"]
pub type DCBEN_R = crate::BitReader;
#[doc = "Field `SPHEN` reader - Split Header Feature Enable"]
pub type SPHEN_R = crate::BitReader;
#[doc = "Field `TSOEN` reader - TCP Segmentation Offload Enable"]
pub type TSOEN_R = crate::BitReader;
#[doc = "Field `DBGMEMA` reader - DMA Debug Registers Enable"]
pub type DBGMEMA_R = crate::BitReader;
#[doc = "Field `AVSEL` reader - AV Feature Enable"]
pub type AVSEL_R = crate::BitReader;
#[doc = "Field `HASHTBLSZ` reader - Hash Table Size"]
pub type HASHTBLSZ_R = crate::FieldReader;
#[doc = "Field `L3L4FNUM` reader - Total number of L3 or L4 Filters"]
pub type L3L4FNUM_R = crate::FieldReader;
impl R {
    // Each accessor masks and shifts its field out of the raw 32-bit
    // register value (`self.bits`). Bit positions not covered below
    // (5, 21..=23, 26, 31) have no accessor — presumably reserved bits.
    #[doc = "Bits 0:4 - MTL Receive FIFO Size"]
    #[inline(always)]
    pub fn rxfifosize(&self) -> RXFIFOSIZE_R {
        RXFIFOSIZE_R::new((self.bits & 0x1f) as u8)
    }
    #[doc = "Bits 6:10 - MTL Transmit FIFO Size"]
    #[inline(always)]
    pub fn txfifosize(&self) -> TXFIFOSIZE_R {
        TXFIFOSIZE_R::new(((self.bits >> 6) & 0x1f) as u8)
    }
    #[doc = "Bit 11 - One-Step Timestamping Enable"]
    #[inline(always)]
    pub fn osten(&self) -> OSTEN_R {
        OSTEN_R::new(((self.bits >> 11) & 1) != 0)
    }
    #[doc = "Bit 12 - PTP Offload Enable"]
    #[inline(always)]
    pub fn ptoen(&self) -> PTOEN_R {
        PTOEN_R::new(((self.bits >> 12) & 1) != 0)
    }
    #[doc = "Bit 13 - IEEE 1588 High Word Register Enable"]
    #[inline(always)]
    pub fn advthword(&self) -> ADVTHWORD_R {
        ADVTHWORD_R::new(((self.bits >> 13) & 1) != 0)
    }
    #[doc = "Bits 14:15 - Address width"]
    #[inline(always)]
    pub fn addr64(&self) -> ADDR64_R {
        ADDR64_R::new(((self.bits >> 14) & 3) as u8)
    }
    #[doc = "Bit 16 - DCB Feature Enable"]
    #[inline(always)]
    pub fn dcben(&self) -> DCBEN_R {
        DCBEN_R::new(((self.bits >> 16) & 1) != 0)
    }
    #[doc = "Bit 17 - Split Header Feature Enable"]
    #[inline(always)]
    pub fn sphen(&self) -> SPHEN_R {
        SPHEN_R::new(((self.bits >> 17) & 1) != 0)
    }
    #[doc = "Bit 18 - TCP Segmentation Offload Enable"]
    #[inline(always)]
    pub fn tsoen(&self) -> TSOEN_R {
        TSOEN_R::new(((self.bits >> 18) & 1) != 0)
    }
    #[doc = "Bit 19 - DMA Debug Registers Enable"]
    #[inline(always)]
    pub fn dbgmema(&self) -> DBGMEMA_R {
        DBGMEMA_R::new(((self.bits >> 19) & 1) != 0)
    }
    #[doc = "Bit 20 - AV Feature Enable"]
    #[inline(always)]
    pub fn avsel(&self) -> AVSEL_R {
        AVSEL_R::new(((self.bits >> 20) & 1) != 0)
    }
    #[doc = "Bits 24:25 - Hash Table Size"]
    #[inline(always)]
    pub fn hashtblsz(&self) -> HASHTBLSZ_R {
        HASHTBLSZ_R::new(((self.bits >> 24) & 3) as u8)
    }
    #[doc = "Bits 27:30 - Total number of L3 or L4 Filters"]
    #[inline(always)]
    pub fn l3l4fnum(&self) -> L3L4FNUM_R {
        L3L4FNUM_R::new(((self.bits >> 27) & 0x0f) as u8)
    }
}
#[doc = "HW feature 1 register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`machwf1r::R`](R). See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct MACHWF1R_SPEC;
// The register is 32 bits wide.
impl crate::RegisterSpec for MACHWF1R_SPEC {
    type Ux = u32;
}
// Only `Readable` is implemented — the register is read-only from this
// API's perspective (no `Writable` impl is generated for it).
#[doc = "`read()` method returns [`machwf1r::R`](R) reader structure"]
impl crate::Readable for MACHWF1R_SPEC {}
#[doc = "`reset()` method sets MACHWF1R to value 0x1184_1904"]
impl crate::Resettable for MACHWF1R_SPEC {
    const RESET_VALUE: Self::Ux = 0x1184_1904;
}
|
mod guessing;
mod funcs;
/// Entry point: starts the guessing game with an argument of 10 —
/// presumably the attempt limit or number bound; the semantics live in
/// the `guessing` module, not visible here.
fn main() {
    guessing::guessing_game(10);
}
|
/// Project Euler problem 5.
///
/// 2520 is the smallest number that can be divided by each of the numbers
/// from 1 to 10 without any remainder. Returns the smallest positive
/// number evenly divisible by all of the numbers from 1 to 20, as a
/// string.
pub fn compute() -> String {
    /// Greatest common divisor by Euclid's algorithm.
    fn gcd(a: u64, b: u64) -> u64 {
        if b == 0 { a } else { gcd(b, a % b) }
    }
    // The answer is exactly lcm(1, 2, ..., 20). Folding lcm across the
    // range replaces the original's trial search over multiples of the
    // prime product (which needed an external math library and scanned
    // millions of candidates). Dividing before multiplying avoids
    // intermediate overflow.
    (1u64..=20).fold(1u64, |lcm, n| lcm / gcd(lcm, n) * n).to_string()
}
|
// Recursion
/// Computes `value!` recursively.
///
/// The base case covers all inputs <= 1 (returning 1, which also makes
/// 0! correct); the original's base case of exactly 1 recursed without
/// bound for 0 and negative inputs. Results overflow `i64` for inputs
/// above 20.
fn factorial(value: i64) -> i64 {
    if value <= 1 { 1 } else { value * factorial(value - 1) }
}
fn main() {
let a = factorial(3);
println!("data: {}", a);
} |
use std::error::Error;
use std::sync::mpsc;
use arrayvec::ArrayVec;
/// Maximum number of raw bytes preserved when forwarding an unrecognised
/// MIDI message (longer messages are truncated; see `MidiMessage::Unknown`).
const MIDI_MSG_MAX_LEN: usize = 16;
/// Enumerates the names of all currently available MIDI input ports.
///
/// Returns the first port-name error encountered, if any.
pub fn get_input_ports() -> Result<Vec<String>, Box<dyn Error>> {
    let probe = midir::MidiInput::new("input_device_checker")?;
    let mut names = Vec::with_capacity(probe.port_count());
    for index in 0..probe.port_count() {
        names.push(probe.port_name(index)?);
    }
    Ok(names)
}
/// Connects to MIDI input port `port`, decoding every incoming event and
/// forwarding it through `chan` (see `on_midi_message`).
///
/// The returned connection object keeps the listener alive; dropping it
/// closes the port.
pub fn listen_events(
    name: &str,
    port: usize,
    chan: mpsc::SyncSender<MidiMessage>,
) -> Result<midir::MidiInputConnection<mpsc::SyncSender<MidiMessage>>, Box<dyn Error>> {
    let client_name = format!("{}-input", name);
    let mut midi_in = midir::MidiInput::new(&client_name)?;
    midi_in.ignore(midir::Ignore::None);
    let connection_name = format!("{}-{}-conn", name, port);
    let connection = midi_in.connect(port, &connection_name, on_midi_message, chan)?;
    Ok(connection)
}
/// A decoded (or raw, if undecodable) incoming MIDI message.
#[derive(Debug)]
pub enum MidiMessage {
    /// Active Sensing keep-alive (status byte 0xFE).
    ActiveSensing,
    /// Note On/Off; `on` distinguishes the two.
    Note {
        channel: u8,
        key: u8,
        velocity: u8,
        on: bool,
    },
    // Sustain {
    //     channel: u8,
    //     on: bool,
    // },
    /// Control Change (status nibble 0xB).
    ControlChange {
        channel: u8,
        controller: u8,
        value: u8,
    },
    /// Any message `decode_message` could not interpret: the first
    /// `MIDI_MSG_MAX_LEN` raw bytes plus the original length.
    Unknown {
        msg: ArrayVec::<[u8; MIDI_MSG_MAX_LEN]>,
        total_len: usize,
    }
}
/// Decodes a raw MIDI message into a structured [`MidiMessage`].
///
/// Returns `None` for empty, truncated, or unrecognised messages, which
/// the caller forwards as `MidiMessage::Unknown`. (The original indexed
/// `msg[1]`/`msg[2]` unconditionally and panicked on a note/control-change
/// status byte with a truncated body.)
pub fn decode_message(msg: &[u8]) -> Option<MidiMessage> {
    // Empty slice -> None, same as the original's length check.
    let (&status, data) = msg.split_first()?;
    let hi_nibble = status >> 4;
    let lo_nibble = status & 0xf;
    match hi_nibble {
        // Note Off (0x8) / Note On (0x9): needs key + velocity data bytes.
        0x8 | 0x9 if data.len() >= 2 => Some(MidiMessage::Note {
            channel: lo_nibble,
            key: data[0],
            velocity: data[1],
            on: hi_nibble == 0x9,
        }),
        // Control Change: needs controller + value data bytes.
        0xb if data.len() >= 2 => Some(MidiMessage::ControlChange {
            channel: lo_nibble,
            controller: data[0],
            value: data[1],
        }),
        // System messages: only Active Sensing (0xFE) is recognised.
        0xf if lo_nibble == 0xe => Some(MidiMessage::ActiveSensing),
        _ => None,
    }
}
/// midir callback: decodes each incoming event and pushes it into the
/// channel; undecodable events are forwarded as `Unknown` with their raw
/// bytes truncated to `MIDI_MSG_MAX_LEN`.
fn on_midi_message(_t: u64, bytes: &[u8], chan: &mut mpsc::SyncSender<MidiMessage>) {
    match decode_message(bytes) {
        Some(decoded) => chan.send(decoded).unwrap(),
        None => {
            let unknown = MidiMessage::Unknown {
                msg: bytes.iter().cloned().take(MIDI_MSG_MAX_LEN).collect(),
                total_len: bytes.len(),
            };
            chan.send(unknown).unwrap();
        }
    }
}
|
//! Types and utilities for encoding and decoding ROTMG packets.
//!
//! This crate provides functionality to represent and manipulate ROTMG packets.
//! It defines structs for every packet type known at the time of writing, as
//! well as traits and implementations to allow encoding/decoding the packets as
//! bytes, as they are in the official client and server.
//!
//! If desired, `serde` support can be enabled with the `serde` feature flag,
//! allowing packets to be serialized and deserialized to arbitrary formats.
#![warn(missing_docs)]
pub mod adapters;
mod mappings;
mod parameters;
pub mod raw;
pub mod structured;
pub use mappings::PacketMappings;
pub use parameters::{BasicParameters, Parameters};
|
//! ### Parsers for the data
use super::file::*;
use assembly_core::nom::{
number::complete::{le_f32, le_u32, le_u8},
IResult,
};
/// Parses a `TerrainHeader` from the start of `input`, returning the
/// remaining bytes alongside it.
///
/// Wire layout (little-endian): three single bytes (`version`, `value_1`,
/// `value_2`) followed by three u32 values (`chunk_count`,
/// `width_in_chunks`, `height_in_chunks`).
pub fn parse_terrain_header(input: &[u8]) -> IResult<&[u8], TerrainHeader> {
    let (rest, version) = le_u8(input)?;
    let (rest, value_1) = le_u8(rest)?;
    let (rest, value_2) = le_u8(rest)?;
    let (rest, chunk_count) = le_u32(rest)?;
    let (rest, width_in_chunks) = le_u32(rest)?;
    let (rest, height_in_chunks) = le_u32(rest)?;
    let header = TerrainHeader {
        version,
        value_1,
        value_2,
        chunk_count,
        width_in_chunks,
        height_in_chunks,
    };
    Ok((rest, header))
}
/// Parses a `HeightMapHeader`: width/height (u32), world position x/z
/// (f32), then five fields of unidentified meaning (four u32, one f32)
/// kept under their placeholder names `_1`..`_5`.
#[allow(clippy::just_underscores_and_digits)]
pub fn parse_height_map_header(input: &[u8]) -> IResult<&[u8], HeightMapHeader> {
    let (rest, width) = le_u32(input)?;
    let (rest, height) = le_u32(rest)?;
    let (rest, pos_x) = le_f32(rest)?;
    let (rest, pos_z) = le_f32(rest)?;
    let (rest, unknown_1) = le_u32(rest)?;
    let (rest, unknown_2) = le_u32(rest)?;
    let (rest, unknown_3) = le_u32(rest)?;
    let (rest, unknown_4) = le_u32(rest)?;
    let (rest, unknown_5) = le_f32(rest)?;
    let header = HeightMapHeader {
        width,
        height,
        pos_x,
        pos_z,
        _1: unknown_1,
        _2: unknown_2,
        _3: unknown_3,
        _4: unknown_4,
        _5: unknown_5,
    };
    Ok((rest, header))
}
|
// Copyright 2016 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// AUTOMATICALLY GENERATED from the SPIR-V JSON grammar:
// external/spirv.core.grammar.json.
// DO NOT MODIFY!
impl Context {
pub fn type_void(&mut self) -> TypeToken {
let t = Type {
ty: TypeEnum::Void,
decorations: BTreeSet::new(),
};
if let Some(index) = self.types.iter().position(|x| *x == t) {
TypeToken::new(index)
} else {
self.types.push(t);
TypeToken::new(self.types.len() - 1)
}
}
pub fn type_bool(&mut self) -> TypeToken {
let t = Type {
ty: TypeEnum::Bool,
decorations: BTreeSet::new(),
};
if let Some(index) = self.types.iter().position(|x| *x == t) {
TypeToken::new(index)
} else {
self.types.push(t);
TypeToken::new(self.types.len() - 1)
}
}
pub fn type_int(&mut self, width: u32, signedness: u32) -> TypeToken {
let t = Type {
ty: TypeEnum::Int {
width: width,
signedness: signedness,
},
decorations: BTreeSet::new(),
};
if let Some(index) = self.types.iter().position(|x| *x == t) {
TypeToken::new(index)
} else {
self.types.push(t);
TypeToken::new(self.types.len() - 1)
}
}
pub fn type_float(&mut self, width: u32) -> TypeToken {
let t = Type {
ty: TypeEnum::Float { width: width },
decorations: BTreeSet::new(),
};
if let Some(index) = self.types.iter().position(|x| *x == t) {
TypeToken::new(index)
} else {
self.types.push(t);
TypeToken::new(self.types.len() - 1)
}
}
pub fn type_vector(&mut self, component_type: TypeToken, component_count: u32) -> TypeToken {
let t = Type {
ty: TypeEnum::Vector {
component_type: component_type,
component_count: component_count,
},
decorations: BTreeSet::new(),
};
if let Some(index) = self.types.iter().position(|x| *x == t) {
TypeToken::new(index)
} else {
self.types.push(t);
TypeToken::new(self.types.len() - 1)
}
}
pub fn type_matrix(&mut self, column_type: TypeToken, column_count: u32) -> TypeToken {
let t = Type {
ty: TypeEnum::Matrix {
column_type: column_type,
column_count: column_count,
},
decorations: BTreeSet::new(),
};
if let Some(index) = self.types.iter().position(|x| *x == t) {
TypeToken::new(index)
} else {
self.types.push(t);
TypeToken::new(self.types.len() - 1)
}
}
pub fn type_image(
&mut self,
sampled_type: TypeToken,
dim: spirv::Dim,
depth: u32,
arrayed: u32,
ms: u32,
sampled: u32,
image_format: spirv::ImageFormat,
access_qualifier: Option<spirv::AccessQualifier>,
) -> TypeToken {
let t = Type {
ty: TypeEnum::Image {
sampled_type: sampled_type,
dim: dim,
depth: depth,
arrayed: arrayed,
ms: ms,
sampled: sampled,
image_format: image_format,
access_qualifier: access_qualifier,
},
decorations: BTreeSet::new(),
};
if let Some(index) = self.types.iter().position(|x| *x == t) {
TypeToken::new(index)
} else {
self.types.push(t);
TypeToken::new(self.types.len() - 1)
}
}
pub fn type_sampler(&mut self) -> TypeToken {
let t = Type {
ty: TypeEnum::Sampler,
decorations: BTreeSet::new(),
};
if let Some(index) = self.types.iter().position(|x| *x == t) {
TypeToken::new(index)
} else {
self.types.push(t);
TypeToken::new(self.types.len() - 1)
}
}
pub fn type_sampled_image(&mut self, image_type: TypeToken) -> TypeToken {
let t = Type {
ty: TypeEnum::SampledImage { image_type: image_type },
decorations: BTreeSet::new(),
};
if let Some(index) = self.types.iter().position(|x| *x == t) {
TypeToken::new(index)
} else {
self.types.push(t);
TypeToken::new(self.types.len() - 1)
}
}
pub fn type_array(&mut self, element_type: TypeToken, length: ConstantToken) -> TypeToken {
let t = Type {
ty: TypeEnum::Array {
element_type: element_type,
length: length,
},
decorations: BTreeSet::new(),
};
if let Some(index) = self.types.iter().position(|x| *x == t) {
TypeToken::new(index)
} else {
self.types.push(t);
TypeToken::new(self.types.len() - 1)
}
}
pub fn type_runtime_array(&mut self, element_type: TypeToken) -> TypeToken {
let t = Type {
ty: TypeEnum::RuntimeArray { element_type: element_type },
decorations: BTreeSet::new(),
};
if let Some(index) = self.types.iter().position(|x| *x == t) {
TypeToken::new(index)
} else {
self.types.push(t);
TypeToken::new(self.types.len() - 1)
}
}
pub fn type_opaque(&mut self, type_name: String) -> TypeToken {
let t = Type {
ty: TypeEnum::Opaque { type_name: type_name },
decorations: BTreeSet::new(),
};
if let Some(index) = self.types.iter().position(|x| *x == t) {
TypeToken::new(index)
} else {
self.types.push(t);
TypeToken::new(self.types.len() - 1)
}
}
pub fn type_pointer(
&mut self,
storage_class: spirv::StorageClass,
pointee_type: TypeToken,
) -> TypeToken {
let t = Type {
ty: TypeEnum::Pointer {
storage_class: storage_class,
pointee_type: pointee_type,
},
decorations: BTreeSet::new(),
};
if let Some(index) = self.types.iter().position(|x| *x == t) {
TypeToken::new(index)
} else {
self.types.push(t);
TypeToken::new(self.types.len() - 1)
}
}
pub fn type_function(
&mut self,
return_type: TypeToken,
parameter_types: Vec<TypeToken>,
) -> TypeToken {
let t = Type {
ty: TypeEnum::Function {
return_type: return_type,
parameter_types: parameter_types,
},
decorations: BTreeSet::new(),
};
if let Some(index) = self.types.iter().position(|x| *x == t) {
TypeToken::new(index)
} else {
self.types.push(t);
TypeToken::new(self.types.len() - 1)
}
}
pub fn type_event(&mut self) -> TypeToken {
let t = Type {
ty: TypeEnum::Event,
decorations: BTreeSet::new(),
};
if let Some(index) = self.types.iter().position(|x| *x == t) {
TypeToken::new(index)
} else {
self.types.push(t);
TypeToken::new(self.types.len() - 1)
}
}
pub fn type_device_event(&mut self) -> TypeToken {
let t = Type {
ty: TypeEnum::DeviceEvent,
decorations: BTreeSet::new(),
};
if let Some(index) = self.types.iter().position(|x| *x == t) {
TypeToken::new(index)
} else {
self.types.push(t);
TypeToken::new(self.types.len() - 1)
}
}
pub fn type_reserve_id(&mut self) -> TypeToken {
let t = Type {
ty: TypeEnum::ReserveId,
decorations: BTreeSet::new(),
};
if let Some(index) = self.types.iter().position(|x| *x == t) {
TypeToken::new(index)
} else {
self.types.push(t);
TypeToken::new(self.types.len() - 1)
}
}
pub fn type_queue(&mut self) -> TypeToken {
let t = Type {
ty: TypeEnum::Queue,
decorations: BTreeSet::new(),
};
if let Some(index) = self.types.iter().position(|x| *x == t) {
TypeToken::new(index)
} else {
self.types.push(t);
TypeToken::new(self.types.len() - 1)
}
}
pub fn type_pipe(&mut self, qualifier: spirv::AccessQualifier) -> TypeToken {
let t = Type {
ty: TypeEnum::Pipe { qualifier: qualifier },
decorations: BTreeSet::new(),
};
if let Some(index) = self.types.iter().position(|x| *x == t) {
TypeToken::new(index)
} else {
self.types.push(t);
TypeToken::new(self.types.len() - 1)
}
}
pub fn type_forward_pointer(&mut self, storage_class: spirv::StorageClass) -> TypeToken {
let t = Type {
ty: TypeEnum::ForwardPointer { storage_class: storage_class },
decorations: BTreeSet::new(),
};
if let Some(index) = self.types.iter().position(|x| *x == t) {
TypeToken::new(index)
} else {
self.types.push(t);
TypeToken::new(self.types.len() - 1)
}
}
pub fn type_pipe_storage(&mut self) -> TypeToken {
let t = Type {
ty: TypeEnum::PipeStorage,
decorations: BTreeSet::new(),
};
if let Some(index) = self.types.iter().position(|x| *x == t) {
TypeToken::new(index)
} else {
self.types.push(t);
TypeToken::new(self.types.len() - 1)
}
}
pub fn type_named_barrier(&mut self) -> TypeToken {
let t = Type {
ty: TypeEnum::NamedBarrier,
decorations: BTreeSet::new(),
};
if let Some(index) = self.types.iter().position(|x| *x == t) {
TypeToken::new(index)
} else {
self.types.push(t);
TypeToken::new(self.types.len() - 1)
}
}
}
|
/// Returns `true` when `num` is an Armstrong (narcissistic) number, i.e.
/// equal to the sum of its digits each raised to the power of the digit
/// count (e.g. 153 = 1^3 + 5^3 + 3^3).
pub fn is_armstrong_number(num: u32) -> bool {
    let digits = get_digits_from_num(num);
    let exponent = digits.len() as u32;
    // Accumulate in u64: for 10-digit u32 inputs the sum of 9^10 terms can
    // exceed u32::MAX, which panicked (debug) or wrapped (release) in the
    // original u32 arithmetic.
    let sum: u64 = digits.iter().map(|&d| u64::from(d).pow(exponent)).sum();
    u64::from(num) == sum
}
/// Splits `num` into its base-10 digits, most significant first.
fn get_digits_from_num(num: u32) -> Vec<u32> {
    num.to_string().chars().map(|d| d.to_digit(10).unwrap()).collect()
}
|
use amethyst::ecs::prelude::{Component, VecStorage};
/// The ship's fuel tank.
/// Holds `fuel_level`, the current amount of fuel in the tank.
/// Holds `capacity`, the maximum amount of fuel carryable.
/// Holds `weight_per_fuel`, the weight of each unit of fuel, updated on every movement or when refilling.
#[derive(Debug, Default, Clone)]
pub struct FuelTank {
    /// Current fuel stored in this tank.
    pub fuel_level: f32,
    /// Maximum fuel storable by this tank.
    pub capacity: f32,
    /// Weight of a single unit of fuel.
    pub weight_per_fuel: f32,
}
impl FuelTank {
pub fn new(fuel_level: f32, capacity: f32, weight_per_fuel: f32,) -> Self {
FuelTank {
fuel_level,
capacity,
weight_per_fuel,
}
}
}
// Registers FuelTank as an ECS component. VecStorage keeps components in a
// dense vector, which amethyst documents as the choice for components that
// most entities carry.
impl Component for FuelTank {
    type Storage = VecStorage<Self,>;
}
|
use log::*;
use rumqttc::{ConnectionError, TlsConfiguration, Transport};
use rumqttc::{Event, EventLoop, Incoming, MqttOptions, Publish, QoS, Request, Subscribe};
use serde::{Deserialize, Serialize};
use simple_logger::SimpleLogger;
use std::{
fs,
io::{self, BufRead, Read, Write},
path::PathBuf,
time::SystemTime,
};
use structopt::StructOpt;
// use tokio::fs;
// use tokio::io::AsyncBufReadExt;
/// Command-line options shared by both subcommands.
#[derive(Debug, StructOpt)]
#[structopt(name = "mqtt-recorder", about = "mqtt recorder written in rust")]
struct Opt {
    /// The verbosity of the program
    // (was `////`, which rustdoc/structopt treat as an ordinary comment,
    // so the flag shipped with no help text)
    #[structopt(short, long, default_value = "1")]
    verbose: u32,
    /// The address to connect to
    #[structopt(short, long, default_value = "localhost")]
    address: String,
    /// The port to connect to
    #[structopt(short, long, default_value = "1883")]
    port: u16,
    /// certificate of trusted CA
    #[structopt(short, long)]
    cafile: Option<PathBuf>,
    /// Mode to run software in
    #[structopt(subcommand)]
    mode: Mode,
}
/// The two operating modes, selected as CLI subcommands.
#[derive(Debug, StructOpt)]
pub enum Mode {
    /// Records values from an MQTT Stream
    // (`//` -> `///` so the subcommand gets help text in the CLI)
    #[structopt(name = "record")]
    Record(RecordOptions),
    /// Replay values from an input file
    #[structopt(name = "replay")]
    Replay(ReplayOtions),
}
/// Options for the `record` subcommand.
#[derive(Debug, StructOpt)]
pub struct RecordOptions {
    /// Topic to record, can be used multiple times for a set of topics
    // (promoted from `//` to `///` so structopt emits it as help text)
    #[structopt(short, long, default_value = "#")]
    topic: Vec<String>,
    /// The file to write mqtt messages to
    #[structopt(short, long, parse(from_os_str))]
    filename: PathBuf,
}
/// Options for the `replay` subcommand.
///
/// NOTE(review): the type name is misspelled (`Otions`); renaming would
/// also touch `Mode::Replay`, so it is deliberately left as-is here.
#[derive(Debug, StructOpt)]
pub struct ReplayOtions {
    /// Speed of the playback, 2.0 makes it twice as fast
    // (promoted from `//` to `///` so structopt emits it as help text)
    #[structopt(short, long, default_value = "1.0")]
    speed: f64,
    /// The file to read replay values from
    #[structopt(short, long, parse(from_os_str))]
    filename: PathBuf,
    /// Replay the file in an endless loop
    #[structopt(
        name = "loop",
        short,
        long,
        parse(try_from_str),
        default_value = "false"
    )]
    loop_replay: bool,
}
/// One recorded MQTT publish, serialized as a single JSON line in the
/// recording file.
#[derive(Serialize, Deserialize)]
struct MqttMessage {
    // Capture time: fractional seconds since the Unix epoch.
    time: f64,
    // QoS level encoded as 0/1/2.
    qos: u8,
    retain: bool,
    topic: String,
    // Payload base64-encoded so arbitrary binary survives JSON.
    msg_b64: String,
}
#[tokio::main]
async fn main() {
    let opt = Opt::from_args();
    // Millisecond epoch timestamp, used to make the MQTT client id unique.
    let now = SystemTime::now()
        .duration_since(SystemTime::UNIX_EPOCH)
        .unwrap()
        .as_millis();
    let servername = format!("{}-{}", "mqtt-recorder-rs", now);
    // Map the -v count onto a log level; 0 (and anything above 3) logs
    // nothing. Init errors are ignored on purpose.
    match opt.verbose {
        1 => {
            let _e = SimpleLogger::new().with_level(LevelFilter::Info).init();
        }
        2 => {
            let _e = SimpleLogger::new().with_level(LevelFilter::Debug).init();
        }
        3 => {
            let _e = SimpleLogger::new().with_level(LevelFilter::Trace).init();
        }
        0 | _ => {}
    }
    let mut mqttoptions = MqttOptions::new(servername, &opt.address, opt.port);
    // Supplying a CA file switches the transport to TLS trusting that CA
    // (no client certificate, no ALPN).
    if let Some(cafile) = opt.cafile {
        let mut file = fs::OpenOptions::new();
        let mut file = file.read(true).create_new(false).open(&cafile).unwrap();
        let mut vec = Vec::new();
        let _ = file.read_to_end(&mut vec).unwrap();
        let tlsconfig = TlsConfiguration::Simple {
            ca: vec,
            alpn: None,
            client_auth: None,
        };
        let transport = Transport::Tls(tlsconfig);
        mqttoptions.set_transport(transport);
    }
    mqttoptions.set_keep_alive(5);
    let mut eventloop = EventLoop::new(mqttoptions, 20 as usize);
    let requests_tx = eventloop.requests_tx.clone();
    // Dispatch on the selected subcommand.
    match opt.mode {
        Mode::Replay(replay) => {
            // Used by the replay task to tell the poll loop to stop.
            let (stop_tx, stop_rx) = std::sync::mpsc::channel();
            // Sends the recorded messages
            tokio::spawn(async move {
                loop {
                    // Timestamp of the previously replayed message; -1.0
                    // marks "nothing sent yet" for this pass of the file.
                    let mut previous = -1.0;
                    let mut file = fs::OpenOptions::new();
                    debug!("{:?}", replay.filename);
                    let file = file
                        .read(true)
                        .create_new(false)
                        .open(&replay.filename)
                        .unwrap();
                    // One JSON-encoded MqttMessage per line; lines that
                    // fail to parse are silently skipped.
                    for line in io::BufReader::new(&file).lines() {
                        if let Ok(line) = line {
                            let msg = serde_json::from_str::<MqttMessage>(&line);
                            if let Ok(msg) = msg {
                                if previous < 0.0 {
                                    previous = msg.time;
                                }
                                // Sleep for the recorded inter-message gap,
                                // scaled by the requested playback speed.
                                tokio::time::sleep(std::time::Duration::from_millis(
                                    ((msg.time - previous) * 1000.0 / replay.speed) as u64,
                                ))
                                .await;
                                previous = msg.time;
                                let qos = match msg.qos {
                                    0 => QoS::AtMostOnce,
                                    1 => QoS::AtLeastOnce,
                                    2 => QoS::ExactlyOnce,
                                    _ => QoS::AtMostOnce,
                                };
                                let publish = Publish::new(
                                    msg.topic,
                                    qos,
                                    base64::decode(msg.msg_b64).unwrap(),
                                );
                                let _e = requests_tx.send(publish.into()).await;
                            }
                        }
                    }
                    // Without --loop, signal completion and stop replaying.
                    if !replay.loop_replay {
                        let _e = stop_tx.send(());
                        break;
                    }
                }
            });
            // Drive the MQTT event loop until the replay task signals done.
            while let Err(std::sync::mpsc::TryRecvError::Empty) = stop_rx.try_recv() {
                let _res = eventloop.poll().await.unwrap();
            }
        }
        // Enter recording mode and open file writeable
        Mode::Record(record) => {
            // create_new(true): refuse to overwrite an existing recording.
            let mut file = fs::OpenOptions::new();
            let mut file = file
                .write(true)
                .create_new(true)
                .open(&record.filename)
                .unwrap();
            loop {
                let res = eventloop.poll().await;
                match res {
                    // Every incoming publish becomes one JSON line.
                    Ok(Event::Incoming(Incoming::Publish(publish))) => {
                        let qos = match publish.qos {
                            QoS::AtMostOnce => 0,
                            QoS::AtLeastOnce => 1,
                            QoS::ExactlyOnce => 2,
                        };
                        let msg = MqttMessage {
                            time: SystemTime::now()
                                .duration_since(SystemTime::UNIX_EPOCH)
                                .unwrap()
                                .as_secs_f64(),
                            retain: publish.retain,
                            topic: publish.topic.clone(),
                            msg_b64: base64::encode(&*publish.payload),
                            qos,
                        };
                        let serialized = serde_json::to_string(&msg).unwrap();
                        writeln!(file, "{}", serialized).unwrap();
                        debug!("{:?}", publish);
                    }
                    // (Re-)subscribe to every requested topic on each
                    // successful connection, including reconnects.
                    Ok(Event::Incoming(Incoming::ConnAck(_connect))) => {
                        info!("Connected to: {}:{}", opt.address, opt.port);
                        for topic in &record.topic {
                            let subscription = Subscribe::new(topic, QoS::AtLeastOnce);
                            let _ = requests_tx.send(Request::Subscribe(subscription)).await;
                        }
                    }
                    // Only network-level errors end the recording; other
                    // errors are logged and polling continues.
                    Err(e) => {
                        error!("{:?}", e);
                        if let ConnectionError::Network(_e) = e {
                            break;
                        }
                    }
                    _ => {}
                }
            }
        }
    }
}
|
extern crate vyre_rs;
use vyre_rs::graphics::*;
use vyre_rs::graphics::opengl::*;
/// Color format of the main render target (8-bit RGBA).
type RenderFormat = render_format::Rgba8;
/// Format of the depth buffer.
type DepthFormat = depth_format::Depth;
/// initializes the graphics engine and returns earlier if errors occur. Returns handles to window, device, render/depth target and a simple pipeline
fn initialize()
-> Result<(GlWindow,
GlBackend,
RenderTarget<< GlBackend as Backend>::Resources, RenderFormat>,
DepthTarget<< GlBackend as Backend>::Resources, DepthFormat>,
PipelineState<< GlBackend as Backend>::Resources,
pipelines::pipe_vertex_pos2d_rgba8::Meta>),
String>
{
/// creates a window with the title "Vyre Engine", 512 width, 512 height, no vsync and in windowed mode
let wnd = try!(GlWindow::new("Vyre Engine",
Size {
width: 512,
height: 512,
},
false,
WindowMode::Window));
/// creates a device, rendertarget and depth target for the window
let (mut dev, rt, dt) = GlBackend::new(&wnd);
/// creates a simple pipeline
let pipeline = try!(dev.new_pipeline(include_bytes!("../graphics/shader/simple_v2d_col.glslv"),
include_bytes!("../graphics/shader/simple_color.glslf"),
pipelines::pipe_vertex_pos2d_rgba8::new()));
Ok((wnd, dev, rt, dt, pipeline))
}
/// entry point to the command line / gui app
fn main() {
match initialize() {
Ok((mut wnd, mut dev, rt, _, pipeline)) => {
/// creates a command encoder
let mut encoder = dev.new_command_encoder();
/// creates a vertex buffer for a triangle with no index data.
let (vertex_buffer, slice) =
dev.create_vertex_buffer_and_slice(&pipelines::SIMPLE_TRIANGLE, ());
/// sets the vertex buffer to be drawn to the main_target
let pipeline_data = pipelines::pipe_vertex_pos2d_rgba8::Data {
vbuf: vertex_buffer,
out: rt,
};
'main: loop {
// Todo: streamline encoder functions for the GlDevice
encoder.clear(&pipeline_data.out, [0.5, 0.7, 0.3, 1.0]);
encoder.draw(&slice, &pipeline, &pipeline_data);
encoder.flush(&mut dev.device);
wnd.swap_buffers();
dev.cleanup();
}
}
Err(_) => {}
}
}
|
extern crate bio;
use bio::io::fastq::Record;
use std::io;
use std::io::{Error, ErrorKind};
/// Lockstep pairing of two FASTQ record streams — e.g. the R1/R2 files of
/// a paired-end sequencing run.
pub struct PairRecords<R: io::Read, S: io::Read> {
    // Record iterator over the first (R1) input.
    r1_records: bio::io::fastq::Records<R>,
    // Record iterator over the second (R2) input.
    r2_records: bio::io::fastq::Records<S>,
}
impl<R: io::Read, S: io::Read> PairRecords<R, S> {
    /// Builds a paired iterator from the two underlying record streams.
    pub fn new(
        r1_records: bio::io::fastq::Records<R>,
        r2_records: bio::io::fastq::Records<S>,
    ) -> Self {
        PairRecords {
            r1_records,
            r2_records,
        }
    }
}
impl<R: io::Read, S: io::Read> Iterator for PairRecords<R, S> {
    type Item = io::Result<(Record, Record)>;
    /// Pulls one record from each stream in lockstep.
    ///
    /// - Both exhausted: iteration ends.
    /// - A read error on either side is yielded as-is; when both fail,
    ///   R1's error wins — the arm order below is deliberate.
    /// - One side ending before the other yields an `Other` error, since
    ///   the paired inputs are expected to have equal record counts.
    fn next(&mut self) -> Option<io::Result<(Record, Record)>> {
        match (self.r1_records.next(), self.r2_records.next()) {
            (None, None) => None,
            (Some(Err(e)), _) => Some(Err(e)),
            (_, Some(Err(e))) => Some(Err(e)),
            (None, Some(_)) => Some(Err(Error::new(ErrorKind::Other, "R1 ended before R2"))),
            (Some(_), None) => Some(Err(Error::new(ErrorKind::Other, "R2 ended before R1"))),
            (Some(Ok(r1)), Some(Ok(r2))) => Some(Ok((r1, r2))),
        }
    }
}
|
#![cfg(feature = "std")]
use tabled::settings::{Concat, Style};
use crate::matrix::Matrix;
use testing_table::test_table;
// Vertical concat: the second table is appended below the first; the
// first table's style (psql) governs the combined output.
test_table!(
    join_vertical_0,
    Matrix::new(2, 3).insert((1, 0), "123").with(Style::psql())
        .with(Concat::vertical(Matrix::new(2, 3).to_table()))
        .to_string(),
    " N | column 0 | column 1 | column 2 "
    "-----+----------+----------+----------"
    " 123 | 0-0 | 0-1 | 0-2 "
    " 1 | 1-0 | 1-1 | 1-2 "
    " N | column 0 | column 1 | column 2 "
    " 0 | 0-0 | 0-1 | 0-2 "
    " 1 | 1-0 | 1-1 | 1-2 "
)
// As above, but the styled/edited table is the appended one: the base
// table's default ascii style wins.
test_table!(
    join_vertical_1,
    Matrix::new(2, 3)
        .with(Concat::vertical(Matrix::new(2, 3).insert((1, 0), "123").with(Style::psql()))),
    "+-----+----------+----------+----------+"
    "| N | column 0 | column 1 | column 2 |"
    "+-----+----------+----------+----------+"
    "| 0 | 0-0 | 0-1 | 0-2 |"
    "+-----+----------+----------+----------+"
    "| 1 | 1-0 | 1-1 | 1-2 |"
    "+-----+----------+----------+----------+"
    "| N | column 0 | column 1 | column 2 |"
    "+-----+----------+----------+----------+"
    "| 123 | 0-0 | 0-1 | 0-2 |"
    "+-----+----------+----------+----------+"
    "| 1 | 1-0 | 1-1 | 1-2 |"
    "+-----+----------+----------+----------+"
)
// Horizontal concat: the receiving (psql-styled) table keeps its style.
test_table!(
    join_horizontal_0,
    {
        let mut table1 = Matrix::table(2, 3);
        table1.with(Style::ascii());
        let mut table2 = Matrix::table(2, 3);
        table2.with(Style::psql());
        table2.with(Concat::horizontal(table1)).to_string()
    },
    " N | column 0 | column 1 | column 2 | N | column 0 | column 1 | column 2 "
    "---+----------+----------+----------+---+----------+----------+----------"
    " 0 | 0-0 | 0-1 | 0-2 | 0 | 0-0 | 0-1 | 0-2 "
    " 1 | 1-0 | 1-1 | 1-2 | 1 | 1-0 | 1-1 | 1-2 "
)
// Mirror case: the ascii-styled table receives, so ascii borders win.
test_table!(
    join_horizontal_1,
    {
        let mut table1 = Matrix::table(2, 3);
        table1.with(Style::ascii());
        let mut table2 = Matrix::table(2, 3);
        table2.with(Style::psql());
        table1.with(Concat::horizontal(table2)).to_string()
    },
    "+---+----------+----------+----------+---+----------+----------+----------+"
    "| N | column 0 | column 1 | column 2 | N | column 0 | column 1 | column 2 |"
    "+---+----------+----------+----------+---+----------+----------+----------+"
    "| 0 | 0-0 | 0-1 | 0-2 | 0 | 0-0 | 0-1 | 0-2 |"
    "+---+----------+----------+----------+---+----------+----------+----------+"
    "| 1 | 1-0 | 1-1 | 1-2 | 1 | 1-0 | 1-1 | 1-2 |"
    "+---+----------+----------+----------+---+----------+----------+----------+"
)
// Width mismatch on vertical concat: the narrower table gains empty cells.
test_table!(
    join_vertical_different_size,
    {
        let mut table1 = Matrix::table(2, 2);
        table1.with(Style::psql());
        let mut table2 = Matrix::table(2, 3);
        table2.with(Style::psql());
        table1.with(Concat::vertical(table2)).to_string()
    },
    " N | column 0 | column 1 | "
    "---+----------+----------+----------"
    " 0 | 0-0 | 0-1 | "
    " 1 | 1-0 | 1-1 | "
    " N | column 0 | column 1 | column 2 "
    " 0 | 0-0 | 0-1 | 0-2 "
    " 1 | 1-0 | 1-1 | 1-2 "
)
// Height mismatch on horizontal concat: the shorter table gains empty rows.
test_table!(
    join_horizontal_different_size,
    {
        let mut table1 = Matrix::table(2, 3);
        table1.with(Style::psql());
        let mut table2 = Matrix::table(3, 3);
        table2.with(Style::psql());
        table1.with(Concat::horizontal(table2)).to_string()
    },
    " N | column 0 | column 1 | column 2 | N | column 0 | column 1 | column 2 "
    "---+----------+----------+----------+---+----------+----------+----------"
    " 0 | 0-0 | 0-1 | 0-2 | 0 | 0-0 | 0-1 | 0-2 "
    " 1 | 1-0 | 1-1 | 1-2 | 1 | 1-0 | 1-1 | 1-2 "
    " | | | | 2 | 2-0 | 2-1 | 2-2 "
)
// `default_cell` replaces the filler for padded rows on horizontal concat.
test_table!(
    join_horizontal_with_not_default_empty_string,
    {
        let mut table1 = Matrix::table(2, 3);
        table1.with(Style::psql());
        let mut table2 = Matrix::table(3, 3);
        table2.with(Style::psql());
        table1.with(Concat::horizontal(table2).default_cell("NaN")).to_string()
    },
    " N | column 0 | column 1 | column 2 | N | column 0 | column 1 | column 2 "
    "-----+----------+----------+----------+---+----------+----------+----------"
    " 0 | 0-0 | 0-1 | 0-2 | 0 | 0-0 | 0-1 | 0-2 "
    " 1 | 1-0 | 1-1 | 1-2 | 1 | 1-0 | 1-1 | 1-2 "
    " NaN | NaN | NaN | NaN | 2 | 2-0 | 2-1 | 2-2 "
)
// `default_cell` also fills padded columns on vertical concat.
test_table!(
    join_vertical_with_not_default_empty_string,
    {
        let mut table1 = Matrix::table(2, 2);
        table1.with(Style::psql());
        let mut table2 = Matrix::table(2, 3);
        table2.with(Style::psql());
        table1.with(Concat::vertical(table2).default_cell("NaN")).to_string()
    },
    " N | column 0 | column 1 | NaN "
    "---+----------+----------+----------"
    " 0 | 0-0 | 0-1 | NaN "
    " 1 | 1-0 | 1-1 | NaN "
    " N | column 0 | column 1 | column 2 "
    " 0 | 0-0 | 0-1 | 0-2 "
    " 1 | 1-0 | 1-1 | 1-2 "
)
|
#![doc = "generated by AutoRust 0.1.0"]
#![allow(unused_mut)]
#![allow(unused_variables)]
#![allow(unused_imports)]
use super::{models, API_VERSION};
// Aggregate error for every operation in this generated LabServices
// client: each variant wraps one operation module's error type and is
// convertible from it via `#[from]`. `#[non_exhaustive]` lets the
// generator add variants without breaking downstream matches.
#[non_exhaustive]
#[derive(Debug, thiserror :: Error)]
#[allow(non_camel_case_types)]
pub enum Error {
    #[error(transparent)]
    Operations_List(#[from] operations::list::Error),
    #[error(transparent)]
    OperationResults_Get(#[from] operation_results::get::Error),
    #[error(transparent)]
    LabPlans_ListBySubscription(#[from] lab_plans::list_by_subscription::Error),
    #[error(transparent)]
    LabPlans_ListByResourceGroup(#[from] lab_plans::list_by_resource_group::Error),
    #[error(transparent)]
    LabPlans_Get(#[from] lab_plans::get::Error),
    #[error(transparent)]
    LabPlans_CreateOrUpdate(#[from] lab_plans::create_or_update::Error),
    #[error(transparent)]
    LabPlans_Update(#[from] lab_plans::update::Error),
    #[error(transparent)]
    LabPlans_Delete(#[from] lab_plans::delete::Error),
    #[error(transparent)]
    LabPlans_SaveImage(#[from] lab_plans::save_image::Error),
    #[error(transparent)]
    Images_ListByLabPlan(#[from] images::list_by_lab_plan::Error),
    #[error(transparent)]
    Images_Get(#[from] images::get::Error),
    #[error(transparent)]
    Images_CreateOrUpdate(#[from] images::create_or_update::Error),
    #[error(transparent)]
    Images_Update(#[from] images::update::Error),
    #[error(transparent)]
    Labs_ListBySubscription(#[from] labs::list_by_subscription::Error),
    #[error(transparent)]
    Labs_ListByResourceGroup(#[from] labs::list_by_resource_group::Error),
    #[error(transparent)]
    Labs_Get(#[from] labs::get::Error),
    #[error(transparent)]
    Labs_CreateOrUpdate(#[from] labs::create_or_update::Error),
    #[error(transparent)]
    Labs_Update(#[from] labs::update::Error),
    #[error(transparent)]
    Labs_Delete(#[from] labs::delete::Error),
    #[error(transparent)]
    Labs_Publish(#[from] labs::publish::Error),
    #[error(transparent)]
    Labs_SyncGroup(#[from] labs::sync_group::Error),
    #[error(transparent)]
    Users_ListByLab(#[from] users::list_by_lab::Error),
    #[error(transparent)]
    Users_Get(#[from] users::get::Error),
    #[error(transparent)]
    Users_CreateOrUpdate(#[from] users::create_or_update::Error),
    #[error(transparent)]
    Users_Update(#[from] users::update::Error),
    #[error(transparent)]
    Users_Delete(#[from] users::delete::Error),
    #[error(transparent)]
    Users_Invite(#[from] users::invite::Error),
    #[error(transparent)]
    VirtualMachines_ListByLab(#[from] virtual_machines::list_by_lab::Error),
    #[error(transparent)]
    VirtualMachines_Get(#[from] virtual_machines::get::Error),
    #[error(transparent)]
    VirtualMachines_Start(#[from] virtual_machines::start::Error),
    #[error(transparent)]
    VirtualMachines_Stop(#[from] virtual_machines::stop::Error),
    #[error(transparent)]
    VirtualMachines_Reimage(#[from] virtual_machines::reimage::Error),
    #[error(transparent)]
    VirtualMachines_Redeploy(#[from] virtual_machines::redeploy::Error),
    #[error(transparent)]
    VirtualMachines_ResetPassword(#[from] virtual_machines::reset_password::Error),
    #[error(transparent)]
    Schedules_ListByLab(#[from] schedules::list_by_lab::Error),
    #[error(transparent)]
    Schedules_Get(#[from] schedules::get::Error),
    #[error(transparent)]
    Schedules_CreateOrUpdate(#[from] schedules::create_or_update::Error),
    #[error(transparent)]
    Schedules_Update(#[from] schedules::update::Error),
    #[error(transparent)]
    Schedules_Delete(#[from] schedules::delete::Error),
}
pub mod operations {
use super::{models, API_VERSION};
pub async fn list(operation_config: &crate::OperationConfig) -> std::result::Result<models::OperationListResult, list::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/providers/Microsoft.LabServices/operations", operation_config.base_path(),);
let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::OperationListResult =
serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
pub mod operation_results {
use super::{models, API_VERSION};
pub async fn get(
operation_config: &crate::OperationConfig,
subscription_id: &str,
operation_result_id: &str,
) -> std::result::Result<get::Response, get::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.LabServices/operationResults/{}",
operation_config.base_path(),
subscription_id,
operation_result_id
);
let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::OperationResult =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(get::Response::Ok200(rsp_value))
}
http::StatusCode::NO_CONTENT => Ok(get::Response::NoContent204),
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod get {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Ok200(models::OperationResult),
NoContent204,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
pub mod lab_plans {
use super::{models, API_VERSION};
pub async fn list_by_subscription(
operation_config: &crate::OperationConfig,
subscription_id: &str,
filter: Option<&str>,
) -> std::result::Result<models::PagedLabPlans, list_by_subscription::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.LabServices/labPlans",
operation_config.base_path(),
subscription_id
);
let mut url = url::Url::parse(url_str).map_err(list_by_subscription::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_by_subscription::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
if let Some(filter) = filter {
url.query_pairs_mut().append_pair("$filter", filter);
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list_by_subscription::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_by_subscription::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::PagedLabPlans = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_subscription::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_subscription::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_by_subscription::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list_by_subscription {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn list_by_resource_group(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
) -> std::result::Result<models::PagedLabPlans, list_by_resource_group::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.LabServices/labPlans",
operation_config.base_path(),
subscription_id,
resource_group_name
);
let mut url = url::Url::parse(url_str).map_err(list_by_resource_group::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_by_resource_group::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(list_by_resource_group::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_by_resource_group::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::PagedLabPlans = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_resource_group::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_resource_group::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_by_resource_group::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list_by_resource_group {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn get(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
lab_plan_name: &str,
) -> std::result::Result<models::LabPlan, get::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.LabServices/labPlans/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
lab_plan_name
);
let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::LabPlan =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod get {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn create_or_update(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
lab_plan_name: &str,
body: &models::LabPlan,
) -> std::result::Result<create_or_update::Response, create_or_update::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.LabServices/labPlans/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
lab_plan_name
);
let mut url = url::Url::parse(url_str).map_err(create_or_update::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(create_or_update::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(body).map_err(create_or_update::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(create_or_update::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(create_or_update::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::LabPlan = serde_json::from_slice(rsp_body)
.map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(create_or_update::Response::Ok200(rsp_value))
}
http::StatusCode::CREATED => {
let rsp_body = rsp.body();
let rsp_value: models::LabPlan = serde_json::from_slice(rsp_body)
.map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(create_or_update::Response::Created201(rsp_value))
}
http::StatusCode::ACCEPTED => {
let rsp_body = rsp.body();
let rsp_value: models::LabPlan = serde_json::from_slice(rsp_body)
.map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(create_or_update::Response::Accepted202(rsp_value))
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
Err(create_or_update::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod create_or_update {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Ok200(models::LabPlan),
Created201(models::LabPlan),
Accepted202(models::LabPlan),
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn update(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
lab_plan_name: &str,
body: &models::LabPlanUpdate,
) -> std::result::Result<update::Response, update::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.LabServices/labPlans/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
lab_plan_name
);
let mut url = url::Url::parse(url_str).map_err(update::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PATCH);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(update::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(body).map_err(update::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(update::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(update::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::LabPlan =
serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(update::Response::Ok200(rsp_value))
}
http::StatusCode::ACCEPTED => {
let rsp_body = rsp.body();
let rsp_value: models::LabPlan =
serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(update::Response::Accepted202(rsp_value))
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?;
Err(update::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod update {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Ok200(models::LabPlan),
Accepted202(models::LabPlan),
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn delete(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
lab_plan_name: &str,
) -> std::result::Result<delete::Response, delete::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.LabServices/labPlans/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
lab_plan_name
);
let mut url = url::Url::parse(url_str).map_err(delete::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(delete::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(delete::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(delete::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => Ok(delete::Response::Ok200),
http::StatusCode::ACCEPTED => Ok(delete::Response::Accepted202),
http::StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204),
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| delete::Error::DeserializeError(source, rsp_body.clone()))?;
Err(delete::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod delete {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Ok200,
Accepted202,
NoContent204,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn save_image(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
lab_plan_name: &str,
body: &models::SaveImageBody,
) -> std::result::Result<save_image::Response, save_image::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.LabServices/labPlans/{}/saveImage",
operation_config.base_path(),
subscription_id,
resource_group_name,
lab_plan_name
);
let mut url = url::Url::parse(url_str).map_err(save_image::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(save_image::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(body).map_err(save_image::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(save_image::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(save_image::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => Ok(save_image::Response::Ok200),
http::StatusCode::ACCEPTED => Ok(save_image::Response::Accepted202),
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| save_image::Error::DeserializeError(source, rsp_body.clone()))?;
Err(save_image::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod save_image {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Ok200,
Accepted202,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
pub mod images {
use super::{models, API_VERSION};
pub async fn list_by_lab_plan(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
lab_plan_name: &str,
filter: Option<&str>,
) -> std::result::Result<models::PagedImages, list_by_lab_plan::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.LabServices/labPlans/{}/images",
operation_config.base_path(),
subscription_id,
resource_group_name,
lab_plan_name
);
let mut url = url::Url::parse(url_str).map_err(list_by_lab_plan::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_by_lab_plan::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
if let Some(filter) = filter {
url.query_pairs_mut().append_pair("$filter", filter);
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list_by_lab_plan::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_by_lab_plan::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::PagedImages = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_lab_plan::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_lab_plan::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_by_lab_plan::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list_by_lab_plan {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn get(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
lab_plan_name: &str,
image_name: &str,
) -> std::result::Result<models::Image, get::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.LabServices/labPlans/{}/images/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
lab_plan_name,
image_name
);
let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::Image =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod get {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn create_or_update(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
lab_plan_name: &str,
image_name: &str,
body: &models::Image,
) -> std::result::Result<models::Image, create_or_update::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.LabServices/labPlans/{}/images/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
lab_plan_name,
image_name
);
let mut url = url::Url::parse(url_str).map_err(create_or_update::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(create_or_update::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(body).map_err(create_or_update::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(create_or_update::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(create_or_update::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::Image = serde_json::from_slice(rsp_body)
.map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
Err(create_or_update::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
        pub mod create_or_update {
            use super::{models, API_VERSION};
            /// Error type for the `create_or_update` operation.
            #[derive(Debug, thiserror :: Error)]
            pub enum Error {
                /// The service replied with a status code the operation does not handle;
                /// carries the status and the decoded `ErrorResponse` payload.
                #[error("HTTP status code {}", status_code)]
                DefaultResponse {
                    status_code: http::StatusCode,
                    value: models::ErrorResponse,
                },
                #[error("Failed to parse request URL: {0}")]
                ParseUrlError(url::ParseError),
                #[error("Failed to build request: {0}")]
                BuildRequestError(http::Error),
                #[error("Failed to execute request: {0}")]
                ExecuteRequestError(azure_core::HttpError),
                #[error("Failed to serialize request body: {0}")]
                SerializeError(serde_json::Error),
                #[error("Failed to deserialize response: {0}, body: {1:?}")]
                DeserializeError(serde_json::Error, bytes::Bytes),
                #[error("Failed to get access token: {0}")]
                GetTokenError(azure_core::Error),
            }
        }
pub async fn update(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
lab_plan_name: &str,
image_name: &str,
body: &models::ImageUpdate,
) -> std::result::Result<models::Image, update::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.LabServices/labPlans/{}/images/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
lab_plan_name,
image_name
);
let mut url = url::Url::parse(url_str).map_err(update::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PATCH);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(update::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(body).map_err(update::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(update::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(update::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::Image =
serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?;
Err(update::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
        pub mod update {
            use super::{models, API_VERSION};
            /// Error type for the `update` operation.
            #[derive(Debug, thiserror :: Error)]
            pub enum Error {
                /// The service replied with a status code the operation does not handle;
                /// carries the status and the decoded `ErrorResponse` payload.
                #[error("HTTP status code {}", status_code)]
                DefaultResponse {
                    status_code: http::StatusCode,
                    value: models::ErrorResponse,
                },
                #[error("Failed to parse request URL: {0}")]
                ParseUrlError(url::ParseError),
                #[error("Failed to build request: {0}")]
                BuildRequestError(http::Error),
                #[error("Failed to execute request: {0}")]
                ExecuteRequestError(azure_core::HttpError),
                #[error("Failed to serialize request body: {0}")]
                SerializeError(serde_json::Error),
                #[error("Failed to deserialize response: {0}, body: {1:?}")]
                DeserializeError(serde_json::Error, bytes::Bytes),
                #[error("Failed to get access token: {0}")]
                GetTokenError(azure_core::Error),
            }
        }
}
pub mod labs {
use super::{models, API_VERSION};
pub async fn list_by_subscription(
operation_config: &crate::OperationConfig,
subscription_id: &str,
filter: Option<&str>,
) -> std::result::Result<models::PagedLabs, list_by_subscription::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.LabServices/labs",
operation_config.base_path(),
subscription_id
);
let mut url = url::Url::parse(url_str).map_err(list_by_subscription::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_by_subscription::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
if let Some(filter) = filter {
url.query_pairs_mut().append_pair("$filter", filter);
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list_by_subscription::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_by_subscription::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::PagedLabs = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_subscription::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_subscription::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_by_subscription::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
        pub mod list_by_subscription {
            use super::{models, API_VERSION};
            /// Error type for the `list_by_subscription` operation.
            #[derive(Debug, thiserror :: Error)]
            pub enum Error {
                /// The service replied with a status code the operation does not handle;
                /// carries the status and the decoded `ErrorResponse` payload.
                #[error("HTTP status code {}", status_code)]
                DefaultResponse {
                    status_code: http::StatusCode,
                    value: models::ErrorResponse,
                },
                #[error("Failed to parse request URL: {0}")]
                ParseUrlError(url::ParseError),
                #[error("Failed to build request: {0}")]
                BuildRequestError(http::Error),
                #[error("Failed to execute request: {0}")]
                ExecuteRequestError(azure_core::HttpError),
                #[error("Failed to serialize request body: {0}")]
                SerializeError(serde_json::Error),
                #[error("Failed to deserialize response: {0}, body: {1:?}")]
                DeserializeError(serde_json::Error, bytes::Bytes),
                #[error("Failed to get access token: {0}")]
                GetTokenError(azure_core::Error),
            }
        }
pub async fn list_by_resource_group(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
) -> std::result::Result<models::PagedLabs, list_by_resource_group::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.LabServices/labs",
operation_config.base_path(),
subscription_id,
resource_group_name
);
let mut url = url::Url::parse(url_str).map_err(list_by_resource_group::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_by_resource_group::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(list_by_resource_group::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_by_resource_group::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::PagedLabs = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_resource_group::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_resource_group::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_by_resource_group::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
        pub mod list_by_resource_group {
            use super::{models, API_VERSION};
            /// Error type for the `list_by_resource_group` operation.
            #[derive(Debug, thiserror :: Error)]
            pub enum Error {
                /// The service replied with a status code the operation does not handle;
                /// carries the status and the decoded `ErrorResponse` payload.
                #[error("HTTP status code {}", status_code)]
                DefaultResponse {
                    status_code: http::StatusCode,
                    value: models::ErrorResponse,
                },
                #[error("Failed to parse request URL: {0}")]
                ParseUrlError(url::ParseError),
                #[error("Failed to build request: {0}")]
                BuildRequestError(http::Error),
                #[error("Failed to execute request: {0}")]
                ExecuteRequestError(azure_core::HttpError),
                #[error("Failed to serialize request body: {0}")]
                SerializeError(serde_json::Error),
                #[error("Failed to deserialize response: {0}, body: {1:?}")]
                DeserializeError(serde_json::Error, bytes::Bytes),
                #[error("Failed to get access token: {0}")]
                GetTokenError(azure_core::Error),
            }
        }
pub async fn get(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
lab_name: &str,
) -> std::result::Result<models::Lab, get::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.LabServices/labs/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
lab_name
);
let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::Lab =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
        pub mod get {
            use super::{models, API_VERSION};
            /// Error type for the `get` operation.
            #[derive(Debug, thiserror :: Error)]
            pub enum Error {
                /// The service replied with a status code the operation does not handle;
                /// carries the status and the decoded `ErrorResponse` payload.
                #[error("HTTP status code {}", status_code)]
                DefaultResponse {
                    status_code: http::StatusCode,
                    value: models::ErrorResponse,
                },
                #[error("Failed to parse request URL: {0}")]
                ParseUrlError(url::ParseError),
                #[error("Failed to build request: {0}")]
                BuildRequestError(http::Error),
                #[error("Failed to execute request: {0}")]
                ExecuteRequestError(azure_core::HttpError),
                #[error("Failed to serialize request body: {0}")]
                SerializeError(serde_json::Error),
                #[error("Failed to deserialize response: {0}, body: {1:?}")]
                DeserializeError(serde_json::Error, bytes::Bytes),
                #[error("Failed to get access token: {0}")]
                GetTokenError(azure_core::Error),
            }
        }
pub async fn create_or_update(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
lab_name: &str,
body: &models::Lab,
) -> std::result::Result<create_or_update::Response, create_or_update::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.LabServices/labs/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
lab_name
);
let mut url = url::Url::parse(url_str).map_err(create_or_update::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(create_or_update::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(body).map_err(create_or_update::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(create_or_update::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(create_or_update::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::Lab = serde_json::from_slice(rsp_body)
.map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(create_or_update::Response::Ok200(rsp_value))
}
http::StatusCode::CREATED => {
let rsp_body = rsp.body();
let rsp_value: models::Lab = serde_json::from_slice(rsp_body)
.map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(create_or_update::Response::Created201(rsp_value))
}
http::StatusCode::ACCEPTED => {
let rsp_body = rsp.body();
let rsp_value: models::Lab = serde_json::from_slice(rsp_body)
.map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(create_or_update::Response::Accepted202(rsp_value))
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
Err(create_or_update::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
        pub mod create_or_update {
            use super::{models, API_VERSION};
            /// Successful responses, one variant per status code the operation accepts.
            #[derive(Debug)]
            pub enum Response {
                Ok200(models::Lab),
                Created201(models::Lab),
                Accepted202(models::Lab),
            }
            /// Error type for the `create_or_update` operation.
            #[derive(Debug, thiserror :: Error)]
            pub enum Error {
                /// The service replied with a status code the operation does not handle;
                /// carries the status and the decoded `ErrorResponse` payload.
                #[error("HTTP status code {}", status_code)]
                DefaultResponse {
                    status_code: http::StatusCode,
                    value: models::ErrorResponse,
                },
                #[error("Failed to parse request URL: {0}")]
                ParseUrlError(url::ParseError),
                #[error("Failed to build request: {0}")]
                BuildRequestError(http::Error),
                #[error("Failed to execute request: {0}")]
                ExecuteRequestError(azure_core::HttpError),
                #[error("Failed to serialize request body: {0}")]
                SerializeError(serde_json::Error),
                #[error("Failed to deserialize response: {0}, body: {1:?}")]
                DeserializeError(serde_json::Error, bytes::Bytes),
                #[error("Failed to get access token: {0}")]
                GetTokenError(azure_core::Error),
            }
        }
pub async fn update(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
lab_name: &str,
body: &models::LabUpdate,
) -> std::result::Result<update::Response, update::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.LabServices/labs/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
lab_name
);
let mut url = url::Url::parse(url_str).map_err(update::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PATCH);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(update::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(body).map_err(update::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(update::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(update::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::Lab =
serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(update::Response::Ok200(rsp_value))
}
http::StatusCode::ACCEPTED => {
let rsp_body = rsp.body();
let rsp_value: models::Lab =
serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(update::Response::Accepted202(rsp_value))
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?;
Err(update::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
        pub mod update {
            use super::{models, API_VERSION};
            /// Successful responses, one variant per status code the operation accepts.
            #[derive(Debug)]
            pub enum Response {
                Ok200(models::Lab),
                Accepted202(models::Lab),
            }
            /// Error type for the `update` operation.
            #[derive(Debug, thiserror :: Error)]
            pub enum Error {
                /// The service replied with a status code the operation does not handle;
                /// carries the status and the decoded `ErrorResponse` payload.
                #[error("HTTP status code {}", status_code)]
                DefaultResponse {
                    status_code: http::StatusCode,
                    value: models::ErrorResponse,
                },
                #[error("Failed to parse request URL: {0}")]
                ParseUrlError(url::ParseError),
                #[error("Failed to build request: {0}")]
                BuildRequestError(http::Error),
                #[error("Failed to execute request: {0}")]
                ExecuteRequestError(azure_core::HttpError),
                #[error("Failed to serialize request body: {0}")]
                SerializeError(serde_json::Error),
                #[error("Failed to deserialize response: {0}, body: {1:?}")]
                DeserializeError(serde_json::Error, bytes::Bytes),
                #[error("Failed to get access token: {0}")]
                GetTokenError(azure_core::Error),
            }
        }
pub async fn delete(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
lab_name: &str,
) -> std::result::Result<delete::Response, delete::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.LabServices/labs/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
lab_name
);
let mut url = url::Url::parse(url_str).map_err(delete::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(delete::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(delete::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(delete::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => Ok(delete::Response::Ok200),
http::StatusCode::ACCEPTED => Ok(delete::Response::Accepted202),
http::StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204),
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| delete::Error::DeserializeError(source, rsp_body.clone()))?;
Err(delete::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
        pub mod delete {
            use super::{models, API_VERSION};
            /// Successful responses, one variant per status code the operation accepts.
            #[derive(Debug)]
            pub enum Response {
                Ok200,
                Accepted202,
                NoContent204,
            }
            /// Error type for the `delete` operation.
            #[derive(Debug, thiserror :: Error)]
            pub enum Error {
                /// The service replied with a status code the operation does not handle;
                /// carries the status and the decoded `ErrorResponse` payload.
                #[error("HTTP status code {}", status_code)]
                DefaultResponse {
                    status_code: http::StatusCode,
                    value: models::ErrorResponse,
                },
                #[error("Failed to parse request URL: {0}")]
                ParseUrlError(url::ParseError),
                #[error("Failed to build request: {0}")]
                BuildRequestError(http::Error),
                #[error("Failed to execute request: {0}")]
                ExecuteRequestError(azure_core::HttpError),
                #[error("Failed to serialize request body: {0}")]
                SerializeError(serde_json::Error),
                #[error("Failed to deserialize response: {0}, body: {1:?}")]
                DeserializeError(serde_json::Error, bytes::Bytes),
                #[error("Failed to get access token: {0}")]
                GetTokenError(azure_core::Error),
            }
        }
pub async fn publish(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
lab_name: &str,
) -> std::result::Result<publish::Response, publish::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.LabServices/labs/{}/publish",
operation_config.base_path(),
subscription_id,
resource_group_name,
lab_name
);
let mut url = url::Url::parse(url_str).map_err(publish::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(publish::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(publish::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(publish::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => Ok(publish::Response::Ok200),
http::StatusCode::ACCEPTED => Ok(publish::Response::Accepted202),
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| publish::Error::DeserializeError(source, rsp_body.clone()))?;
Err(publish::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
        pub mod publish {
            use super::{models, API_VERSION};
            /// Successful responses, one variant per status code the operation accepts.
            #[derive(Debug)]
            pub enum Response {
                Ok200,
                Accepted202,
            }
            /// Error type for the `publish` operation.
            #[derive(Debug, thiserror :: Error)]
            pub enum Error {
                /// The service replied with a status code the operation does not handle;
                /// carries the status and the decoded `ErrorResponse` payload.
                #[error("HTTP status code {}", status_code)]
                DefaultResponse {
                    status_code: http::StatusCode,
                    value: models::ErrorResponse,
                },
                #[error("Failed to parse request URL: {0}")]
                ParseUrlError(url::ParseError),
                #[error("Failed to build request: {0}")]
                BuildRequestError(http::Error),
                #[error("Failed to execute request: {0}")]
                ExecuteRequestError(azure_core::HttpError),
                #[error("Failed to serialize request body: {0}")]
                SerializeError(serde_json::Error),
                #[error("Failed to deserialize response: {0}, body: {1:?}")]
                DeserializeError(serde_json::Error, bytes::Bytes),
                #[error("Failed to get access token: {0}")]
                GetTokenError(azure_core::Error),
            }
        }
pub async fn sync_group(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
lab_name: &str,
) -> std::result::Result<sync_group::Response, sync_group::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.LabServices/labs/{}/syncGroup",
operation_config.base_path(),
subscription_id,
resource_group_name,
lab_name
);
let mut url = url::Url::parse(url_str).map_err(sync_group::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(sync_group::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(sync_group::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(sync_group::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => Ok(sync_group::Response::Ok200),
http::StatusCode::ACCEPTED => Ok(sync_group::Response::Accepted202),
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| sync_group::Error::DeserializeError(source, rsp_body.clone()))?;
Err(sync_group::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
        pub mod sync_group {
            use super::{models, API_VERSION};
            /// Successful responses, one variant per status code the operation accepts.
            #[derive(Debug)]
            pub enum Response {
                Ok200,
                Accepted202,
            }
            /// Error type for the `sync_group` operation.
            #[derive(Debug, thiserror :: Error)]
            pub enum Error {
                /// The service replied with a status code the operation does not handle;
                /// carries the status and the decoded `ErrorResponse` payload.
                #[error("HTTP status code {}", status_code)]
                DefaultResponse {
                    status_code: http::StatusCode,
                    value: models::ErrorResponse,
                },
                #[error("Failed to parse request URL: {0}")]
                ParseUrlError(url::ParseError),
                #[error("Failed to build request: {0}")]
                BuildRequestError(http::Error),
                #[error("Failed to execute request: {0}")]
                ExecuteRequestError(azure_core::HttpError),
                #[error("Failed to serialize request body: {0}")]
                SerializeError(serde_json::Error),
                #[error("Failed to deserialize response: {0}, body: {1:?}")]
                DeserializeError(serde_json::Error, bytes::Bytes),
                #[error("Failed to get access token: {0}")]
                GetTokenError(azure_core::Error),
            }
        }
}
pub mod users {
use super::{models, API_VERSION};
pub async fn list_by_lab(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
lab_name: &str,
filter: Option<&str>,
) -> std::result::Result<models::PagedUsers, list_by_lab::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.LabServices/labs/{}/users",
operation_config.base_path(),
subscription_id,
resource_group_name,
lab_name
);
let mut url = url::Url::parse(url_str).map_err(list_by_lab::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_by_lab::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
if let Some(filter) = filter {
url.query_pairs_mut().append_pair("$filter", filter);
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list_by_lab::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_by_lab::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::PagedUsers =
serde_json::from_slice(rsp_body).map_err(|source| list_by_lab::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| list_by_lab::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_by_lab::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
        pub mod list_by_lab {
            use super::{models, API_VERSION};
            /// Error type for the `list_by_lab` operation.
            #[derive(Debug, thiserror :: Error)]
            pub enum Error {
                /// The service replied with a status code the operation does not handle;
                /// carries the status and the decoded `ErrorResponse` payload.
                #[error("HTTP status code {}", status_code)]
                DefaultResponse {
                    status_code: http::StatusCode,
                    value: models::ErrorResponse,
                },
                #[error("Failed to parse request URL: {0}")]
                ParseUrlError(url::ParseError),
                #[error("Failed to build request: {0}")]
                BuildRequestError(http::Error),
                #[error("Failed to execute request: {0}")]
                ExecuteRequestError(azure_core::HttpError),
                #[error("Failed to serialize request body: {0}")]
                SerializeError(serde_json::Error),
                #[error("Failed to deserialize response: {0}, body: {1:?}")]
                DeserializeError(serde_json::Error, bytes::Bytes),
                #[error("Failed to get access token: {0}")]
                GetTokenError(azure_core::Error),
            }
        }
pub async fn get(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
lab_name: &str,
user_name: &str,
) -> std::result::Result<models::User, get::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.LabServices/labs/{}/users/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
lab_name,
user_name
);
let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::User =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
        pub mod get {
            use super::{models, API_VERSION};
            /// Error type for the `get` operation.
            #[derive(Debug, thiserror :: Error)]
            pub enum Error {
                /// The service replied with a status code the operation does not handle;
                /// carries the status and the decoded `ErrorResponse` payload.
                #[error("HTTP status code {}", status_code)]
                DefaultResponse {
                    status_code: http::StatusCode,
                    value: models::ErrorResponse,
                },
                #[error("Failed to parse request URL: {0}")]
                ParseUrlError(url::ParseError),
                #[error("Failed to build request: {0}")]
                BuildRequestError(http::Error),
                #[error("Failed to execute request: {0}")]
                ExecuteRequestError(azure_core::HttpError),
                #[error("Failed to serialize request body: {0}")]
                SerializeError(serde_json::Error),
                #[error("Failed to deserialize response: {0}, body: {1:?}")]
                DeserializeError(serde_json::Error, bytes::Bytes),
                #[error("Failed to get access token: {0}")]
                GetTokenError(azure_core::Error),
            }
        }
pub async fn create_or_update(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
lab_name: &str,
user_name: &str,
body: &models::User,
) -> std::result::Result<create_or_update::Response, create_or_update::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.LabServices/labs/{}/users/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
lab_name,
user_name
);
let mut url = url::Url::parse(url_str).map_err(create_or_update::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(create_or_update::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(body).map_err(create_or_update::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(create_or_update::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(create_or_update::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::User = serde_json::from_slice(rsp_body)
.map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(create_or_update::Response::Ok200(rsp_value))
}
http::StatusCode::CREATED => {
let rsp_body = rsp.body();
let rsp_value: models::User = serde_json::from_slice(rsp_body)
.map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(create_or_update::Response::Created201(rsp_value))
}
http::StatusCode::ACCEPTED => {
let rsp_body = rsp.body();
let rsp_value: models::User = serde_json::from_slice(rsp_body)
.map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(create_or_update::Response::Accepted202(rsp_value))
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
Err(create_or_update::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Response and error types for the sibling `create_or_update` operation.
pub mod create_or_update {
    use super::{models, API_VERSION};
    /// Success payloads keyed by the HTTP status the service returned;
    /// each variant carries the resulting `models::User`.
    #[derive(Debug)]
    pub enum Response {
        /// 200 OK.
        Ok200(models::User),
        /// 201 Created.
        Created201(models::User),
        /// 202 Accepted.
        Accepted202(models::User),
    }
    /// Failure modes of the operation, from URL construction through
    /// response-body deserialization.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        /// The service replied with a status code not handled explicitly.
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::ErrorResponse,
        },
        /// The assembled request URL was not a valid URL.
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        /// The HTTP request could not be built.
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        /// Transport-level failure while executing the request.
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        /// The request body could not be serialized to JSON.
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        /// The response body could not be deserialized; raw bytes retained.
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        /// The token credential failed to produce an access token.
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
pub async fn update(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
lab_name: &str,
user_name: &str,
body: &models::UserUpdate,
) -> std::result::Result<update::Response, update::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.LabServices/labs/{}/users/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
lab_name,
user_name
);
let mut url = url::Url::parse(url_str).map_err(update::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PATCH);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(update::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(body).map_err(update::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(update::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(update::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::User =
serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(update::Response::Ok200(rsp_value))
}
http::StatusCode::ACCEPTED => {
let rsp_body = rsp.body();
let rsp_value: models::User =
serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(update::Response::Accepted202(rsp_value))
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?;
Err(update::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Response and error types for the sibling `update` operation.
pub mod update {
    use super::{models, API_VERSION};
    /// Success payloads keyed by the HTTP status the service returned;
    /// each variant carries the resulting `models::User`.
    #[derive(Debug)]
    pub enum Response {
        /// 200 OK.
        Ok200(models::User),
        /// 202 Accepted.
        Accepted202(models::User),
    }
    /// Failure modes of the operation, from URL construction through
    /// response-body deserialization.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        /// The service replied with a status code not handled explicitly.
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::ErrorResponse,
        },
        /// The assembled request URL was not a valid URL.
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        /// The HTTP request could not be built.
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        /// Transport-level failure while executing the request.
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        /// The request body could not be serialized to JSON.
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        /// The response body could not be deserialized; raw bytes retained.
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        /// The token credential failed to produce an access token.
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
pub async fn delete(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
lab_name: &str,
user_name: &str,
) -> std::result::Result<delete::Response, delete::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.LabServices/labs/{}/users/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
lab_name,
user_name
);
let mut url = url::Url::parse(url_str).map_err(delete::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(delete::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(delete::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(delete::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => Ok(delete::Response::Ok200),
http::StatusCode::ACCEPTED => Ok(delete::Response::Accepted202),
http::StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204),
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| delete::Error::DeserializeError(source, rsp_body.clone()))?;
Err(delete::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Response and error types for the sibling `delete` operation.
pub mod delete {
    use super::{models, API_VERSION};
    /// Success outcomes keyed by HTTP status; delete carries no payload.
    #[derive(Debug)]
    pub enum Response {
        /// 200 OK.
        Ok200,
        /// 202 Accepted.
        Accepted202,
        /// 204 No Content.
        NoContent204,
    }
    /// Failure modes of the operation, from URL construction through
    /// response-body deserialization.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        /// The service replied with a status code not handled explicitly.
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::ErrorResponse,
        },
        /// The assembled request URL was not a valid URL.
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        /// The HTTP request could not be built.
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        /// Transport-level failure while executing the request.
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        /// The request body could not be serialized to JSON.
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        /// The response body could not be deserialized; raw bytes retained.
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        /// The token credential failed to produce an access token.
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
pub async fn invite(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
lab_name: &str,
user_name: &str,
body: &models::InviteBody,
) -> std::result::Result<invite::Response, invite::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.LabServices/labs/{}/users/{}/invite",
operation_config.base_path(),
subscription_id,
resource_group_name,
lab_name,
user_name
);
let mut url = url::Url::parse(url_str).map_err(invite::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(invite::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(body).map_err(invite::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(invite::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(invite::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => Ok(invite::Response::Ok200),
http::StatusCode::ACCEPTED => Ok(invite::Response::Accepted202),
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| invite::Error::DeserializeError(source, rsp_body.clone()))?;
Err(invite::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Response and error types for the sibling `invite` operation.
pub mod invite {
    use super::{models, API_VERSION};
    /// Success outcomes keyed by HTTP status; invite carries no payload.
    #[derive(Debug)]
    pub enum Response {
        /// 200 OK.
        Ok200,
        /// 202 Accepted.
        Accepted202,
    }
    /// Failure modes of the operation, from URL construction through
    /// response-body deserialization.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        /// The service replied with a status code not handled explicitly.
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::ErrorResponse,
        },
        /// The assembled request URL was not a valid URL.
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        /// The HTTP request could not be built.
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        /// Transport-level failure while executing the request.
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        /// The request body could not be serialized to JSON.
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        /// The response body could not be deserialized; raw bytes retained.
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        /// The token credential failed to produce an access token.
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
}
pub mod virtual_machines {
use super::{models, API_VERSION};
pub async fn list_by_lab(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
lab_name: &str,
filter: Option<&str>,
) -> std::result::Result<models::PagedVirtualMachines, list_by_lab::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.LabServices/labs/{}/virtualMachines",
operation_config.base_path(),
subscription_id,
resource_group_name,
lab_name
);
let mut url = url::Url::parse(url_str).map_err(list_by_lab::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_by_lab::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
if let Some(filter) = filter {
url.query_pairs_mut().append_pair("$filter", filter);
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list_by_lab::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_by_lab::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::PagedVirtualMachines =
serde_json::from_slice(rsp_body).map_err(|source| list_by_lab::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| list_by_lab::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_by_lab::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Error types for the sibling `list_by_lab` operation.
pub mod list_by_lab {
    use super::{models, API_VERSION};
    /// Failure modes of the operation, from URL construction through
    /// response-body deserialization.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        /// The service replied with a status code not handled explicitly.
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::ErrorResponse,
        },
        /// The assembled request URL was not a valid URL.
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        /// The HTTP request could not be built.
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        /// Transport-level failure while executing the request.
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        /// The request body could not be serialized to JSON.
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        /// The response body could not be deserialized; raw bytes retained.
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        /// The token credential failed to produce an access token.
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
pub async fn get(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
lab_name: &str,
virtual_machine_name: &str,
) -> std::result::Result<models::VirtualMachine, get::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.LabServices/labs/{}/virtualMachines/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
lab_name,
virtual_machine_name
);
let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::VirtualMachine =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Error types for the sibling `get` operation.
pub mod get {
    use super::{models, API_VERSION};
    /// Failure modes of the operation, from URL construction through
    /// response-body deserialization.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        /// The service replied with a status code not handled explicitly.
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::ErrorResponse,
        },
        /// The assembled request URL was not a valid URL.
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        /// The HTTP request could not be built.
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        /// Transport-level failure while executing the request.
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        /// The request body could not be serialized to JSON.
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        /// The response body could not be deserialized; raw bytes retained.
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        /// The token credential failed to produce an access token.
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
pub async fn start(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
lab_name: &str,
virtual_machine_name: &str,
) -> std::result::Result<start::Response, start::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.LabServices/labs/{}/virtualMachines/{}/start",
operation_config.base_path(),
subscription_id,
resource_group_name,
lab_name,
virtual_machine_name
);
let mut url = url::Url::parse(url_str).map_err(start::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(start::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(start::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(start::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => Ok(start::Response::Ok200),
http::StatusCode::ACCEPTED => Ok(start::Response::Accepted202),
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| start::Error::DeserializeError(source, rsp_body.clone()))?;
Err(start::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Response and error types for the sibling `start` operation.
pub mod start {
    use super::{models, API_VERSION};
    /// Success outcomes keyed by HTTP status; the action carries no payload.
    #[derive(Debug)]
    pub enum Response {
        /// 200 OK.
        Ok200,
        /// 202 Accepted.
        Accepted202,
    }
    /// Failure modes of the operation, from URL construction through
    /// response-body deserialization.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        /// The service replied with a status code not handled explicitly.
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::ErrorResponse,
        },
        /// The assembled request URL was not a valid URL.
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        /// The HTTP request could not be built.
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        /// Transport-level failure while executing the request.
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        /// The request body could not be serialized to JSON.
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        /// The response body could not be deserialized; raw bytes retained.
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        /// The token credential failed to produce an access token.
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
pub async fn stop(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
lab_name: &str,
virtual_machine_name: &str,
) -> std::result::Result<stop::Response, stop::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.LabServices/labs/{}/virtualMachines/{}/stop",
operation_config.base_path(),
subscription_id,
resource_group_name,
lab_name,
virtual_machine_name
);
let mut url = url::Url::parse(url_str).map_err(stop::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(stop::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(stop::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(stop::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => Ok(stop::Response::Ok200),
http::StatusCode::ACCEPTED => Ok(stop::Response::Accepted202),
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| stop::Error::DeserializeError(source, rsp_body.clone()))?;
Err(stop::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Response and error types for the sibling `stop` operation.
pub mod stop {
    use super::{models, API_VERSION};
    /// Success outcomes keyed by HTTP status; the action carries no payload.
    #[derive(Debug)]
    pub enum Response {
        /// 200 OK.
        Ok200,
        /// 202 Accepted.
        Accepted202,
    }
    /// Failure modes of the operation, from URL construction through
    /// response-body deserialization.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        /// The service replied with a status code not handled explicitly.
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::ErrorResponse,
        },
        /// The assembled request URL was not a valid URL.
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        /// The HTTP request could not be built.
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        /// Transport-level failure while executing the request.
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        /// The request body could not be serialized to JSON.
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        /// The response body could not be deserialized; raw bytes retained.
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        /// The token credential failed to produce an access token.
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
pub async fn reimage(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
lab_name: &str,
virtual_machine_name: &str,
) -> std::result::Result<reimage::Response, reimage::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.LabServices/labs/{}/virtualMachines/{}/reimage",
operation_config.base_path(),
subscription_id,
resource_group_name,
lab_name,
virtual_machine_name
);
let mut url = url::Url::parse(url_str).map_err(reimage::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(reimage::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(reimage::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(reimage::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => Ok(reimage::Response::Ok200),
http::StatusCode::ACCEPTED => Ok(reimage::Response::Accepted202),
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| reimage::Error::DeserializeError(source, rsp_body.clone()))?;
Err(reimage::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Response and error types for the sibling `reimage` operation.
pub mod reimage {
    use super::{models, API_VERSION};
    /// Success outcomes keyed by HTTP status; the action carries no payload.
    #[derive(Debug)]
    pub enum Response {
        /// 200 OK.
        Ok200,
        /// 202 Accepted.
        Accepted202,
    }
    /// Failure modes of the operation, from URL construction through
    /// response-body deserialization.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        /// The service replied with a status code not handled explicitly.
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::ErrorResponse,
        },
        /// The assembled request URL was not a valid URL.
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        /// The HTTP request could not be built.
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        /// Transport-level failure while executing the request.
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        /// The request body could not be serialized to JSON.
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        /// The response body could not be deserialized; raw bytes retained.
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        /// The token credential failed to produce an access token.
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
pub async fn redeploy(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
lab_name: &str,
virtual_machine_name: &str,
) -> std::result::Result<redeploy::Response, redeploy::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.LabServices/labs/{}/virtualMachines/{}/redeploy",
operation_config.base_path(),
subscription_id,
resource_group_name,
lab_name,
virtual_machine_name
);
let mut url = url::Url::parse(url_str).map_err(redeploy::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(redeploy::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(redeploy::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(redeploy::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => Ok(redeploy::Response::Ok200),
http::StatusCode::ACCEPTED => Ok(redeploy::Response::Accepted202),
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| redeploy::Error::DeserializeError(source, rsp_body.clone()))?;
Err(redeploy::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Response and error types for the sibling `redeploy` operation.
pub mod redeploy {
    use super::{models, API_VERSION};
    /// Success outcomes keyed by HTTP status; the action carries no payload.
    #[derive(Debug)]
    pub enum Response {
        /// 200 OK.
        Ok200,
        /// 202 Accepted.
        Accepted202,
    }
    /// Failure modes of the operation, from URL construction through
    /// response-body deserialization.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        /// The service replied with a status code not handled explicitly.
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::ErrorResponse,
        },
        /// The assembled request URL was not a valid URL.
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        /// The HTTP request could not be built.
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        /// Transport-level failure while executing the request.
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        /// The request body could not be serialized to JSON.
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        /// The response body could not be deserialized; raw bytes retained.
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        /// The token credential failed to produce an access token.
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
pub async fn reset_password(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
lab_name: &str,
virtual_machine_name: &str,
body: &models::ResetPasswordBody,
) -> std::result::Result<reset_password::Response, reset_password::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.LabServices/labs/{}/virtualMachines/{}/resetPassword",
operation_config.base_path(),
subscription_id,
resource_group_name,
lab_name,
virtual_machine_name
);
let mut url = url::Url::parse(url_str).map_err(reset_password::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(reset_password::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(body).map_err(reset_password::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(reset_password::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(reset_password::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => Ok(reset_password::Response::Ok200),
http::StatusCode::ACCEPTED => Ok(reset_password::Response::Accepted202),
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| reset_password::Error::DeserializeError(source, rsp_body.clone()))?;
Err(reset_password::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Response and error types for the sibling `reset_password` operation.
pub mod reset_password {
    use super::{models, API_VERSION};
    /// Success outcomes keyed by HTTP status; the action carries no payload.
    #[derive(Debug)]
    pub enum Response {
        /// 200 OK.
        Ok200,
        /// 202 Accepted.
        Accepted202,
    }
    /// Failure modes of the operation, from URL construction through
    /// response-body deserialization.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        /// The service replied with a status code not handled explicitly.
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::ErrorResponse,
        },
        /// The assembled request URL was not a valid URL.
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        /// The HTTP request could not be built.
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        /// Transport-level failure while executing the request.
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        /// The request body could not be serialized to JSON.
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        /// The response body could not be deserialized; raw bytes retained.
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        /// The token credential failed to produce an access token.
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
}
pub mod schedules {
use super::{models, API_VERSION};
pub async fn list_by_lab(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
lab_name: &str,
filter: Option<&str>,
) -> std::result::Result<models::PagedSchedules, list_by_lab::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.LabServices/labs/{}/schedules",
operation_config.base_path(),
subscription_id,
resource_group_name,
lab_name
);
let mut url = url::Url::parse(url_str).map_err(list_by_lab::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_by_lab::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
if let Some(filter) = filter {
url.query_pairs_mut().append_pair("$filter", filter);
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list_by_lab::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_by_lab::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::PagedSchedules =
serde_json::from_slice(rsp_body).map_err(|source| list_by_lab::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| list_by_lab::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_by_lab::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list_by_lab {
    use super::{models, API_VERSION};
    /// Error type for the sibling `list_by_lab` operation, covering every
    /// stage of the request lifecycle (URL build, auth, send, decode).
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        /// The service replied with a non-200 status; `value` holds the parsed error body.
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::ErrorResponse,
        },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        /// Carries the raw response bytes so callers can inspect what failed to parse.
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
pub async fn get(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
lab_name: &str,
schedule_name: &str,
) -> std::result::Result<models::Schedule, get::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.LabServices/labs/{}/schedules/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
lab_name,
schedule_name
);
let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::Schedule =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod get {
    use super::{models, API_VERSION};
    /// Error type for the sibling `get` operation.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        /// The service replied with a non-200 status; `value` holds the parsed error body.
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::ErrorResponse,
        },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        /// Carries the raw response bytes so callers can inspect what failed to parse.
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
pub async fn create_or_update(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
lab_name: &str,
schedule_name: &str,
body: &models::Schedule,
) -> std::result::Result<create_or_update::Response, create_or_update::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.LabServices/labs/{}/schedules/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
lab_name,
schedule_name
);
let mut url = url::Url::parse(url_str).map_err(create_or_update::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(create_or_update::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(body).map_err(create_or_update::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(create_or_update::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(create_or_update::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::Schedule = serde_json::from_slice(rsp_body)
.map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(create_or_update::Response::Ok200(rsp_value))
}
http::StatusCode::CREATED => {
let rsp_body = rsp.body();
let rsp_value: models::Schedule = serde_json::from_slice(rsp_body)
.map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(create_or_update::Response::Created201(rsp_value))
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
Err(create_or_update::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod create_or_update {
    use super::{models, API_VERSION};
    /// Successful outcomes of `create_or_update`: the service returns 200
    /// when an existing schedule was replaced and 201 when a new one was created.
    #[derive(Debug)]
    pub enum Response {
        Ok200(models::Schedule),
        Created201(models::Schedule),
    }
    /// Error type for the sibling `create_or_update` operation.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        /// The service replied with an unexpected status; `value` holds the parsed error body.
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::ErrorResponse,
        },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        /// Carries the raw response bytes so callers can inspect what failed to parse.
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
pub async fn update(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
lab_name: &str,
schedule_name: &str,
body: &models::ScheduleUpdate,
) -> std::result::Result<models::Schedule, update::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.LabServices/labs/{}/schedules/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
lab_name,
schedule_name
);
let mut url = url::Url::parse(url_str).map_err(update::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PATCH);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(update::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(body).map_err(update::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(update::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(update::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::Schedule =
serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?;
Err(update::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod update {
    use super::{models, API_VERSION};
    /// Error type for the sibling `update` operation.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        /// The service replied with a non-200 status; `value` holds the parsed error body.
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::ErrorResponse,
        },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        /// Carries the raw response bytes so callers can inspect what failed to parse.
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
pub async fn delete(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
lab_name: &str,
schedule_name: &str,
) -> std::result::Result<delete::Response, delete::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.LabServices/labs/{}/schedules/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
lab_name,
schedule_name
);
let mut url = url::Url::parse(url_str).map_err(delete::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(delete::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(delete::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(delete::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => Ok(delete::Response::Ok200),
http::StatusCode::ACCEPTED => Ok(delete::Response::Accepted202),
http::StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204),
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| delete::Error::DeserializeError(source, rsp_body.clone()))?;
Err(delete::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod delete {
    use super::{models, API_VERSION};
    /// Successful outcomes of `delete`: 200 (deleted), 202 (deletion accepted,
    /// proceeding asynchronously) or 204 (nothing to delete).
    #[derive(Debug)]
    pub enum Response {
        Ok200,
        Accepted202,
        NoContent204,
    }
    /// Error type for the sibling `delete` operation.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        /// The service replied with an unexpected status; `value` holds the parsed error body.
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::ErrorResponse,
        },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        /// Carries the raw response bytes so callers can inspect what failed to parse.
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
}
|
// SPDX-License-Identifier: (LGPL-2.1 OR BSD-2-Clause)
use anyhow::{bail, Result};
use chrono::Local;
use core::time::Duration;
use libbpf_rs::PerfBufferBuilder;
use libc::{rlimit, setrlimit, RLIMIT_MEMLOCK};
use plain::Plain;
use std::process::exit;
use structopt::StructOpt;
use sym_finder;
mod bpf;
use bpf::*;
// Command-line options, parsed with structopt.
// NOTE: `//` comments (not `///`) are used deliberately — structopt turns
// doc comments into --help text, which would change runtime output.
#[derive(Debug, StructOpt)]
struct Command {
    // -v / --verbose: enable libbpf debug output on the skeleton builder.
    #[structopt(short, long)]
    verbose: bool,
}
pub mod gethostlatency_bss_types {
    // Event record emitted by the BPF program through the perf buffer.
    // The #[repr(C)] layout (including the explicit 4-byte pad between
    // `pid` and the 8-byte-aligned `delta`) must match the struct on the
    // BPF side byte-for-byte — do not reorder or resize fields.
    #[derive(Debug, Copy, Clone)]
    #[repr(C)]
    pub struct data_t {
        // Process id of the caller doing the host lookup.
        pub pid: u32,
        // Explicit padding so `delta` is 8-byte aligned, as in the C struct.
        pub __pad_4: [u8; 4],
        // Call latency; divided by 1e6 in handle_event, so presumably
        // nanoseconds — TODO confirm against the BPF source.
        pub delta: u64,
        // NUL-terminated process name (C char[16]).
        pub comm: [u8; 16],
        // NUL-terminated hostname argument (C char[80]).
        pub host: [u8; 80],
    }
}
impl Default for gethostlatency_bss_types::data_t {
    /// A fully zeroed event record, used as the decode target before
    /// `plain::copy_from_bytes` overwrites it with raw perf-buffer bytes.
    /// (A derive is not possible: `Default` is not implemented for `[u8; 80]`.)
    fn default() -> Self {
        Self {
            pid: 0,
            delta: 0,
            __pad_4: [0u8; 4],
            comm: [0u8; 16],
            host: [0u8; 80],
        }
    }
}
// SAFETY: data_t is #[repr(C)], Copy, and composed only of plain integer
// fields/arrays with explicit padding, so any byte pattern is a valid value —
// the precondition the `plain` crate requires for byte-wise decoding.
unsafe impl Plain for gethostlatency_bss_types::data_t {}
/// Raise RLIMIT_MEMLOCK to 128 MiB so BPF maps and programs can be
/// locked into memory (required on kernels without memcg-based accounting).
fn bump_memlock_rlimit() -> Result<()> {
    let limit = rlimit {
        rlim_cur: 128 << 20,
        rlim_max: 128 << 20,
    };
    let rc = unsafe { setrlimit(RLIMIT_MEMLOCK, &limit) };
    if rc != 0 {
        bail!("Failed to increase rlimit");
    }
    Ok(())
}
/// Decode the bytes from a fixed-size, NUL-terminated C char buffer into a
/// String, stopping at the first NUL and replacing invalid UTF-8 sequences.
fn c_chars_to_string(buf: &[u8]) -> String {
    let end = buf.iter().position(|&b| b == 0).unwrap_or(buf.len());
    String::from_utf8_lossy(&buf[..end]).into_owned()
}
/// Perf-buffer sample callback: decode one `data_t` event and print it.
fn handle_event(_cpu: i32, data: &[u8]) {
    let mut event = gethostlatency_bss_types::data_t::default();
    plain::copy_from_bytes(&mut event, data).expect("Data buffer was too short");
    let now = Local::now();
    let pid = event.pid;
    // FIX: the previous `from_utf8(...).unwrap()` decoded the whole 16/80-byte
    // buffer and panicked if the bytes *after* the C-string terminator were not
    // valid UTF-8. Truncate at the first NUL and decode lossily instead.
    let comm = c_chars_to_string(&event.comm);
    let host = c_chars_to_string(&event.host);
    // `delta` divided by 1e6 and labeled msec — presumably nanoseconds on the
    // BPF side; TODO confirm against the BPF program.
    let lat = event.delta as f64 / 1_000_000.0;
    println!(
        "{:<9} pid: {:<6} comm: {:<16} host: {:<30} latency: {:>10.2} msec",
        now,
        pid,
        comm,
        host,
        lat
    )
}
/// Perf-buffer overrun callback: report how many samples were dropped on `cpu`.
fn handle_lost_events(cpu: i32, count: u64) {
    eprintln!("Lost {} events on CPU {}", count, cpu);
}
/// Attach entry/return uprobes to libc's gethostbyname, gethostbyname2 and
/// getaddrinfo, then poll the BPF perf buffer forever, printing one line
/// per resolved host lookup.
fn main() -> Result<()> {
    let options = Command::from_args();
    let mut skel_builder = GethostlatencySkelBuilder::default();
    if options.verbose {
        skel_builder.obj_builder.debug(true);
    }
    bump_memlock_rlimit()?;
    let open_skel = skel_builder.open()?;
    let skel = open_skel.load()?;
    // Resolve libc's path and symbol offsets through this process's own maps;
    // the trailing NUL makes the path a valid C string for attach_uprobe.
    let pid = std::process::id() as libc::pid_t;
    let pathname = sym_finder::resolve_proc_maps_lib(pid, "libc").unwrap() + "\0";
    // gethostbyname: entry + return probes at the same symbol offset.
    let func_ofs = sym_finder::find_sym(pid, "libc", "gethostbyname")?
        .unwrap()
        .st_value as usize;
    let _res = skel
        .progs()
        .handle__entry_gethostbyname()
        .attach_uprobe(false, -1, &pathname, func_ofs)?;
    let _res = skel
        .progs()
        .handle__return_gethostbyname()
        .attach_uprobe(true, -1, &pathname, func_ofs)?;
    // BUG FIX: this lookup previously resolved "gethostbyname" a second time,
    // so the gethostbyname2 probes were attached at the wrong offset.
    let func_ofs = sym_finder::find_sym(pid, "libc", "gethostbyname2")?
        .unwrap()
        .st_value as usize;
    let _res = skel
        .progs()
        .handle__entry_gethostbyname2()
        .attach_uprobe(false, -1, &pathname, func_ofs)?;
    let _res = skel
        .progs()
        .handle__return_gethostbyname2()
        .attach_uprobe(true, -1, &pathname, func_ofs)?;
    // getaddrinfo: entry + return probes.
    let func_ofs = sym_finder::find_sym(pid, "libc", "getaddrinfo")?
        .unwrap()
        .st_value as usize;
    let _res = skel
        .progs()
        .handle__entry_getaddrinfo()
        .attach_uprobe(false, -1, &pathname, func_ofs)?;
    let _res = skel
        .progs()
        .handle__return_getaddrinfo()
        .attach_uprobe(true, -1, &pathname, func_ofs)?;
    // Note: the `_res` bindings keep the returned links alive for the whole
    // of main; dropping them would detach the probes.
    let perf = PerfBufferBuilder::new(skel.maps().events())
        .sample_cb(handle_event)
        .lost_cb(handle_lost_events)
        .build()?;
    loop {
        let ret = perf.poll(Duration::from_millis(100));
        match ret {
            Ok(()) => (),
            Err(e) => {
                eprintln!("Error polling perf buffer: {}", e);
                exit(1);
            }
        };
    }
}
|
#![feature(old_io)]
#![feature(unicode)]
use std::old_io as io;
fn main () {
let input: String = io::stdin().read_line().ok().expect("Fuck.");
let mut count_a = 0;
let mut count_c = 0;
let mut count_g = 0;
let mut count_t = 0;
for i in input.graphemes(true) {
match i {
"A" => count_a += 1,
"C" => count_c += 1,
"G" => count_g += 1,
"T" => count_t += 1,
_ => (),
}
}
print!("{} {} {} {}", count_a, count_c, count_g, count_t);
} |
use crate::vtable::VTable;
use apllodb_shared_components::{
ApllodbError, ApllodbResult, BooleanExpression, ComparisonFunction, Expression,
LogicalFunction, NnSqlValue, RPos, Schema, SchemaIndex, SqlConvertible, SqlValue,
};
use apllodb_storage_engine_interface::{ColumnDataType, ColumnName, Row, RowSchema, TableName};
use serde::{Deserialize, Serialize};
use std::{collections::VecDeque, ops::Index};
/// Primary key which other components than Storage Engine observes.
// `new` here is the derive-new crate's constructor derive: it generates
// `ApparentPrimaryKey::new(table_name, pk_column_names, sql_values)`.
#[derive(Clone, PartialEq, Hash, Debug, Serialize, Deserialize, new)]
pub struct ApparentPrimaryKey {
    // Table this primary key belongs to.
    table_name: TableName,
    // PK column names; kept in the same order as `sql_values`.
    pk_column_names: Vec<ColumnName>,
    // real "key" of a record: the NOT NULL values of the PK columns.
    sql_values: Vec<NnSqlValue>,
}
impl ApparentPrimaryKey {
    /// Get [NnSqlValue](apllodb_shared_components::NnSqlValue) from a PK column.
    ///
    /// # Failures
    ///
    /// - [NameErrorNotFound](apllodb_shared_components::SqlState::NameErrorNotFound) when:
    ///   - `column_name` is not in this PK.
    pub fn get_sql_value(&self, column_name: &ColumnName) -> ApllodbResult<&NnSqlValue> {
        // Linear scan is fine: a PK has only a handful of columns.
        for (cn, sql_value) in self.zipped() {
            if cn == column_name {
                return Ok(sql_value);
            }
        }
        Err(ApllodbError::name_error_not_found(format!(
            "undefined column name in PK: `{:?}`",
            column_name
        )))
    }
    /// Get Rust value from a PK column.
    ///
    /// # Failures
    ///
    /// - [NameErrorNotFound](apllodb_shared_components::SqlState::NameErrorNotFound) when:
    ///   - `column_name` is not in this PK.
    pub fn get<T: SqlConvertible>(&self, column_name: &ColumnName) -> ApllodbResult<T> {
        self.get_sql_value(column_name)?.unpack()
    }
}
impl ApparentPrimaryKey {
    /// Build the apparent PK of `row`, locating each PK column through `schema`.
    ///
    /// Propagates errors from `schema.index()` / `row.get_sql_value()`.
    /// Panics if a PK column holds NULL (PK columns must be NOT NULL).
    pub fn from_table_and_row(
        vtable: &VTable,
        schema: &RowSchema,
        row: &mut Row,
    ) -> ApllodbResult<Self> {
        let apk_cdts = vtable.table_wide_constraints().pk_column_data_types();
        // PK column names in PK-definition order.
        let apk_column_names = apk_cdts
            .iter()
            .map(|cdt| cdt.column_name().clone())
            .collect::<Vec<ColumnName>>();
        // PK values in the same order as the names above.
        let apk_sql_values = apk_cdts
            .iter()
            .map(|cdt| {
                let (pos, _) = schema.index(&SchemaIndex::from(cdt.column_name()))?;
                if let SqlValue::NotNull(sql_value) = row.get_sql_value(pos)? {
                    Ok(sql_value.clone())
                } else {
                    panic!("primary key's column must be NOT NULL")
                }
            })
            .collect::<ApllodbResult<Vec<NnSqlValue>>>()?;
        Ok(Self::new(
            vtable.table_name().clone(),
            apk_column_names,
            apk_sql_values,
        ))
    }
    /// Build the apparent PK from the PK definition plus a `row` whose layout
    /// is described by `column_names`.
    ///
    /// Panics when a PK column is missing from `column_names` or holds NULL.
    pub fn from_table_pk_def(
        vtable: &VTable,
        column_names: &[ColumnName],
        row: &Row,
    ) -> ApllodbResult<Self> {
        let apk_cdts = vtable.table_wide_constraints().pk_column_data_types();
        let apk_column_names = apk_cdts
            .iter()
            .map(|cdt| cdt.column_name().clone())
            .collect::<Vec<ColumnName>>();
        let apk_sql_values = apk_cdts
            .iter()
            .map(|cdt| {
                // Position of this PK column inside the supplied row layout.
                let raw_pos = column_names
                    .iter()
                    .position(|cn| cn == cdt.column_name())
                    .unwrap_or_else(|| {
                        panic!(
                            "primary key's column `{}` is not included in PK's columns=`{:#?}`",
                            cdt.column_name().as_str(),
                            apk_cdts
                        )
                    });
                let sql_value = row.index(RPos::new(raw_pos)).clone();
                if let SqlValue::NotNull(nn_sql_value) = sql_value {
                    nn_sql_value
                } else {
                    panic!("primary key's column must be NOT NULL")
                }
            })
            .collect::<Vec<NnSqlValue>>();
        Ok(Self::new(
            vtable.table_name().clone(),
            apk_column_names,
            apk_sql_values,
        ))
    }
    /// Table this PK belongs to.
    pub fn table_name(&self) -> &TableName {
        &self.table_name
    }
    /// PK column names, in definition order.
    pub fn column_names(&self) -> &[ColumnName] {
        &self.pk_column_names
    }
    /// PK column values, in the same order as `column_names()`.
    pub fn sql_values(&self) -> &[NnSqlValue] {
        &self.sql_values
    }
    /// Borrowed (column name, value) pairs.
    pub fn zipped(&self) -> Vec<(&ColumnName, &NnSqlValue)> {
        self.pk_column_names.iter().zip(&self.sql_values).collect()
    }
    /// Owned (column name, value) pairs, consuming `self`.
    pub fn into_zipped(self) -> Vec<(ColumnName, NnSqlValue)> {
        self.pk_column_names
            .into_iter()
            .zip(self.sql_values)
            .collect()
    }
    /// Column data types of the PK columns (`false`: PK columns are NOT NULL).
    pub fn column_data_types(&self) -> Vec<ColumnDataType> {
        self.zipped()
            .into_iter()
            .map(|(cname, sql_value)| {
                ColumnDataType::new(cname.clone(), sql_value.sql_type(), false)
            })
            .collect()
    }
    /// Express this PK as a WHERE-style condition over fully-qualified
    /// column names: `t.c1 = v1 AND t.c2 = v2 AND ...`.
    pub fn to_condition_expression(&self) -> ApllodbResult<BooleanExpression> {
        // One equality comparison per PK column.
        let mut comparisons = self
            .zipped()
            .into_iter()
            .map(|(column_name, sql_value)| {
                let index = SchemaIndex::from(
                    format!("{}.{}", self.table_name.as_str(), column_name.as_str()).as_str(),
                );
                ComparisonFunction::EqualVariant {
                    left: Box::new(Expression::SchemaIndexVariant(index)),
                    right: Box::new(Expression::ConstantVariant(SqlValue::NotNull(
                        sql_value.clone(),
                    ))),
                }
            })
            .collect::<VecDeque<ComparisonFunction>>();
        // Fold the comparisons into a left-leaning AND chain.
        let mut boolean_expr = BooleanExpression::ComparisonFunctionVariant(
            comparisons
                .pop_front()
                .expect("ApparentPrimaryKey has at least 1 column value"),
        );
        while let Some(comparison) = comparisons.pop_front() {
            boolean_expr = BooleanExpression::LogicalFunctionVariant(LogicalFunction::AndVariant {
                left: Box::new(boolean_expr),
                right: Box::new(BooleanExpression::ComparisonFunctionVariant(comparison)),
            });
        }
        Ok(boolean_expr)
    }
}
|
//! <https://github.com/EOSIO/eosio.cdt/blob/796ff8bee9a0fc864f665a0a4d018e0ff18ac383/libraries/eosiolib/contracts/eosio/producer_schedule.hpp#L54-L69>
use alloc::vec::Vec;
use crate::{AccountName, NumBytes, ProducerKey, Read, Write, PublicKey, Checksum256, UnsignedInt};
use codec::{Encode, Decode};
use core::default::Default;
#[cfg(feature = "std")]
use serde::{Deserialize, Serialize};
/// Defines both the order, account name, and signing keys of the active set
/// of producers.
// NOTE: keep the field order and #[repr(C)] layout stable — the Read/Write/
// NumBytes derives serialize fields in declaration order, so reordering
// would change the wire format and the schedule hash.
#[derive(Read, Write, NumBytes, Clone, Debug, PartialEq, Encode, Decode)]
#[cfg_attr(feature = "std", derive(Deserialize, Serialize))]
#[eosio_core_root_path = "crate"]
#[repr(C)]
pub struct ProducerSchedule {
    /// Version number of the schedule. It is sequentially incrementing
    /// version number.
    pub version: u32,
    /// List of producers for this schedule, including its signing key
    pub producers: Vec<ProducerKey>,
}
impl ProducerSchedule {
    /// Construct a schedule from a version number and an ordered producer list.
    pub fn new(version: u32, producers: Vec<ProducerKey>) -> Self {
        Self { version, producers }
    }
    /// Signing key of producer `p`, or the default `PublicKey` when `p`
    /// is not part of this schedule.
    pub fn get_producer_key(&self, p: AccountName) -> PublicKey {
        self.producers
            .iter()
            .find(|entry| entry.producer_name == p)
            .map(|entry| entry.block_signing_key.clone())
            .unwrap_or_default()
    }
    /// Checksum of the serialized schedule.
    pub fn schedule_hash(&self) -> crate::Result<Checksum256> {
        Checksum256::hash(self.clone())
    }
}
// This is just for testing: a version-0 schedule containing the single
// well-known `eosio` producer with a hard-coded signing key.
impl Default for ProducerSchedule {
    fn default() -> Self {
        // Raw 33-byte compressed public-key material for the test producer.
        let key_data = [
            2u8, 192, 222, 210, 188, 31, 19, 5, 251, 15, 170, 197, 230, 192, 62, 227,
            161, 146, 66, 52, 152, 84, 39, 182, 22, 124, 165, 105, 209, 61, 244, 53, 207
        ];
        let signing_key = PublicKey {
            type_: UnsignedInt::from(0u32),
            data: key_data,
        };
        // 6138663577826885632 is the base32-encoded account name `eosio`.
        let producer = ProducerKey {
            producer_name: AccountName::from(6138663577826885632),
            block_signing_key: signing_key,
        };
        ProducerSchedule {
            version: 0u32,
            producers: alloc::vec![producer],
        }
    }
}
#[cfg(test)]
mod test {
    use std::{
        error::Error,
        fs::File,
        io::Read,
        path::Path,
    };
    use super::*;
    use std::str::FromStr;
    // Load a JSON fixture from `src/test_data/` relative to the crate root.
    fn read_json_from_file(json_name: impl AsRef<str>) -> Result<String, Box<dyn Error>> {
        let path = Path::new(concat!(env!("CARGO_MANIFEST_DIR"), "/src/test_data/")).join(json_name.as_ref());
        let mut file = File::open(path)?;
        let mut json_str = String::new();
        file.read_to_string(&mut json_str)?;
        Ok(json_str)
    }
    // A minimal inline schedule should round-trip through serde.
    #[test]
    fn producers_schedule_deserialization_should_be_ok() {
        let s = r#"
        {
            "version":0,
            "producers":[
                {
                    "producer_name":"eosio",
                    "block_signing_key":"EOS6MRyAjQq8ud7hVNYcfnVPJqcVpscN5So8BhtHuGYqET5GDW5CV"
                }
            ]
        }
        "#;
        let new_producers: Result<ProducerSchedule, _> = serde_json::from_str(&s);
        assert!(new_producers.is_ok());
        let new_producers = new_producers.unwrap();
        assert_eq!(new_producers.version, 0);
        assert_eq!(new_producers.producers[0].producer_name.to_string(), "eosio");
        assert_eq!(new_producers.producers[0].block_signing_key.to_string(), "EOS6MRyAjQq8ud7hVNYcfnVPJqcVpscN5So8BhtHuGYqET5GDW5CV");
    }
    // The hard-coded Default schedule must decode to the `eosio` producer.
    #[test]
    fn producers_schedule_default_should_be_ok() {
        let new_producers = ProducerSchedule::default();
        assert_eq!(new_producers.version, 0);
        assert_eq!(new_producers.producers[0].producer_name.to_string(), "eosio");
        assert_eq!(new_producers.producers[0].block_signing_key.to_string(), "EOS6MRyAjQq8ud7hVNYcfnVPJqcVpscN5So8BhtHuGYqET5GDW5CV");
    }
    // Spot-check first, last and middle entries of the 20-producer fixture.
    #[test]
    fn new_producers_should_be_ok() {
        let json = "new_producers.json";
        let new_producers_str = read_json_from_file(json);
        assert!(new_producers_str.is_ok());
        let new_producers: Result<ProducerSchedule, _> = serde_json::from_str(&new_producers_str.unwrap());
        assert!(new_producers.is_ok());
        let new_producers = new_producers.unwrap();
        assert_eq!(new_producers.version, 1u32);
        assert_eq!(new_producers.producers.len(), 20);
        let producer_1 = new_producers.producers.first();
        assert!(producer_1.is_some());
        assert_eq!(producer_1.unwrap().producer_name.to_string(), "batinthedark");
        assert_eq!(producer_1.unwrap().block_signing_key.to_string(), "EOS6dwoM8XGMQn49LokUcLiony7JDkbHrsFDvh5svLvPDkXtvM7oR");
        let producer_20 = new_producers.producers.last();
        assert!(producer_20.is_some());
        assert_eq!(producer_20.unwrap().producer_name.to_string(), "wealthyhorse");
        assert_eq!(producer_20.unwrap().block_signing_key.to_string(), "EOS5i1HrfxfHLRJqbExgRodhrZwp4dcLioNn4xZWCyhoBK6DNZgZt");
        let producer_x = new_producers.producers.get(10);
        assert!(producer_x.is_some());
        assert_eq!(producer_x.unwrap().producer_name.to_string(), "lioninjungle");
        assert_eq!(producer_x.unwrap().block_signing_key.to_string(), "EOS5BcLionmbgEtcmu7qY6XKWaE1q31qCQSsd89zXij7FDXQnKjwk");
        let producer_x = new_producers.producers.get(13);
        assert!(producer_x.is_some());
        assert_ne!(producer_x.unwrap().producer_name.to_string(), "lioninjungle");
        assert_ne!(producer_x.unwrap().block_signing_key.to_string(), "EOS5BcLionmbgEtcmu7qY6XKWaE1q31qCQSsd89zXij7FDXQnKjwk");
    }
    // Known and unknown producers resolve to the expected signing keys.
    #[test]
    fn get_producer_key_should_work() {
        let json = "new_producers.json";
        let new_producers_str = read_json_from_file(json);
        assert!(new_producers_str.is_ok());
        let new_producers: Result<ProducerSchedule, _> = serde_json::from_str(&new_producers_str.unwrap());
        assert!(new_producers.is_ok());
        let new_producers = new_producers.unwrap();
        let pk = new_producers.get_producer_key(AccountName::from_str("wealthyhorse").unwrap());
        assert_eq!(pk, PublicKey::from_str("EOS5i1HrfxfHLRJqbExgRodhrZwp4dcLioNn4xZWCyhoBK6DNZgZt").unwrap());
        let pk = new_producers.get_producer_key(AccountName::from_str("pythoncolors").unwrap());
        assert_eq!(pk, PublicKey::from_str("EOS8R7GB5CLionUEy8FgGksGAGtc2cbcQWgty3MTAgzJvGTmtqPLz").unwrap());
        let pk = new_producers.get_producer_key(AccountName::from_str("littlerabbit").unwrap());
        assert_eq!(pk, PublicKey::from_str("EOS65orCLioNFkVT5uDF7J63bNUk97oF8T83iWfuvbSKWYUUq9EWd").unwrap());
    }
    // The schedule hash is pinned against known-good values.
    #[test]
    fn schedule_hash_should_work() {
        let json = "new_producers.json";
        let new_producers_str = read_json_from_file(json);
        assert!(new_producers_str.is_ok());
        let new_producers: Result<ProducerSchedule, _> = serde_json::from_str(&new_producers_str.unwrap());
        assert!(new_producers.is_ok());
        let new_producers = new_producers.unwrap();
        let hash = new_producers.schedule_hash();
        assert!(hash.is_ok());
        let hash = hash.unwrap();
        assert_eq!(hash, "e2b28d9dbe1948d0f36973014bbe1c1c936cd38b7907ba29f2d3fb9061b2dd3c".into());
        assert_eq!(
            ProducerSchedule::default().schedule_hash().unwrap().to_string(),
            "828135c21a947b15cdbf4941ba09e1c9e0a80e88a157b0989e9b476b71a21c6b"
        );
    }
}
|
use futures::{Future, Sink, Stream};
use netlink_packet_route::{
link::{LinkHeader, LinkMessage},
RtnlMessage,
};
use netlink_proto::{
packet::{
header::flags::{NLM_F_DUMP, NLM_F_REQUEST},
NetlinkFlags, NetlinkHeader, NetlinkMessage, NetlinkPayload,
},
sys::{Protocol, SocketAddr, TokioSocket},
NetlinkCodec, NetlinkFramed,
};
// Example: dump all network links over a raw NETLINK_ROUTE socket using
// blocking futures-0.1 combinators (`wait()`).
fn main() {
    let mut socket = TokioSocket::new(Protocol::Route).unwrap();
    // We could use the port number if we were interested in it.
    let _port_number = socket.bind_auto().unwrap().port_number();
    // Address 0/0 targets the kernel.
    socket.connect(&SocketAddr::new(0, 0)).unwrap();
    // `NetlinkFramed<RtnlMessage>` wraps the socket and provides
    // Stream and Sink implementations for the messages.
    let stream = NetlinkFramed::new(socket, NetlinkCodec::<NetlinkMessage<RtnlMessage>>::new());
    // Create the payload for the request: a GetLink with an empty header,
    // i.e. "list every link".
    let payload: NetlinkPayload<RtnlMessage> =
        RtnlMessage::GetLink(LinkMessage::from_parts(LinkHeader::new(), vec![])).into();
    // Create the header for the request. NLM_F_REQUEST marks it as a request;
    // NLM_F_DUMP asks the kernel to return the whole table.
    let mut header = NetlinkHeader::new();
    header.flags = NetlinkFlags::from(NLM_F_DUMP | NLM_F_REQUEST);
    header.sequence_number = 1;
    // Create the netlink packet itself
    let mut packet = NetlinkMessage::new(header, payload);
    // `finalize` is important: it guarantees the header is consistent
    // with the packet's payload. Having an incorrect header can lead to
    // a panic when the message is serialized.
    packet.finalize();
    // Serialize the packet and send it
    let mut buf = vec![0; packet.header.length as usize];
    packet.serialize(&mut buf[..packet.buffer_len()]);
    println!(">>> {:?}", packet);
    // futures-0.1 style: `wait()` blocks until the send completes and
    // returns the sink/stream for further use.
    let stream = stream.send((packet, SocketAddr::new(0, 0))).wait().unwrap();
    // Print all the incoming message (press ^C to exit)
    stream
        .for_each(|(packet, _addr)| {
            println!("<<< {:?}", packet);
            Ok(())
        })
        .wait()
        .unwrap();
}
|
// Camera/controller submodules. Judging by the names these implement
// FPS-style, orbit-style and Unreal-editor-style control schemes —
// see each module for the actual behavior.
pub mod fps;
pub mod orbit;
pub mod unreal;
|
use std::collections::HashMap;
use crate::object;
use crate::store::TxStateRef;
use crate::transaction;
use crate::transaction::requirements::*;
/// Mark the objects touched by `requirements` as locked to transaction `tx_id`.
///
/// Pointer-bearing requirements lock their object directly; key-value
/// requirements first lock individual keys via `lock_kv_requirements` and
/// lock the object itself only when a specific revision is required.
pub fn lock_requirements(
    tx_id: transaction::Id,
    requirements: &Vec<TransactionRequirement>,
    objects: &mut HashMap<object::Id, TxStateRef>) {
    for requirement in requirements {
        let target = match requirement {
            TransactionRequirement::LocalTime{..} => None,
            // These four all lock the referenced object unconditionally.
            TransactionRequirement::RevisionLock{pointer, ..}
            | TransactionRequirement::VersionBump{pointer, ..}
            | TransactionRequirement::RefcountUpdate{pointer, ..}
            | TransactionRequirement::DataUpdate{pointer, ..} => Some(pointer),
            TransactionRequirement::KeyValueUpdate{pointer, required_revision, full_content_lock, key_requirements} => {
                lock_kv_requirements(tx_id, objects.get_mut(&pointer.id).unwrap(), full_content_lock, key_requirements);
                // Only a revision-pinned KV update locks the whole object.
                required_revision.map(|_| pointer)
            },
        };
        if let Some(pointer) = target {
            objects.get(&pointer.id).unwrap().borrow_mut().locked_to_transaction = Some(tx_id);
        }
    }
}
/// Lock the individual keys named by a key-value requirement to `tx_id`,
/// and lock the object itself for whole-object requirements
/// (KeyObjectRevision / WithinRange).
fn lock_kv_requirements(
    tx_id: transaction::Id,
    state: &mut TxStateRef,
    full_content_lock: &Vec<transaction::requirements::KeyRevision>,
    key_requirements: &Vec<KeyRequirement>) {
    let mut s = state.borrow_mut();
    let mut object_lock: Option<transaction::Id> = None;
    {
        let kv = s.kv_state.as_mut().unwrap();
        // A full-content lock pins every key it lists.
        for kr in full_content_lock {
            kv.content.get_mut(&kr.key).unwrap().locked_to_transaction = Some(tx_id);
        }
        for requirement in key_requirements {
            match requirement {
                // All of these lock exactly the key they name; the key must exist.
                KeyRequirement::Exists{key}
                | KeyRequirement::TimestampLessThan{key, ..}
                | KeyRequirement::TimestampGreaterThan{key, ..}
                | KeyRequirement::TimestampEquals{key, ..}
                | KeyRequirement::KeyRevision{key, ..} => {
                    kv.content.get_mut(key).unwrap().locked_to_transaction = Some(tx_id);
                },
                // The key may legitimately be absent — lock only if present.
                KeyRequirement::MayExist{key} => {
                    if let Some(entry) = kv.content.get_mut(key) {
                        entry.locked_to_transaction = Some(tx_id);
                    }
                },
                // Absence is guarded through a separate lock set.
                KeyRequirement::DoesNotExist{key} => {
                    kv.no_existence_locks.insert(key.clone());
                },
                // Whole-object requirements lock the object, not a key.
                KeyRequirement::KeyObjectRevision{..}
                | KeyRequirement::WithinRange{..} => {
                    object_lock = Some(tx_id);
                }
            }
        }
    }
    s.locked_to_transaction = object_lock
}
/// Release the object-level locks previously taken by `lock_requirements`.
///
/// Mirrors its structure: pointer-bearing requirements unlock their object;
/// key-value requirements release per-key locks via `unlock_kv_requirements`
/// and unlock the object only when a revision had been required.
pub fn unlock_requirements(
    requirements: &Vec<TransactionRequirement>,
    objects: &mut HashMap<object::Id, TxStateRef>) {
    for requirement in requirements {
        let target = match requirement {
            TransactionRequirement::LocalTime{..} => None,
            // These four all unlock the referenced object unconditionally.
            TransactionRequirement::RevisionLock{pointer, ..}
            | TransactionRequirement::VersionBump{pointer, ..}
            | TransactionRequirement::RefcountUpdate{pointer, ..}
            | TransactionRequirement::DataUpdate{pointer, ..} => Some(pointer),
            TransactionRequirement::KeyValueUpdate{pointer, required_revision, full_content_lock, key_requirements} => {
                unlock_kv_requirements(objects.get_mut(&pointer.id).unwrap(), full_content_lock, key_requirements);
                required_revision.map(|_| pointer)
            },
        };
        if let Some(pointer) = target {
            objects.get(&pointer.id).unwrap().borrow_mut().locked_to_transaction = None;
        }
    }
}
/// Release the per-key locks taken by `lock_kv_requirements`, and clear
/// the object-level lock only when an object-wide requirement had set it.
///
/// Panics (via `unwrap`) if the object has no `kv_state` or a referenced
/// key is absent.
fn unlock_kv_requirements(
    state: &mut TxStateRef,
    full_content_lock: &Vec<transaction::requirements::KeyRevision>,
    key_requirements: &Vec<KeyRequirement>) {
    let mut s = state.borrow_mut();
    // Set when an object-wide requirement means the object lock is ours
    // to release.
    let mut unlock_object = false;
    {
        let kv = s.kv_state.as_mut().unwrap();
        // Release full-content locks on every listed key.
        for kr in full_content_lock {
            kv.content.get_mut(&kr.key).unwrap().locked_to_transaction = None;
        }
        for r in key_requirements {
            match r {
                KeyRequirement::Exists{key} => {
                    kv.content.get_mut(&key).unwrap().locked_to_transaction = None;
                },
                KeyRequirement::MayExist{key} => {
                    // Key may legitimately be absent; unlock only if present.
                    if let Some(s) = kv.content.get_mut(&key) {
                        s.locked_to_transaction = None;
                    }
                },
                KeyRequirement::DoesNotExist{key} => {
                    // Drop the "must stay absent" marker for this key.
                    kv.no_existence_locks.remove(&key);
                },
                KeyRequirement::TimestampLessThan{key, ..} => {
                    kv.content.get_mut(&key).unwrap().locked_to_transaction = None
                },
                KeyRequirement::TimestampGreaterThan{key, ..} => {
                    kv.content.get_mut(&key).unwrap().locked_to_transaction = None
                },
                KeyRequirement::TimestampEquals{key, ..} => {
                    kv.content.get_mut(&key).unwrap().locked_to_transaction = None
                },
                KeyRequirement::KeyRevision{key, ..} => {
                    kv.content.get_mut(&key).unwrap().locked_to_transaction = None
                },
                KeyRequirement::KeyObjectRevision{..} => {
                    unlock_object = true;
                },
                KeyRequirement::WithinRange{..} => {
                    unlock_object = true;
                }
            }
        }
    }
    // Unlike lock_kv_requirements, only touch the object lock when an
    // object-wide requirement was present.
    if unlock_object {
        s.locked_to_transaction = None;
    }
}
use std::cmp::max;
/// Trial-division primality test.
///
/// Anything below 2 (including negatives) is not prime; otherwise every
/// candidate divisor `d` with `d * d <= n` is checked.
fn is_prime(n: i64) -> bool {
    if n < 2 {
        return false;
    }
    (2..).take_while(|d| d * d <= n).all(|d| n % d != 0)
}
/// Sanity-check `is_prime` on small known primes and non-primes.
#[test]
fn test_is_prime() {
    for &prime in [2i64, 47].iter() {
        assert!(is_prime(prime));
    }
    for &not_prime in [1i64, 48].iter() {
        assert!(!is_prime(not_prime));
    }
}
/// True when the flags at indices 1..=9 form a contiguous run of
/// `true`s followed only by `false`s — i.e. the digits used so far are
/// exactly 1..=k for some k, with no gaps.
fn is_pandigital(selected: &Vec<bool>) -> bool {
    // Any "false then true" transition within 1..=9 means a digit gap.
    !selected[1..10].windows(2).any(|pair| !pair[0] && pair[1])
}
/// `is_pandigital` accepts a gap-free prefix of digits and rejects any
/// selection with a gap.
#[test]
fn test_is_pandigital() {
    let mut flags = vec![false; 10];
    flags[1] = true;
    flags[2] = true;
    flags[3] = true;
    assert!(is_pandigital(&flags));
    // Digit 1 missing while 2 and 3 are used: gap.
    flags[1] = false;
    assert!(!is_pandigital(&flags));
    // Digit 9 used while 4..=8 are not: gap.
    flags[1] = true;
    flags[9] = true;
    assert!(!is_pandigital(&flags));
}
/// Depth-first search over digit sequences: `num` is the number built
/// so far, `selected[d]` marks digit `d` as already used. Returns the
/// largest pandigital prime reachable from this state, if any.
fn solve(num: i64, selected: &Vec<bool>) -> Option<i64> {
    // Count the current number itself when it qualifies.
    let mut best = if is_prime(num) && is_pandigital(selected) {
        Some(num)
    } else {
        None
    };
    // Append each still-unused digit 1..=9 and recurse.
    for digit in 1..10i64 {
        let idx = digit as usize;
        if selected[idx] {
            continue;
        }
        let mut next_selected = selected.clone();
        next_selected[idx] = true;
        if let Some(candidate) = solve(num * 10 + digit, &next_selected) {
            best = Some(match best {
                Some(current) => max(current, candidate),
                None => candidate,
            });
        }
    }
    best
}
fn main() {
println!("{}", solve(0, &vec![false; 10]).unwrap())
} |
use std::mem;
use std::ops::Mul;
use ui_sys::{self, uiDrawMatrix};
/// A transformation which can be applied to the contents of a DrawContext.
#[derive(Copy, Clone, Debug)]
pub struct Transform {
    // Raw libui matrix; every method below delegates to ui_sys calls on it.
    ui_matrix: uiDrawMatrix,
}
impl Transform {
    /// Create a Transform from an existing raw uiDrawMatrix.
    pub fn from_ui_matrix(ui_matrix: &uiDrawMatrix) -> Transform {
        Transform {
            ui_matrix: *ui_matrix,
        }
    }
    /// Create a new Transform that does nothing.
    pub fn identity() -> Transform {
        unsafe {
            // uiDrawMatrixSetIdentity initializes the matrix, so
            // assume_init afterwards is sound.
            let mut matrix = mem::MaybeUninit::uninit();
            ui_sys::uiDrawMatrixSetIdentity(matrix.as_mut_ptr());
            Transform::from_ui_matrix(&matrix.assume_init())
        }
    }
    /// Modify this Transform to translate by the given amounts.
    pub fn translate(&mut self, x: f64, y: f64) {
        unsafe { ui_sys::uiDrawMatrixTranslate(&mut self.ui_matrix, x, y) }
    }
    /// Modify this Transform to scale by the given amounts from the given center.
    pub fn scale(&mut self, x_center: f64, y_center: f64, x: f64, y: f64) {
        unsafe { ui_sys::uiDrawMatrixScale(&mut self.ui_matrix, x_center, y_center, x, y) }
    }
    /// Modify this Transform to rotate around the given center by the given angle.
    pub fn rotate(&mut self, x: f64, y: f64, angle: f64) {
        unsafe { ui_sys::uiDrawMatrixRotate(&mut self.ui_matrix, x, y, angle) }
    }
    /// Modify this Transform to skew from the given point by the given amount.
    pub fn skew(&mut self, x: f64, y: f64, xamount: f64, yamount: f64) {
        unsafe { ui_sys::uiDrawMatrixSkew(&mut self.ui_matrix, x, y, xamount, yamount) }
    }
    /// Compose this Transform with another, creating a Transform which represents both operations.
    pub fn compose(&mut self, src: &Transform) {
        unsafe { ui_sys::uiDrawMatrixMultiply(&mut self.ui_matrix, src.ptr()) }
    }
    /// Returns true if inverting this Transform is possible.
    pub fn invertible(&self) -> bool {
        unsafe {
            // libui takes *mut even for this query; the const-to-mut
            // cast mirrors that C signature — presumably the call never
            // writes through it (TODO confirm upstream).
            ui_sys::uiDrawMatrixInvertible(
                &self.ui_matrix as *const uiDrawMatrix as *mut uiDrawMatrix,
            ) != 0
        }
    }
    /// Attempts to invert the Transform, returning true if it succeeded and false if it failed.
    pub fn invert(&mut self) -> bool {
        unsafe { ui_sys::uiDrawMatrixInvert(&mut self.ui_matrix) != 0 }
    }
    /// Run the point through uiDrawMatrixTransformPoint, returning the
    /// transformed coordinates.
    pub fn transform_point(&self, mut point: (f64, f64)) -> (f64, f64) {
        unsafe {
            ui_sys::uiDrawMatrixTransformPoint(
                &self.ui_matrix as *const uiDrawMatrix as *mut uiDrawMatrix,
                &mut point.0,
                &mut point.1,
            );
            point
        }
    }
    /// Run the size through uiDrawMatrixTransformSize, returning the
    /// transformed extents.
    pub fn transform_size(&self, mut size: (f64, f64)) -> (f64, f64) {
        unsafe {
            ui_sys::uiDrawMatrixTransformSize(
                &self.ui_matrix as *const uiDrawMatrix as *mut uiDrawMatrix,
                &mut size.0,
                &mut size.1,
            );
            size
        }
    }
    /// Raw pointer to the underlying uiDrawMatrix, for passing to ui_sys calls.
    pub fn ptr(&self) -> *mut uiDrawMatrix {
        &self.ui_matrix as *const uiDrawMatrix as *mut uiDrawMatrix
    }
}
// `a * b` composes the two transforms via `Transform::compose`,
// consuming `a` and returning the combined transform.
impl Mul<Transform> for Transform {
    type Output = Transform;
    fn mul(mut self, other: Transform) -> Transform {
        self.compose(&other);
        self
    }
}
|
// Chapter 2, "using variables and types" — program section.
fn main() {
    // Execution starts here: greet the player.
    println!("welcome to the game!");
}
// Expected output:
//   welcome to the game!
|
// Print the sign of `x`: "0", "-" or "+", each followed by a newline.
fn foo(x: &i32) {
    if *x == 0 { print!("0\n"); }
    else if *x < 0 { print!("-\n"); }
    else if *x > 0 { print!("+\n"); }
    // The three comparisons above are exhaustive for an i32, so this
    // branch can never execute. The original `1/0` here is rejected by
    // rustc at compile time (unconditional_panic is deny-by-default);
    // `unreachable!()` states the same intent and compiles.
    else { unreachable!("absurd!"); }
}
// Print the sign of `x`, decrement it by two, then print the new sign.
fn bar(x: &mut i32) {
    foo(x);
    *x -= 2;
    foo(x);
}
fn main() {
    let mut x = 8;
    // Bump x to 9 through a scoped mutable borrow.
    {
        let y = &mut x;
        *y += 1;
    }
    foo(&x);
    if x == 9 { print!("yes\n"); } else { print!("oups\n"); }
    // Repeatedly subtract two (printing signs each time) until x goes
    // non-positive: 9 -> 7 -> 5 -> 3 -> 1 -> -1.
    loop {
        if x <= 0 {
            break;
        }
        bar(&mut x);
    }
    if x == -1 { print!("yes\n"); } else { print!("oups\n"); }
}
|
// This file was generated by gir (https://github.com/gtk-rs/gir @ fbb95f4)
// from gir-files (https://github.com/gtk-rs/gir-files @ 77d1f70)
// DO NOT EDIT
use Cancellable;
use Error;
use IOStream;
use Socket;
use SocketAddress;
use SocketFamily;
use SocketType;
use ffi;
use glib;
use glib::object::Downcast;
use glib::object::IsA;
use glib::signal::SignalHandlerId;
use glib::signal::connect;
use glib::translate::*;
use glib_ffi;
use gobject_ffi;
use std::boxed::Box as Box_;
use std::mem;
use std::mem::transmute;
use std::ptr;
// Generated glib wrapper: declares `SocketConnection` as a GObject
// subclass of `IOStream` backed by the C type `GSocketConnection`.
glib_wrapper! {
    pub struct SocketConnection(Object<ffi::GSocketConnection, ffi::GSocketConnectionClass>): IOStream;
    match fn {
        get_type => || ffi::g_socket_connection_get_type(),
    }
}
impl SocketConnection {
    /// Wrapper for g_socket_connection_factory_lookup_type: look up the
    /// GType registered for this family/type/protocol combination.
    pub fn factory_lookup_type(family: SocketFamily, type_: SocketType, protocol_id: i32) -> glib::types::Type {
        unsafe {
            from_glib(ffi::g_socket_connection_factory_lookup_type(family.to_glib(), type_.to_glib(), protocol_id))
        }
    }
    /// Wrapper for g_socket_connection_factory_register_type: register
    /// `g_type` for this family/type/protocol combination.
    pub fn factory_register_type(g_type: glib::types::Type, family: SocketFamily, type_: SocketType, protocol: i32) {
        unsafe {
            ffi::g_socket_connection_factory_register_type(g_type.to_glib(), family.to_glib(), type_.to_glib(), protocol);
        }
    }
}
/// Generated convenience trait exposing the GSocketConnection API on
/// any type that IsA<SocketConnection>.
pub trait SocketConnectionExt {
    /// Synchronously connect to `address` (g_socket_connection_connect).
    fn connect<'a, P: IsA<SocketAddress>, Q: Into<Option<&'a Cancellable>>>(&self, address: &P, cancellable: Q) -> Result<(), Error>;
    /// Asynchronous variant; `callback` receives the completion result.
    fn connect_async<'a, P: IsA<SocketAddress>, Q: Into<Option<&'a Cancellable>>, R: FnOnce(Result<(), Error>) + Send + 'static>(&self, address: &P, cancellable: Q, callback: R);
    /// Local address of the underlying socket.
    fn get_local_address(&self) -> Result<SocketAddress, Error>;
    /// Remote address of the underlying socket.
    fn get_remote_address(&self) -> Result<SocketAddress, Error>;
    /// The underlying GSocket, if any.
    fn get_socket(&self) -> Option<Socket>;
    /// Whether the connection is currently connected.
    fn is_connected(&self) -> bool;
    /// Subscribe to "notify::socket" property-change signals.
    fn connect_property_socket_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;
}
// Generated blanket implementation; each method marshals arguments with
// to_glib_none() and converts GError out-params into Result.
impl<O: IsA<SocketConnection> + IsA<glib::object::Object>> SocketConnectionExt for O {
    fn connect<'a, P: IsA<SocketAddress>, Q: Into<Option<&'a Cancellable>>>(&self, address: &P, cancellable: Q) -> Result<(), Error> {
        let cancellable = cancellable.into();
        let cancellable = cancellable.to_glib_none();
        unsafe {
            // GError out-param: non-null after the call means failure.
            let mut error = ptr::null_mut();
            let _ = ffi::g_socket_connection_connect(self.to_glib_none().0, address.to_glib_none().0, cancellable.0, &mut error);
            if error.is_null() { Ok(()) } else { Err(from_glib_full(error)) }
        }
    }
    fn connect_async<'a, P: IsA<SocketAddress>, Q: Into<Option<&'a Cancellable>>, R: FnOnce(Result<(), Error>) + Send + 'static>(&self, address: &P, cancellable: Q, callback: R) {
        let cancellable = cancellable.into();
        let cancellable = cancellable.to_glib_none();
        // Double-boxed so a thin pointer can cross the C boundary.
        let user_data: Box<Box<R>> = Box::new(Box::new(callback));
        // C completion callback: recovers the boxed closure and invokes
        // it with the finish() result.
        unsafe extern "C" fn connect_async_trampoline<R: FnOnce(Result<(), Error>) + Send + 'static>(_source_object: *mut gobject_ffi::GObject, res: *mut ffi::GAsyncResult, user_data: glib_ffi::gpointer)
        {
            callback_guard!();
            let mut error = ptr::null_mut();
            let _ = ffi::g_socket_connection_connect_finish(_source_object as *mut _, res, &mut error);
            let result = if error.is_null() { Ok(()) } else { Err(from_glib_full(error)) };
            // Reclaim ownership of the closure exactly once.
            let callback: Box<Box<R>> = Box::from_raw(user_data as *mut _);
            callback(result);
        }
        let callback = connect_async_trampoline::<R>;
        unsafe {
            ffi::g_socket_connection_connect_async(self.to_glib_none().0, address.to_glib_none().0, cancellable.0, Some(callback), Box::into_raw(user_data) as *mut _);
        }
    }
    fn get_local_address(&self) -> Result<SocketAddress, Error> {
        unsafe {
            let mut error = ptr::null_mut();
            let ret = ffi::g_socket_connection_get_local_address(self.to_glib_none().0, &mut error);
            if error.is_null() { Ok(from_glib_full(ret)) } else { Err(from_glib_full(error)) }
        }
    }
    fn get_remote_address(&self) -> Result<SocketAddress, Error> {
        unsafe {
            let mut error = ptr::null_mut();
            let ret = ffi::g_socket_connection_get_remote_address(self.to_glib_none().0, &mut error);
            if error.is_null() { Ok(from_glib_full(ret)) } else { Err(from_glib_full(error)) }
        }
    }
    fn get_socket(&self) -> Option<Socket> {
        unsafe {
            from_glib_none(ffi::g_socket_connection_get_socket(self.to_glib_none().0))
        }
    }
    fn is_connected(&self) -> bool {
        unsafe {
            from_glib(ffi::g_socket_connection_is_connected(self.to_glib_none().0))
        }
    }
    fn connect_property_socket_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
        unsafe {
            // Box the closure and hand the raw pointer plus trampoline
            // to g_signal_connect.
            let f: Box_<Box_<Fn(&Self) + 'static>> = Box_::new(Box_::new(f));
            connect(self.to_glib_none().0, "notify::socket",
                transmute(notify_socket_trampoline::<Self> as usize), Box_::into_raw(f) as *mut _)
        }
    }
}
// C callback invoked by GObject for "notify::socket": recovers the
// boxed Rust closure from `f` and calls it with the connection.
unsafe extern "C" fn notify_socket_trampoline<P>(this: *mut ffi::GSocketConnection, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer)
where P: IsA<SocketConnection> {
    callback_guard!();
    let f: &&(Fn(&P) + 'static) = transmute(f);
    f(&SocketConnection::from_glib_borrow(this).downcast_unchecked())
}
|
use std::error::Error;
use std::fs::File;
use std::io::prelude::*;
use std::process::Command;
/// Zero-sized error type that swallows any `std::error::Error`, letting
/// the git helpers below use `?` while callers only care whether the
/// command succeeded at all.
struct Ignore;
impl<E> From<E> for Ignore
where
    E: Error,
{
    fn from(_: E) -> Ignore {
        Ignore
    }
}
/// Run `git` with the given arguments and capture stdout as a String.
///
/// Any failure (spawning git, non-UTF-8 output) is collapsed into
/// `Ignore`, so callers can simply fall back to a default value.
fn git_output(args: &[&str]) -> Result<String, Ignore> {
    Ok(String::from_utf8(
        Command::new("git").args(args).output()?.stdout,
    )?)
}
/// Current commit hash (`git rev-parse HEAD`).
fn commit_hash() -> Result<String, Ignore> {
    git_output(&["rev-parse", "HEAD"])
}
/// Human-readable description (`git describe --all --dirty --long`).
fn commit_describe() -> Result<String, Ignore> {
    git_output(&["describe", "--all", "--dirty", "--long"])
}
/// Committer date of HEAD in strict ISO 8601 format (`%cI`).
fn commit_date() -> Result<String, Ignore> {
    git_output(&["log", "-1", "--pretty=format:%cI"])
}
fn main() {
    // Gather git metadata, falling back to "N/A" when git is absent or
    // this is not a checkout.
    let gitref = match commit_hash() {
        Ok(v) => v.trim_end().to_string(),
        Err(_) => "N/A".to_string(),
    };
    let gitdate = match commit_date() {
        Ok(v) => v.trim_end().to_string(),
        Err(_) => "N/A".to_string(),
    };
    let gitdescribe = match commit_describe() {
        Ok(v) => v.trim_end().to_string(),
        Err(_) => "N/A".to_string(),
    };
    let version = env!("CARGO_PKG_VERSION");
    // Source text to generate: a BuildInfo struct plus a static
    // instance carrying version and git metadata.
    let new_content = format!(
        "#[derive(Serialize, Debug, Clone)]
pub struct BuildInfo {{
pub version: &'static str,
pub commit_hash: &'static str,
pub commit_date: &'static str,
pub commit_describe: &'static str,
}}
pub static BUILD_INFO: BuildInfo = BuildInfo {{
version: \"{}\",
commit_hash: \"{}\",
commit_date: \"{}\",
commit_describe: \"{}\",
}};
",
        version, gitref, gitdate, gitdescribe
    );
    // Only rewrite src/build_info.rs when its content actually changed,
    // so unchanged builds do not touch the file (and trigger rebuilds).
    let update = File::open("src/build_info.rs")
        .map(|mut f| {
            let mut contents = String::new();
            f.read_to_string(&mut contents).unwrap();
            return contents;
        })
        .map(|content| content != new_content)
        .unwrap_or(true);
    if update {
        let mut file = File::create("src/build_info.rs").unwrap();
        file.write_all(new_content.as_bytes()).unwrap();
    }
}
|
use aoc20::days::day18;
// Worked examples from the Advent of Code 2020 day 18 puzzle text.
// Part 1 semantics: + and * share equal precedence, evaluated left to
// right; parentheses override.
#[test]
fn day18_test_part1() {
    assert_eq!(71, day18::evaluate("1 + 2 * 3 + 4 * 5 + 6", day18::Part::Part1));
    assert_eq!(51, day18::evaluate("1 + (2 * 3) + (4 * (5 + 6))", day18::Part::Part1));
    assert_eq!(26, day18::evaluate("2 * 3 + (4 * 5)", day18::Part::Part1));
    assert_eq!(437, day18::evaluate("5 + (8 * 3 + 9 + 3 * 4 * 3)", day18::Part::Part1));
    assert_eq!(12240, day18::evaluate("5 * 9 * (7 * 3 * 3 + 9 * 3 + (8 + 6 * 4))", day18::Part::Part1));
    assert_eq!(13632, day18::evaluate("((2 + 4 * 9) * (6 + 9 * 8 + 6) + 6) + 2 + 4 * 2", day18::Part::Part1));
}
// Part 2 semantics: + binds tighter than *.
#[test]
fn day18_test_part2() {
    assert_eq!(231, day18::evaluate("1 + 2 * 3 + 4 * 5 + 6", day18::Part::Part2));
    assert_eq!(51, day18::evaluate("1 + (2 * 3) + (4 * (5 + 6))", day18::Part::Part2));
    assert_eq!(46, day18::evaluate("2 * 3 + (4 * 5)", day18::Part::Part2));
    assert_eq!(1445, day18::evaluate("5 + (8 * 3 + 9 + 3 * 4 * 3)", day18::Part::Part2));
    assert_eq!(669060, day18::evaluate("5 * 9 * (7 * 3 * 3 + 9 * 3 + (8 + 6 * 4))", day18::Part::Part2));
    assert_eq!(23340, day18::evaluate("((2 + 4 * 9) * (6 + 9 * 8 + 6) + 6) + 2 + 4 * 2", day18::Part::Part2));
}
|
use structopt::StructOpt;
use riff::diff::{compare, CompareOptions};
/// Command-line options for the image-diff tool, parsed by structopt.
#[derive(Debug, StructOpt)]
struct Opt {
    /// Path to image (jpeg or png) to compare from
    base_path: String,
    /// Path to image (jpeg or png) to compare to
    diff_path: String,
    /// Path to output image (jpeg or png)
    #[structopt(long = "output", short = "o", default_value = "./output.png")]
    output_path: String,
    /// The color of differing pixels in [R, G, B, A] format
    #[structopt(long = "diffColour", short = "c", default_value = "[218, 165, 32, 255]", parse(try_from_str="parse_num_array"))]
    diff_colour: [u32; 4],
    /// Matching threshold, smaller values makes pixel comparison more sensitive
    #[structopt(long = "threshold", short = "t", default_value = "0.1")]
    threshold: f64,
    /// Blending value of unchaged pixels
    #[structopt(long = "alpha", short = "a")]
    alpha: Option<f64>,
    /// The region within base image to compare to in [x, y, width, height] format. Useful when comparing differently sized images
    #[structopt(long="view", parse(try_from_str = "parse_num_array"))]
    view_port: Option<[u32; 4]>
}
/// Parse a `[a, b, c, d]`-style argument into a `[u32; 4]`.
///
/// Missing trailing elements default to 255 (opaque alpha for RGBA).
/// Returns `Err` instead of panicking on malformed input — the original
/// `expect(...)` aborted the process on a bad number, and a fifth
/// element indexed out of bounds and panicked.
fn parse_num_array(array: &str) -> Result<[u32; 4], &'static str> {
    let inner = array.trim_start_matches('[').trim_end_matches(']');
    let mut num_array: [u32; 4] = [255; 4];
    for (i, el) in inner.split(',').enumerate() {
        if i >= num_array.len() {
            return Err("Argument has too many elements, correct format should be: [a, b, c, d]");
        }
        num_array[i] = el
            .trim()
            .parse::<u32>()
            .map_err(|_| "Argument incorrectly formatted, correct format should be: [a, b, c]")?;
    }
    Ok(num_array)
}
fn main() {
    let opt = Opt::from_args();
    // NOTE(review): base_path/diff_path are already Strings, so these
    // String::from calls are no-op conversions.
    let base = String::from(opt.base_path);
    let diff = String::from(opt.diff_path);
    let base_img = read_image_from_file(&base);
    let diff_img = read_image_from_file(&diff);
    // Bundle the CLI flags into the comparison options.
    let options = CompareOptions {
        threshold: opt.threshold,
        alpha: opt.alpha,
        diff_colour: opt.diff_colour,
        view_port: opt.view_port
    };
    let img = compare(&base_img, &diff_img, options);
    // Persist the diff image; abort loudly when it cannot be written.
    match img.save(opt.output_path.clone()) {
        Ok(_) => (),
        Err(_) => panic!("Could not save output file at {}", opt.output_path)}
}
/// Load an image from `path`, panicking with a readable message when the
/// file is missing or not a supported format.
///
/// Takes `&str` rather than `&String` so any string slice can be passed;
/// existing callers passing `&String` keep working via deref coercion.
fn read_image_from_file(path: &str) -> image::DynamicImage {
    match image::open(path) {
        Ok(img) => img,
        Err(_) => panic!("Could not read file at {}", path)
    }
}
|
use crate::util::get_character_histogram;
use crate::util::hamming_distance;
use crate::util::score_text;
// Decrypt (or encrypt — XOR is its own inverse) `ciphertext` with a
// single-byte key.
pub fn decrypt_xor(key: u8, ciphertext: &[u8]) -> Vec<u8> {
    let mut output = Vec::with_capacity(ciphertext.len());
    for byte in ciphertext {
        output.push(byte ^ key);
    }
    output
}
// Decrypt with `key` and rate the result; returns (score, plaintext).
fn get_score(key: u8, ciphertext: &[u8]) -> (usize, Vec<u8>) {
    let candidate = decrypt_xor(key, ciphertext);
    let score = score_text(&candidate);
    (score, candidate)
}
// Try every key produced by `key_generator` over `key_list`, keeping
// whichever decryption scores strictly higher than the incumbent
// (`current_*`) values. Returns (best_score, best_key, best_plaintext).
fn try_decrypt_against_key_list<F>(
    ciphertext: &[u8],
    key_list: &[u8],
    key_generator: F,
    current_score: usize,
    current_key: u8,
    current_decrypted: &[u8],
) -> (usize, u8, Vec<u8>)
where
    F: Fn(&u8) -> u8,
{
    let mut best_score = current_score;
    let mut best_key = current_key;
    let mut best_decrypted = current_decrypted.to_vec();
    for byte in key_list {
        let candidate_key = key_generator(byte);
        let (candidate_score, candidate_decrypted) = get_score(candidate_key, ciphertext);
        // Strictly greater, so the earliest best candidate is kept.
        if candidate_score > best_score {
            best_score = candidate_score;
            best_key = candidate_key;
            best_decrypted = candidate_decrypted;
        }
    }
    (best_score, best_key, best_decrypted)
}
// Use a test string (characters expected in the plaintext) to guess the
// key: for each candidate character, try keys of the form
// `histogram_byte ^ test_char` and keep the best-scoring decryption.
fn try_decrypt_with_test_string(
    ciphertext: &[u8],
    test_string: &[u8],
    key_list: &[u8],
) -> Option<(usize, u8, Vec<u8>)> {
    // Fold over the test characters, threading the best result so far.
    let (score, key, decrypted) = test_string.iter().fold(
        (0, 0u8, Vec::<u8>::new()),
        |(score, key, decrypted), value| {
            // Iterate over the key list with this test character.
            let (local_score, local_key, local_decrypted) = try_decrypt_against_key_list(
                ciphertext,
                key_list,
                |b| b ^ value,
                score,
                key,
                // Borrow directly: the `.clone()` previously here copied
                // the whole buffer only to immediately borrow the copy.
                &decrypted,
            );
            if local_score > score {
                (local_score, local_key, local_decrypted)
            } else {
                (score, key, decrypted)
            }
        },
    );
    // A zero score means nothing decrypted to plausible text.
    if score > 0 {
        return Some((score, key, decrypted));
    }
    None
}
// Try to brute force the decryption by testing every possible
// single-byte key.
pub fn brute_force_xor_key(ciphertext: &[u8]) -> Option<(usize, u8, Vec<u8>)> {
    // The previous half-open range `0u8..255u8` silently skipped key
    // 0xFF; the inclusive range covers all 256 byte values.
    Some(try_decrypt_against_key_list(
        ciphertext,
        &(0u8..=255u8).collect::<Vec<u8>>(),
        |b| *b,
        0,
        0,
        &Vec::<u8>::new(),
    ))
}
// Guess the key by pairing the most frequent bytes of the ciphertext
// with the most frequent characters of English text (space + ETAOIN in
// both cases).
pub fn find_xor_key(ciphertext: &[u8]) -> Option<(usize, u8, Vec<u8>)> {
    let frequent_bytes = get_character_histogram(ciphertext);
    let common_english = b" eEtTaAoOiInN";
    try_decrypt_with_test_string(ciphertext, &frequent_bytes, common_english)
}
// Encrypt/decrypt using a repeating key and xor (the operation is its
// own inverse, so the same function serves both directions).
pub fn encrypt_decrypt_repeating_key_xor(key: &[u8], plain_or_ciphertext: &[u8]) -> Vec<u8> {
    // `cycle()` repeats the key lazily, so no temporary repeated-key
    // buffer has to be built, truncated and then thrown away; `zip`
    // stops exactly at the text length.
    plain_or_ciphertext
        .iter()
        .zip(key.iter().cycle())
        .map(|(text_byte, key_byte)| text_byte ^ key_byte)
        .collect()
}
// Returns a list of the keysizes, ordered by distance (best guess first).
pub fn find_repeating_xor_keysize(string: &[u8]) -> Vec<usize> {
    // Candidate key lengths to score (upper bound exclusive).
    let keysizes = 2..40;
    // For each key size...
    let mut keysize_distance_list = keysizes
        .map(|keysize| {
            // split string into keysize chunks and partition into groups of even and odd chunks...
            let (even, odd): (Vec<(usize, &[u8])>, Vec<(usize, &[u8])>) = string
                .chunks(keysize)
                .enumerate()
                .partition(|(i, _)| i % 2 == 0);
            // Zip the groups together and add up the hamming distances between each pair
            let distance = even.into_iter().zip(odd.into_iter()).fold(
                0usize,
                |mut acc, ((_, first), (_, second))| {
                    acc += hamming_distance(&first, &second);
                    acc
                },
            );
            // average distance normalized by dividing by key length
            // avg = total distance / num samples / key length
            // num samples = string length / key length
            // therefore avg distance = total distance over string length
            let normalized = distance as f32 / string.len() as f32;
            (keysize, normalized)
        })
        .collect::<Vec<(usize, f32)>>();
    // Lowest normalized distance first; the right keysize tends to
    // minimize inter-chunk hamming distance.
    keysize_distance_list.sort_by(|a, b| a.1.partial_cmp(&b.1).unwrap());
    keysize_distance_list
        .iter()
        .map(|(keysize, _)| *keysize)
        .collect::<Vec<usize>>()
}
// Break a repeating-key XOR of known `keysize`: transpose the
// ciphertext into `keysize` columns (column i holds every i-th byte, all
// encrypted with the same key byte), solve each column as a single-byte
// XOR, and concatenate the recovered key bytes.
pub fn break_repeating_key_xor(ciphertext: &[u8], keysize: usize) -> Vec<u8> {
    let mut columns: Vec<Vec<u8>> = Vec::new();
    for _ in 0..keysize {
        columns.push(Vec::new());
    }
    for (index, value) in ciphertext.iter().enumerate() {
        columns[index % keysize].push(*value);
    }
    let mut key = Vec::with_capacity(keysize);
    for column in &columns {
        let (_, key_byte, _) = find_xor_key(column).unwrap();
        key.push(key_byte);
    }
    key
}
|
use reqwest;
use std::cell::RefCell;
use std::convert::AsRef;
use std::env;
use std::fs::{self, DirEntry, File};
use std::future::{Future, Ready};
use std::io::{self, BufRead, BufReader};
use std::marker::PhantomData;
use std::path::Path;
use std::process;
use std::sync::{Arc, Mutex};
use tokio::runtime::Runtime;
use tokio::task::{self, JoinHandle};
// Tinify API endpoint every image is POSTed to for compression.
// NOTE(review): Rust convention is SCREAMING_SNAKE_CASE (TARGET_URL);
// renaming would touch the call site below, so it is left as-is here.
const TargetURL: &'static str = "https://api.tinify.com/shrink";
#[tokio::main]
async fn main() {
    // The Tinify API key must be supplied via the environment.
    let authorization = env::var("TINY_PNG_KEY").unwrap_or("".to_string());
    if authorization.len() == 0 {
        eprintln!("please provide a tiny png api key");
        process::exit(1);
    }
    // First CLI argument: directory (or file) to process; defaults to
    // the current directory.
    let target = env::args().nth(1).unwrap_or(".".to_string());
    let client = reqwest::Client::new();
    let mut tiny = Tiny {
        inner: TinyPNG {
            authorization,
            client,
        },
    };
    let mut handles = Vec::new();
    // let tiny = tiny.lock().unwrap();
    // NOTE(review): the io::Result of walk() is discarded — a read_dir
    // failure silently aborts the traversal.
    tiny.walk(&Path::new(&target), &mut handles);
    // Wait for every spawned upload task to finish before exiting.
    for h in handles {
        // let h = *h;
        h.await;
    }
}
/// Shared upload state: the API key plus a reusable HTTP client.
/// Cloned into every spawned upload task.
#[derive(Clone)]
struct TinyPNG {
    authorization: String,
    client: reqwest::Client,
}
/// Directory walker that spawns one upload task per file, delegating
/// the HTTP work to the wrapped `TinyPNG`.
struct Tiny {
    inner: TinyPNG,
    // handles: Vec<JoinHandle<()>>,
    // phantom: PhantomData<&'b T>,
}
/// Errors produced while talking to the TinyPNG API.
enum TinyPNGError {
    // Any transport/HTTP error bubbled up from reqwest.
    RequestError(reqwest::Error),
}
// Error conversion: lets `?` turn a reqwest error into a TinyPNGError.
impl From<reqwest::Error> for TinyPNGError {
    fn from(err: reqwest::Error) -> Self {
        TinyPNGError::RequestError(err)
    }
}
impl Tiny {
    /// Recursively walk `dir`, spawning one Tokio task per regular file
    /// that uploads it via `TinyPNG::post_file`; the task handles are
    /// appended to `handles` so the caller can await them all.
    pub fn walk(&mut self, dir: &Path, handles: &mut Vec<JoinHandle<()>>) -> io::Result<()> {
        if dir.is_dir() {
            for entry in fs::read_dir(dir)? {
                let entry = entry?;
                let path = entry.path();
                if path.is_dir() {
                    self.walk(&path, handles)?;
                } else {
                    // Clone key+client so the spawned task owns its own
                    // copy (the task may outlive this borrow).
                    let tiny_png = self.inner.clone();
                    handles.push(task::spawn(async move {
                        // let c = tiny_png.lock().unwrap();
                        // NOTE(review): the upload Result is discarded;
                        // per-file failures are silently ignored.
                        tiny_png.post_file(&path).await;
                    }));
                }
            }
        }
        Ok(())
    }
    /// Placeholder; completion is awaited in `main` instead.
    pub async fn wait(&self) {}
}
impl TinyPNG {
    /// POST the raw bytes of `file_path` to the Tinify shrink endpoint
    /// using HTTP basic auth ("api" + key) and print the JSON response
    /// body to stdout.
    pub async fn post_file(&self, file_path: &Path) -> Result<(), TinyPNGError> {
        let req = self
            .client
            .post(TargetURL)
            .header("Content-Type", "application/x-www-form-urlencoded")
            .basic_auth("api", Some(&self.authorization))
            // NOTE(review): unwrap panics if the file vanished or became
            // unreadable between the directory walk and this upload.
            .body(fs::read(file_path).unwrap());
        // self.handles.push(tokio::task::spawn(async move || ->Result<(), TinyPNGError>{
        let text = req.send().await?.text().await?;
        println!("{}", text);
        Ok(())
    }
    // Replace the local file with the compressed result (not yet
    // implemented).
    pub fn download_file() {
        //
    }
}
|
// Call Machine layor
// Currently not to use all SBI call, allow unused variables and functions just for now.
#![allow(unused)]
// *************************************************************************************
// SBI
// Issue a RISC-V Supervisor Binary Interface (SBI) legacy call: the
// extension id goes in x17, arguments in x10..x12, and the result comes
// back in x10.
#[inline(always)]
fn sbi_call(which: usize, arg0: usize, arg1: usize, arg2: usize) -> usize {
    let ret;
    unsafe {
        llvm_asm!("ecall"
            : "={x10}" (ret)
            : "{x10}" (arg0), "{x11}" (arg1), "{x12}" (arg2), "{x17}" (which)
            : "memory" // the call may touch memory; clobber so cached values are reloaded
            : "volatile"); // keep the optimizer from eliding or reordering the ecall
    }
    ret
}
// *************************************************************************************
// Legacy SBI v0.1 extension ids.
const SBI_SET_TIMER: usize = 0;
const SBI_CONSOLE_PUTCHAR: usize = 1;
const SBI_CONSOLE_GETCHAR: usize = 2;
const SBI_CLEAR_IPI: usize = 3;
const SBI_SEND_IPI: usize = 4;
const SBI_REMOTE_FENCE_I: usize = 5;
const SBI_REMOTE_SFENCE_VMA: usize = 6;
const SBI_REMOTE_SFENCE_VMA_ASID: usize = 7;
const SBI_SHUTDOWN: usize = 8; // shutdown call
// *************************************************************************************
//
//
//
// Write one character to the SBI console.
// (Rust's normal stdout machinery is unavailable here, hence the SBI call.)
pub fn console_putchar(c: usize) {
    sbi_call(SBI_CONSOLE_PUTCHAR, c, 0, 0);
}
//
//
// Read one character from the SBI console.
// Returns -1 (as usize) on failure.
pub fn console_getchar() -> usize {
    sbi_call(SBI_CONSOLE_GETCHAR, 0, 0, 0)
}
//
//
//
// Shut down the OS (quits QEMU); never returns.
pub fn shutdown() -> ! {
    sbi_call(SBI_SHUTDOWN, 0, 0, 0);
    unreachable!()
}
//
//
//
// Program the next timer interrupt (used by interrupt/timer.rs).
pub fn set_timer(time: usize) {
    sbi_call(SBI_SET_TIMER, time, 0, 0);
}
|
use std::collections::HashMap;
// Puzzle input path, relative to the working directory.
static FILENAME: &str = "input/data";
/// One bus line from the schedule input.
struct Bus {
    // Bus id, which is also its departure period in minutes.
    no: usize,
    // Position (index) of this bus in the comma-separated input list.
    departure: usize,
}
fn main() {
    let data = std::fs::read_to_string(FILENAME).expect("could not read file");
    // First input line: earliest departure time; second line: bus list.
    let time = parse_time(&data);
    let buses: Vec<Bus> = parse_buses(&data);
    println!("{}", part_one(time, &buses));
    println!("{}", part_two(&buses));
}
/// Part one: find the bus with the shortest wait after `time` and
/// return its id multiplied by that wait.
fn part_one(time: usize, buses: &Vec<Bus>) -> usize {
    let mut mem: HashMap<usize, usize> = HashMap::new();
    for bus in buses {
        // Minutes until this bus next departs. Computed with integer
        // arithmetic: the original went through `f32 %`, which loses
        // precision once values exceed f32's 24-bit mantissa.
        let wait = bus.no - time % bus.no;
        mem.insert(bus.no, wait);
    }
    // Pick the (id, wait) pair with the smallest wait.
    let mut lowest = (0, std::usize::MAX);
    for (k, v) in mem {
        if v < lowest.1 {
            lowest = (k, v);
        }
    }
    lowest.0 * lowest.1
}
/// Part two: earliest time `t` such that each bus departs at
/// `t + departure` — a Chinese-Remainder-style sieve that satisfies the
/// buses one at a time.
fn part_two(buses: &Vec<Bus>) -> usize {
    let mut time = 0;
    // Product of the bus ids already satisfied; stepping by it keeps
    // every earlier constraint intact. (Assumes ids are pairwise
    // coprime, as in the puzzle input — TODO confirm.)
    let mut lcd = 1;
    for bus in buses.iter() {
        while (time + bus.departure) % bus.no != 0 {
            time += lcd;
        }
        lcd *= bus.no;
    }
    time
}
/// Parse the earliest-departure timestamp from the first input line.
///
/// Returns 0 for empty input; panics when the first line is not a number
/// (matching the original unwrap behavior). The original looped over
/// every line only to use line 0 — `lines().next()` reads just the one
/// line needed.
fn parse_time(data: &str) -> usize {
    data.lines()
        .next()
        .map(|line| line.parse::<usize>().unwrap())
        .unwrap_or(0)
}
/// Parse the bus list from the second input line: comma-separated ids
/// with "x" marking unused slots; each bus remembers its slot index.
///
/// Returns an empty Vec when the input has fewer than two lines; panics
/// on a non-numeric, non-"x" entry (matching the original unwrap).
/// The original round-tripped every entry through an infallible
/// `Option<Vec<String>>` collect before parsing — the slices can be
/// parsed directly.
fn parse_buses(data: &str) -> Vec<Bus> {
    let mut buses: Vec<Bus> = Vec::new();
    // Only the second line holds the schedule; all others are ignored.
    if let Some(line) = data.lines().nth(1) {
        for (count, entry) in line.trim().split(',').enumerate() {
            if entry != "x" {
                buses.push(Bus {
                    no: entry.parse::<usize>().unwrap(),
                    departure: count,
                });
            }
        }
    }
    buses
}
// Regression tests pinned to the author's personal puzzle input; they
// require input/data to exist on disk.
// NOTE(review): this module lacks #[cfg(test)], so it is compiled into
// non-test builds as well.
mod tests {
    #[test]
    fn test_part_one() {
        let data = std::fs::read_to_string(super::FILENAME).expect("could not read file");
        let time = super::parse_time(&data);
        let buses = super::parse_buses(&data);
        assert_eq!(3385, super::part_one(time, &buses));
    }
    #[test]
    fn test_part_two() {
        let data = std::fs::read_to_string(super::FILENAME).expect("could not read file");
        let buses = super::parse_buses(&data);
        assert_eq!(600689120448303, super::part_two(&buses));
    }
}
|
use std::mem;
/// A singly linked stack of `u32` values (LIFO): `insert` pushes onto
/// the front and `pop` removes from the front.
pub struct LinkedList {
    tail: Link
}
/// One heap-allocated cell of the list.
struct Node {
    data: u32,
    next: Link,
}
/// Either the end of the list or an owning pointer to the next node.
enum Link {
    Nothing,
    Something(Box<Node>),
}
impl LinkedList {
    /// Create an empty list.
    pub fn new() -> Self {
        LinkedList {
            tail: Link::Nothing,
        }
    }
    /// Push `data` onto the front of the list.
    pub fn insert(
        &mut self,
        data: u32
    ) {
        // mem::replace moves the current head out while leaving
        // Link::Nothing behind, so `self` never sits in a moved-from
        // state (enums move by default and cannot be taken by value
        // from behind &mut self).
        let old_head = mem::replace(&mut self.tail, Link::Nothing);
        let node = Box::new(Node {
            data,
            next: old_head,
        });
        self.tail = Link::Something(node);
    }
    /// Pop the front value, or `None` when the list is empty.
    pub fn pop(&mut self) -> Option<u32> {
        // Take the head out without borrowing, leaving Nothing behind.
        if let Link::Something(boxed) = mem::replace(&mut self.tail, Link::Nothing) {
            // Move the whole node out of its box, then reattach the
            // successor as the new head.
            let node = *boxed;
            self.tail = node.next;
            Some(node.data)
        } else {
            None
        }
    }
    /// Free all nodes iteratively, leaving the list empty. Avoids the
    /// deep recursion a naive drop of a long chain would cause.
    pub fn drop(&mut self) {
        let mut cursor = mem::replace(&mut self.tail, Link::Nothing);
        while let Link::Something(mut node) = cursor {
            // Detach the successor before `node` is freed at the end of
            // this iteration's scope.
            cursor = mem::replace(&mut node.next, Link::Nothing);
        }
    }
}
#[cfg(test)]
mod tests {
    use super::LinkedList;
    // Push four values, pop two (LIFO order), drop the rest, and verify
    // the list is then empty.
    #[test]
    fn test_create() {
        let mut list = LinkedList::new();
        list.insert(10);
        list.insert(20);
        list.insert(30);
        list.insert(40);
        assert_eq!(
            list.pop(),
            Some(40),
            "40 expected !"
        );
        assert_eq!(
            list.pop(),
            Some(30),
            "30 expected !"
        );
        list.drop();
        assert_eq!(
            list.pop(),
            None,
            "None is expected !"
        );
    }
}
|
// Re-export the architecture-specific division helpers at this module's root.
pub use platform::*;
#[cfg(any(target_arch = "x86", target_arch = "x86_64"))]
pub mod platform {
use option::Option::{self, Some, None};
pub fn u8_safe_div(lhs: u8, rhs: u8) -> Option<(u8, u8)> {
if rhs == 0 {
None
} else {
let div: u8;
let rem: u8;
unsafe {
asm!("div $0"
: "={al}"(div), "={ah}"(rem)
: "{al}"(lhs), "{ah}"(0u8), "0"(rhs));
}
(div, rem)
}
}
pub fn u16_safe_div(lhs: u16, rhs: u16) -> Option<(u16, u16)> {
if rhs == 0 {
None
} else {
let div: u16;
let rem: u16;
unsafe {
asm!("div $0"
: "={ax}"(div), "={dx}"(rem)
: "{ax}"(lhs), "{dx}"(0u8), "0"(rhs));
}
(div, rem)
}
}
pub fn u32_safe_div(lhs: u32, rhs: u32) -> Option<(u32, u32)> {
if rhs == 0 {
None
} else {
let div: u32;
let rem: u32;
unsafe {
asm!("div $0"
: "={eax}"(div), "={edx}"(rem)
: "{eax}"(lhs), "{edx}"(0u8), "0"(rhs));
}
(div, rem)
}
}
#[cfg(target_arch = "x86_64")]
pub fn u64_safe_div(lhs: u64, rhs: u64) -> Option<(u64, u64)> {
if rhs == 0 {
None
} else {
let div: u64;
let rem: u64;
unsafe {
asm!("div $0"
: "={rax}"(div), "={rdx}"(rem)
: "{rax}"(lhs), "{rdx}"(0u8), "0"(rhs));
}
(div, rem)
}
}
#[cfg(target_arch = "x86")]
pub fn usize_safe_div(lhs: usize, rhs: usize) -> Option<(usize, usize)> {
u32_safe_div(lhs as u32, rhs as u32)
}
#[cfg(target_arch = "x86_64")]
pub fn usize_safe_div(lhs: usize, rhs: usize) -> Option<(usize, usize)> {
u64_safe_div(lhs as u32, rhs as u32)
}
}
|
use sdl2::{surface::SurfaceRef, rect::Rect, video::Window, render::{Texture, Canvas}, pixels::PixelFormatEnum};
/// Canvas helpers: blit a surface region and allocate a `'static`
/// cache texture.
pub trait RenderSurface {
    /// Copy `src_box` out of `src` and draw it into `dest` on this canvas.
    fn render_surface(&mut self, dest: Rect, src: &SurfaceRef, src_box: Rect) -> Result<(),String>;
    /// Create a static texture of the given `size` and pixel format `p`.
    fn create_cache_tex(&mut self, size: (u32,u32), p: PixelFormatEnum) -> Result<Texture<'static>,String>;
}
impl RenderSurface for Canvas<Window> {
    #[inline]
    fn render_surface(&mut self, dest: Rect, src: &SurfaceRef, src_box: Rect) -> Result<(),String> {
        // A fresh texture is created from the surface on every call;
        // fine for occasional blits, costly inside a per-frame loop.
        let t = self.texture_creator();
        let t = t
            .create_texture_from_surface(src)
            .map_err(|e| e.to_string() )?;
        self.copy(&t,src_box,dest)
    }
    #[inline]
    fn create_cache_tex(&mut self, size: (u32,u32), p: PixelFormatEnum) -> Result<Texture<'static>,String> {
        let t = self.texture_creator();
        // Box::leak deliberately leaks the TextureCreator so the
        // returned texture can carry a 'static lifetime.
        // NOTE(review): every call leaks one creator — callers should
        // cache the returned texture rather than calling repeatedly.
        let t = Box::leak(Box::new(t));
        t
            .create_texture_static(p, size.0, size.1)
            .map_err(|e| e.to_string() )
    }
}
|
mod topic;
mod user;
use topic::*;
use user::*;
use crate::controller::ControllerConfig;
use async_trait::async_trait;
use drogue_client::{
core::v1::Conditions,
meta::v1::CommonMetadataMut,
registry::{self, v1::KafkaAppStatus},
Translator,
};
use drogue_cloud_operator_common::controller::{
base::{ConditionExt, ControllerOperation, ProcessOutcome, ReadyState, CONDITION_RECONCILED},
reconciler::{
progress::{
self, application::ApplicationAccessor, OperationOutcome, Progressor, RunConstructor,
},
ReconcileError, ReconcileProcessor, ReconcileState, Reconciler,
},
};
use drogue_cloud_service_api::kafka::{make_kafka_resource_name, ResourceType};
use k8s_openapi::api::core::v1::Secret;
use kube::{
api::{ApiResource, DynamicObject},
Api,
};
use operator_framework::install::Delete;
use std::{ops::Deref, time::Duration};
/// Finalizer entry that blocks application deletion until the Kafka
/// resources created here are cleaned up (see `deconstruct`).
const FINALIZER: &str = "kafka";
/// Strimzi label selecting the Kafka cluster a topic/user belongs to.
const LABEL_KAFKA_CLUSTER: &str = "strimzi.io/cluster";
/// Annotation carrying the originating Drogue application name.
pub const ANNOTATION_APP_NAME: &str = "drogue.io/application-name";
/// Controller reconciling Drogue applications with their Kafka resources
/// (Strimzi `KafkaTopic`/`KafkaUser` custom resources plus a credentials `Secret`).
pub struct ApplicationController {
    config: ControllerConfig,
    // Client for the Drogue device registry.
    registry: registry::v1::Client,
    // Dynamic API descriptor/handle pair for Kafka topic custom resources.
    kafka_topic_resource: ApiResource,
    kafka_topics: Api<DynamicObject>,
    // Dynamic API descriptor/handle pair for Kafka user custom resources.
    kafka_user_resource: ApiResource,
    kafka_users: Api<DynamicObject>,
    // Access to Kubernetes secrets (used for user credentials cleanup).
    secrets: Api<Secret>,
}
impl ApplicationController {
    /// Creates a new controller from pre-built configuration, registry
    /// client, and Kubernetes API handles.
    pub fn new(
        config: ControllerConfig,
        registry: registry::v1::Client,
        kafka_topic_resource: ApiResource,
        kafka_topics: Api<DynamicObject>,
        kafka_user_resource: ApiResource,
        kafka_users: Api<DynamicObject>,
        secrets: Api<Secret>,
    ) -> Self {
        Self {
            config,
            registry,
            kafka_topic_resource,
            kafka_topics,
            kafka_user_resource,
            kafka_users,
            secrets,
        }
    }
}
#[async_trait]
impl ControllerOperation<String, registry::v1::Application, registry::v1::Application>
    for ApplicationController
{
    /// Runs one reconciliation pass for `application` by delegating to an
    /// `ApplicationReconciler` borrowing this controller's shared handles.
    async fn process_resource(
        &self,
        application: registry::v1::Application,
    ) -> Result<ProcessOutcome<registry::v1::Application>, ReconcileError> {
        ReconcileProcessor(ApplicationReconciler {
            config: &self.config,
            registry: &self.registry,
            kafka_topic_resource: &self.kafka_topic_resource,
            kafka_topics: &self.kafka_topics,
            kafka_user_resource: &self.kafka_user_resource,
            kafka_users: &self.kafka_users,
            secrets: &self.secrets,
        })
        .reconcile(application)
        .await
    }
    /// Error-recovery hook: records a failed "reconciled" condition carrying
    /// `message` on the application's Kafka status section.
    async fn recover(
        &self,
        message: &str,
        mut app: registry::v1::Application,
    ) -> Result<registry::v1::Application, ()> {
        // Start from the existing conditions, if the status section parses.
        let mut conditions = app
            .section::<KafkaAppStatus>()
            .and_then(|s| s.ok().map(|s| s.conditions))
            .unwrap_or_default();
        conditions.update(CONDITION_RECONCILED, ReadyState::Failed(message.into()));
        app.finish_ready::<KafkaAppStatus>(conditions, app.metadata.generation)
            .map_err(|_| ())?;
        Ok(app)
    }
}
/// Convenience: registry-client methods can be called directly on the controller.
impl Deref for ApplicationController {
    type Target = registry::v1::Client;
    fn deref(&self) -> &Self::Target {
        &self.registry
    }
}
/// Mutable state threaded through the construction (creation) steps.
/// The `Option` fields start as `None` and are filled in by the
/// individual `Progressor` steps as resources are created/resolved.
pub struct ConstructContext {
    pub app: registry::v1::Application,
    pub events_topic: Option<DynamicObject>,
    pub events_topic_name: Option<String>,
    pub app_user: Option<DynamicObject>,
    pub app_user_name: Option<String>,
}
/// State used while tearing down an application's Kafka resources.
pub struct DeconstructContext {
    pub app: registry::v1::Application,
    // Last known Kafka status section, if it parsed successfully.
    pub status: Option<KafkaAppStatus>,
}
/// Borrowed view of the controller's handles for a single reconcile run;
/// constructed fresh per call in `process_resource`.
pub struct ApplicationReconciler<'a> {
    pub config: &'a ControllerConfig,
    pub registry: &'a registry::v1::Client,
    pub kafka_topic_resource: &'a ApiResource,
    pub kafka_topics: &'a Api<DynamicObject>,
    pub kafka_user_resource: &'a ApiResource,
    pub kafka_users: &'a Api<DynamicObject>,
    pub secrets: &'a Api<Secret>,
}
#[async_trait]
impl<'a> Reconciler for ApplicationReconciler<'a> {
    type Input = registry::v1::Application;
    type Output = registry::v1::Application;
    type Construct = ConstructContext;
    type Deconstruct = DeconstructContext;
    /// Decides whether the application needs constructing, deconstructing,
    /// or nothing, based on its deletion timestamp and our finalizer.
    async fn eval_state(
        &self,
        app: Self::Input,
    ) -> Result<ReconcileState<Self::Output, Self::Construct, Self::Deconstruct>, ReconcileError>
    {
        let status = app.section::<KafkaAppStatus>().and_then(|s| s.ok());
        // Do we still hold our finalizer, and is the resource being deleted?
        let configured = app.metadata.finalizers.iter().any(|f| f == FINALIZER);
        let deleted = app.metadata.deletion_timestamp.is_some();
        Ok(match (configured, deleted) {
            // Not deleted: (re-)construct, regardless of finalizer state.
            (_, false) => ReconcileState::Construct(ConstructContext {
                app,
                events_topic: None,
                events_topic_name: None,
                app_user: None,
                app_user_name: None,
            }),
            // Deleted while we hold the finalizer: tear our resources down.
            (true, true) => ReconcileState::Deconstruct(DeconstructContext { app, status }),
            // Deleted and finalizer already removed: nothing left for us.
            (false, true) => ReconcileState::Ignore(app),
        })
    }
    /// Drives the creation steps (finalizer, topic, user, readiness checks)
    /// through a `Progressor`, recording progress in `KafkaAppStatus`.
    async fn construct(
        &self,
        ctx: Self::Construct,
    ) -> Result<ProcessOutcome<Self::Output>, ReconcileError> {
        Progressor::<Self::Construct>::new(vec![
            Box::new(("HasFinalizer", |mut ctx: Self::Construct| async {
                // ensure we have a finalizer
                if ctx.app.metadata.ensure_finalizer(FINALIZER) {
                    // early return
                    Ok(OperationOutcome::Retry(ctx, None))
                } else {
                    Ok(OperationOutcome::Continue(ctx))
                }
            })),
            Box::new(CreateTopic {
                api: &self.kafka_topics,
                resource: &self.kafka_topic_resource,
                config: &self.config,
            }),
            Box::new(TopicReady {
                config: &self.config,
            }),
            Box::new(CreateUser {
                users_api: &self.kafka_users,
                users_resource: &self.kafka_user_resource,
                secrets_api: &self.secrets,
                config: &self.config,
            }),
            Box::new(UserReady {
                config: &self.config,
                secrets: &self.secrets,
            }),
        ])
        .run_with::<KafkaAppStatus>(ctx)
        .await
    }
    /// Deletes the app's topic, user, and password secret, then removes the
    /// finalizer so Kubernetes can complete the deletion.
    async fn deconstruct(
        &self,
        mut ctx: Self::Deconstruct,
    ) -> Result<ProcessOutcome<Self::Output>, ReconcileError> {
        // Derive the resource names from the application name.
        let topic_name =
            make_kafka_resource_name(ResourceType::Events(ctx.app.metadata.name.clone()));
        let user_name =
            make_kafka_resource_name(ResourceType::Users(ctx.app.metadata.name.clone()));
        let password_name =
            make_kafka_resource_name(ResourceType::Passwords(ctx.app.metadata.name.clone()));
        // remove topic
        self.kafka_topics
            .delete_optionally(&topic_name, &Default::default())
            .await?;
        self.kafka_users
            .delete_optionally(&user_name, &Default::default())
            .await?;
        self.secrets
            .delete_optionally(&password_name, &Default::default())
            .await?;
        // TODO: wait for resources to be actually deleted, then remove the finalizer
        // remove finalizer
        ctx.app.metadata.finalizers.retain(|f| f != FINALIZER);
        // done
        Ok(ProcessOutcome::Complete(ctx.app))
    }
}
impl ApplicationAccessor for ConstructContext {
    // Fix: the return type previously read "®istry::v1::Application" — a
    // mojibake of "&registry" ("&reg" rendered as the ® character) — which
    // does not parse.
    /// Immutable access to the application under reconciliation.
    fn app(&self) -> &registry::v1::Application {
        &self.app
    }
    /// Mutable access to the application under reconciliation.
    fn app_mut(&mut self) -> &mut registry::v1::Application {
        &mut self.app
    }
    /// Consumes the context, yielding the application.
    fn into(self) -> registry::v1::Application {
        self.app
    }
    /// Current conditions from the Kafka status section, or defaults when
    /// the section is missing or fails to parse.
    fn conditions(&self) -> Conditions {
        self.app
            .section::<KafkaAppStatus>()
            .and_then(|s| s.ok())
            .unwrap_or_default()
            .conditions
    }
}
/// Helper: ask the progressor to retry the current step in 15 seconds,
/// keeping the context unchanged.
fn retry<C>(ctx: C) -> progress::Result<C>
where
    C: Send + Sync,
{
    Ok(OperationOutcome::Retry(ctx, Some(Duration::from_secs(15))))
}
/// Looks up the condition named `condition` in the resource's
/// `status.conditions` array and interprets its `status` field:
/// `"True"` → `Some(true)`, `"False"` → `Some(false)`; anything else
/// (missing array, no matching entry, unknown status) → `None`.
fn condition_ready(condition: &str, resource: &DynamicObject) -> Option<bool> {
    let conditions = resource.data["status"]["conditions"].as_array()?;
    conditions
        .iter()
        .filter_map(|entry| entry.as_object())
        .find_map(|entry| {
            if entry["type"] != condition {
                return None;
            }
            match entry["status"].as_str() {
                Some("True") => Some(true),
                Some("False") => Some(false),
                _ => None,
            }
        })
}
|
// date and time
extern crate chrono;
use crud::chrono::prelude::Utc;
// use models::{NewPost};
use models::{NewPost, NewUser, NewYtb};
/// Builds a `NewPost` record, stamping both `created_at` and `updated_at`
/// with the current UTC time (naive, i.e. without timezone info).
pub fn create_post(
    user_id: i32,
    media_url: Option<String>,
    media_title: Option<String>,
    title: String,
    subtitle: String,
    content: String,
    tags: Option<Vec<String>>,
) -> NewPost {
    // Same timestamp for both fields: a fresh post counts as "just updated".
    let now = Utc::now().naive_utc();
    NewPost {
        user_id,
        media_url,
        media_title,
        title,
        subtitle,
        content,
        created_at: now,
        updated_at: Some(now),
        tags,
    }
}
/// Builds a `NewUser` record from the given fields.
///
/// NOTE(review): unlike `create_post`, no creation timestamp is set here
/// (see the commented-out lines); confirm whether that is intentional.
pub fn create_user(
    first_name: String,
    last_name: String,
    email: String,
    password: String,
    avatar: Option<String>,
    youtube_channel: Option<String>
) -> NewUser {
    // let now = Utc::now().naive_utc();
    // include created_at
    NewUser {
        first_name,
        last_name,
        email,
        password,
        avatar,
        youtube_channel,
    }
}
/// Builds a `NewYtb` (YouTube entry) record from the given fields.
pub fn create_ytb(
    id: String,
    user_id: String,
    content: String,
    // tags: Option<Vec<String>>,
) -> NewYtb {
    NewYtb {
        id,
        user_id,
        content,
        // tags,
    }
}
|
use crate::gameloop::*;
use crate::prelude::*;
use bincode::Options;
use js_sys::JsString;
use serde::de::DeserializeOwned;
use std::future::Future;
use wasm_bindgen::prelude::*;
use wasm_bindgen::JsCast;
use wasm_bindgen_futures::spawn_local;
use web_sys::HtmlElement;
use winit::dpi::LogicalSize;
use winit::event::WindowEvent;
use winit::event_loop::{EventLoop, EventLoopProxy};
use winit::platform::web::WindowExtWebSys;
use winit::window::{Window, WindowBuilder};
/// Bootstraps the browser game: builds a window sized to the document body,
/// creates a WebGL2 context (with `alpha: false`), runs the async `init` to
/// construct the game, attaches the canvas to the body, and finally starts
/// the game loop at `ups` updates per second.
pub fn launch<G, F>(
    wb: WindowBuilder,
    ups: f64,
    lockstep: bool,
    init: impl FnOnce(&Window, Gl, EventLoopProxy<G::UserEvent>, LocalExecutor) -> F,
) where
    G: Game + 'static,
    F: Future<Output = G> + 'static,
{
    let el = EventLoop::with_user_event();
    let document = web_sys::window().unwrap().document().unwrap();
    let container = document.body().unwrap();
    // Size the window to the current body dimensions.
    let w = container.client_width();
    let h = container.client_height();
    let window = wb
        .with_inner_size(LogicalSize::new(w, h))
        .build(&el)
        .unwrap();
    // Request an opaque backbuffer: { alpha: false }.
    let attributes = js_sys::Object::new();
    js_sys::Reflect::set(&attributes, &"alpha".into(), &false.into()).unwrap();
    let gl = Gl::new(
        window
            .canvas()
            .get_context_with_context_options("webgl2", &attributes)
            .unwrap()
            .unwrap()
            .dyn_into()
            .unwrap(),
    );
    unsafe {
        // Bind one default vertex array for the lifetime of the app.
        gl.bind_vertex_array(gl.create_vertex_array().ok());
    }
    let game_future = init(
        &window,
        gl,
        el.create_proxy(),
        LocalExecutor { _private: () },
    );
    spawn_local(async move {
        let game = GamePlatformWrapper {
            game: game_future.await,
            container,
            window,
        };
        // Only attach (and focus) the canvas once init has finished.
        game.container
            .append_with_node_1(&game.window.canvas())
            .unwrap();
        game.window.canvas().focus().ok();
        // Defer starting the loop to the next browser tick.
        webutil::global::set_timeout(0, move || gameloop(el, game, ups, lockstep)).forget();
    });
}
/// Bundles the game with its DOM container and winit window so the game
/// loop can drive updates, rendering, and resize handling together.
pub(crate) struct GamePlatformWrapper<G: Game> {
    game: G,
    container: HtmlElement,
    window: Window,
}
/// Handle for spawning local (non-`Send`) futures; the private field
/// prevents construction outside this module.
#[derive(Clone)]
pub struct LocalExecutor {
    _private: (),
}
impl<G: Game> GamePlatformWrapper<G> {
    /// Advances the game by one fixed-rate update tick.
    pub(crate) fn update(&mut self) -> GameloopCommand {
        self.game.update(&self.window)
    }
    /// Renders a frame; `alpha` is the interpolation factor between updates.
    pub(crate) fn render(&mut self, alpha: f64, smooth_delta: f64) {
        self.game.render(&self.window, alpha, smooth_delta);
    }
    /// Forwards a winit window event to the game.
    pub(crate) fn event(&mut self, event: WindowEvent) -> GameloopCommand {
        self.game.event(&self.window, event)
    }
    /// Forwards a user-defined event to the game.
    pub(crate) fn user_event(&mut self, event: G::UserEvent) -> GameloopCommand {
        self.game.user_event(&self.window, event)
    }
    /// Called at frame start: resizes the window to track the container.
    pub(crate) fn begin_frame(&mut self) {
        let w = self.container.client_width();
        let h = self.container.client_height();
        self.window.set_inner_size(LogicalSize::new(w, h));
    }
}
impl LocalExecutor {
    /// Spawns a future on the current thread's browser event loop.
    pub fn spawn(&self, f: impl Future<Output = ()> + 'static) {
        spawn_local(f);
    }
}
/// Fetches `source` and returns its contents as a byte vector.
pub async fn load_binary(source: &str) -> Result<Vec<u8>, String> {
    let buffer = super::load_buffer(source).await?;
    Ok(js_sys::Uint8Array::new(&buffer).to_vec())
}
// Raw bindings to `localStorage.getItem` / `localStorage.setItem`;
// `catch` converts JavaScript exceptions into `Err(JsValue)`.
#[wasm_bindgen]
extern "C" {
    #[wasm_bindgen(catch, js_namespace = localStorage, js_name = getItem)]
    fn get_item(key: &str) -> Result<Option<JsString>, JsValue>;
    #[wasm_bindgen(catch, js_namespace = localStorage, js_name = setItem)]
    fn set_item(key: &str, value: &JsString) -> Result<(), JsValue>;
}
/// Serializes `value` with bincode and stores it under `key` in
/// `localStorage`, packing two bytes into each UTF-16 code unit of the
/// stored string.
///
/// NOTE(review): the `human_readable` flag is currently ignored — values are
/// always stored in the packed binary form; confirm whether a readable
/// (e.g. JSON) path was intended.
pub fn store<T: Serialize>(key: &str, value: &T, human_readable: bool) -> Result<(), String> {
    let mut serialized = bincode::serialize(value).map_err(|e| e.to_string())?;
    // Pad to an even length so the bytes can be viewed as u16s below.
    // (`load` tolerates the extra byte via allow_trailing_bytes.)
    if serialized.len() % 2 != 0 {
        serialized.push(0);
    }
    // Warn (but don't fail) when approaching the common localStorage quota.
    if serialized.len() > 5 * 1024 * 1024 {
        web_sys::console::warn_1(&JsValue::from_str(&format!(
            "Local storage object '{}' exceeds 5 MB",
            key
        )));
    }
    let value = JsString::from_char_code(unsafe {
        // View the even-length [u8] as a [u16].
        // This is little-endian because wasm32 is little-endian.
        std::slice::from_raw_parts(serialized.as_ptr() as *const _, serialized.len() / 2)
    });
    set_item(key, &value).map_err(super::js_err)
}
/// Loads and deserializes the value stored under `key`, reversing the
/// u16-packing performed by `store`. Returns `Ok(None)` when the key is
/// absent.
///
/// NOTE(review): `human_readable` is ignored here too — keep in sync with
/// whatever `store` ends up doing with it.
pub fn load<T: DeserializeOwned>(key: &str, human_readable: bool) -> Result<Option<T>, String> {
    let data = match get_item(key) {
        Ok(Some(v)) => v.iter().collect::<Vec<_>>(),
        Ok(None) => return Ok(None),
        Err(e) => return Err(super::js_err(e)),
    };
    let data = unsafe {
        // View the [u16] as a [u8].
        // This is little-endian because wasm32 is little-endian.
        std::slice::from_raw_parts(data.as_ptr() as *const _, data.len() * 2)
    };
    // allow_trailing_bytes tolerates the padding byte `store` may append.
    bincode::options()
        .allow_trailing_bytes()
        .deserialize(data)
        .map_err(|e| e.to_string())
}
|
#![doc = "generated by AutoRust 0.1.0"]
#![allow(non_camel_case_types)]
#![allow(unused_imports)]
use serde::{Deserialize, Serialize};
/// Authentication scheme used by a service linker connection.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum AuthType {
    #[serde(rename = "systemAssignedIdentity")]
    SystemAssignedIdentity,
    #[serde(rename = "userAssignedIdentity")]
    UserAssignedIdentity,
    #[serde(rename = "servicePrincipalSecret")]
    ServicePrincipalSecret,
    #[serde(rename = "servicePrincipalCertificate")]
    ServicePrincipalCertificate,
    #[serde(rename = "secret")]
    Secret,
}
/// Base (discriminator) part shared by all auth-info payloads.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct AuthInfoBase {
    #[serde(rename = "authType")]
    pub auth_type: AuthType,
}
/// Username/password (secret) authentication payload.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SecretAuthInfo {
    #[serde(flatten)]
    pub auth_info_base: AuthInfoBase,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub secret: Option<String>,
}
/// User-assigned managed identity authentication payload.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct UserAssignedIdentityAuthInfo {
    #[serde(flatten)]
    pub auth_info_base: AuthInfoBase,
    #[serde(rename = "clientId")]
    pub client_id: String,
    #[serde(rename = "subscriptionId")]
    pub subscription_id: String,
}
/// System-assigned managed identity authentication payload (no extra fields).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SystemAssignedIdentityAuthInfo {
    #[serde(flatten)]
    pub auth_info_base: AuthInfoBase,
}
/// Service-principal-with-secret authentication payload.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ServicePrincipalSecretAuthInfo {
    #[serde(flatten)]
    pub auth_info_base: AuthInfoBase,
    #[serde(rename = "clientId")]
    pub client_id: String,
    #[serde(rename = "principalId")]
    pub principal_id: String,
    pub secret: String,
}
/// Service-principal-with-certificate authentication payload.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ServicePrincipalCertificateAuthInfo {
    #[serde(flatten)]
    pub auth_info_base: AuthInfoBase,
    #[serde(rename = "clientId")]
    pub client_id: String,
    #[serde(rename = "principalId")]
    pub principal_id: String,
    pub certificate: String,
}
/// A service linker resource (proxy resource + linker-specific properties).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct LinkerResource {
    #[serde(flatten)]
    pub proxy_resource: ProxyResource,
    pub properties: LinkerProperties,
    #[serde(rename = "systemData", default, skip_serializing_if = "Option::is_none")]
    pub system_data: Option<SystemData>,
}
/// PATCH payload for updating a linker's properties.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct LinkerPatch {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<LinkerProperties>,
}
/// Paged list of linker resources.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct LinkerList {
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<LinkerResource>,
}
/// Properties of a linker: target resource, auth, client type, state.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct LinkerProperties {
    #[serde(rename = "targetId", default, skip_serializing_if = "Option::is_none")]
    pub target_id: Option<String>,
    #[serde(rename = "authInfo", default, skip_serializing_if = "Option::is_none")]
    pub auth_info: Option<AuthInfoBase>,
    #[serde(rename = "clientType", default, skip_serializing_if = "Option::is_none")]
    pub client_type: Option<linker_properties::ClientType>,
    #[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")]
    pub provisioning_state: Option<String>,
}
pub mod linker_properties {
    use super::*;
    /// Application stack consuming the linked service.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum ClientType {
        #[serde(rename = "none")]
        None,
        #[serde(rename = "dotnet")]
        Dotnet,
        #[serde(rename = "java")]
        Java,
        #[serde(rename = "python")]
        Python,
        #[serde(rename = "go")]
        Go,
        #[serde(rename = "php")]
        Php,
        #[serde(rename = "ruby")]
        Ruby,
        #[serde(rename = "django")]
        Django,
        #[serde(rename = "nodejs")]
        Nodejs,
        #[serde(rename = "springBoot")]
        SpringBoot,
    }
}
/// A single name/value configuration entry produced for the source app.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SourceConfiguration {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub value: Option<String>,
}
/// List wrapper for generated source configurations.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SourceConfigurationResult {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub configurations: Vec<SourceConfiguration>,
}
/// Outcome of validating a linker connection.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ValidateResult {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(rename = "linkerStatus", default, skip_serializing_if = "Option::is_none")]
    pub linker_status: Option<validate_result::LinkerStatus>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub reason: Option<String>,
    #[serde(rename = "reportStartTimeUtc", default, skip_serializing_if = "Option::is_none")]
    pub report_start_time_utc: Option<String>,
    #[serde(rename = "reportEndTimeUtc", default, skip_serializing_if = "Option::is_none")]
    pub report_end_time_utc: Option<String>,
    #[serde(rename = "targetId", default, skip_serializing_if = "Option::is_none")]
    pub target_id: Option<String>,
    #[serde(rename = "authType", default, skip_serializing_if = "Option::is_none")]
    pub auth_type: Option<AuthType>,
}
pub mod validate_result {
    use super::*;
    /// Health state reported for a linker.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum LinkerStatus {
        Healthy,
        #[serde(rename = "Not healthy")]
        NotHealthy,
    }
}
/// Standard ARM error envelope.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ErrorResponse {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub error: Option<ErrorDetail>,
}
/// Standard ARM error detail (possibly nested).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ErrorDetail {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub code: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub message: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub target: Option<String>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub details: Vec<ErrorDetail>,
    #[serde(rename = "additionalInfo", default, skip_serializing_if = "Vec::is_empty")]
    pub additional_info: Vec<ErrorAdditionalInfo>,
}
/// Free-form additional error info attached to an `ErrorDetail`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ErrorAdditionalInfo {
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub info: Option<serde_json::Value>,
}
/// Paged list of REST API operations exposed by the provider.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct OperationListResult {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<Operation>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
/// A single REST API operation descriptor.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Operation {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(rename = "isDataAction", default, skip_serializing_if = "Option::is_none")]
    pub is_data_action: Option<bool>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub display: Option<operation::Display>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub origin: Option<operation::Origin>,
    #[serde(rename = "actionType", default, skip_serializing_if = "Option::is_none")]
    pub action_type: Option<operation::ActionType>,
}
pub mod operation {
    use super::*;
    /// Localized display strings for an operation.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub struct Display {
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub provider: Option<String>,
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub resource: Option<String>,
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub operation: Option<String>,
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub description: Option<String>,
    }
    /// Who the operation is intended for (user, system, or both).
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Origin {
        #[serde(rename = "user")]
        User,
        #[serde(rename = "system")]
        System,
        #[serde(rename = "user,system")]
        UserSystem,
    }
    /// Extra action metadata; `Internal` marks internal-only actions.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum ActionType {
        Internal,
    }
}
/// ARM creation/modification audit metadata.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SystemData {
    #[serde(rename = "createdBy", default, skip_serializing_if = "Option::is_none")]
    pub created_by: Option<String>,
    #[serde(rename = "createdByType", default, skip_serializing_if = "Option::is_none")]
    pub created_by_type: Option<system_data::CreatedByType>,
    #[serde(rename = "createdAt", default, skip_serializing_if = "Option::is_none")]
    pub created_at: Option<String>,
    #[serde(rename = "lastModifiedBy", default, skip_serializing_if = "Option::is_none")]
    pub last_modified_by: Option<String>,
    #[serde(rename = "lastModifiedByType", default, skip_serializing_if = "Option::is_none")]
    pub last_modified_by_type: Option<system_data::LastModifiedByType>,
    #[serde(rename = "lastModifiedAt", default, skip_serializing_if = "Option::is_none")]
    pub last_modified_at: Option<String>,
}
pub mod system_data {
    use super::*;
    /// Kind of principal that created the resource.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum CreatedByType {
        User,
        Application,
        ManagedIdentity,
        Key,
    }
    /// Kind of principal that last modified the resource.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum LastModifiedByType {
        User,
        Application,
        ManagedIdentity,
        Key,
    }
}
/// ARM proxy resource: a `Resource` without location/tags.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ProxyResource {
    #[serde(flatten)]
    pub resource: Resource,
}
/// Common ARM resource identity fields.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Resource {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
}
|
use std::io::{self};
/// Reads one line from stdin and prints the number of '1' characters in it.
fn main() {
    let mut input = String::new();
    io::stdin().read_line(&mut input).unwrap();
    print!("{}", count(input.as_str()));
}
/// Counts the occurrences of the character '1' in `line`.
fn count(line: &str) -> i32 {
    line.chars().filter(|&c| c == '1').count() as i32
}
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn test_count_1() {
        assert_eq!(count("101"), 2);
    }
    // Fix: this function was missing the `#[test]` attribute, so the test
    // never ran (and would have been flagged as dead code).
    #[test]
    fn test_count_2() {
        assert_eq!(count("000"), 0);
    }
}
|
//! Transport from BigQuery Source to Arrow Destination.
use crate::{
destinations::arrow::{typesystem::ArrowTypeSystem, ArrowDestination, ArrowDestinationError},
impl_transport,
sources::bigquery::{BigQuerySource, BigQuerySourceError, BigQueryTypeSystem},
typesystem::TypeConversion,
};
use chrono::{DateTime, NaiveDate, NaiveDateTime, NaiveTime, Utc};
use thiserror::Error;
/// Errors that can occur while transporting data from BigQuery to Arrow,
/// wrapping the source-, destination-, and framework-level error types.
#[derive(Error, Debug)]
pub enum BigQueryArrowTransportError {
    #[error(transparent)]
    Source(#[from] BigQuerySourceError),
    #[error(transparent)]
    Destination(#[from] ArrowDestinationError),
    #[error(transparent)]
    ConnectorX(#[from] crate::errors::ConnectorXError),
}
/// Convert BigQuery data types to Arrow data types.
pub struct BigQueryArrowTransport;
// Each mapping line pairs a BigQuery type/system with its Arrow counterpart;
// `conversion auto` generates the TypeConversion impl, `conversion none`
// reuses an existing one (the Rust representation is identical).
impl_transport!(
    name = BigQueryArrowTransport,
    error = BigQueryArrowTransportError,
    systems = BigQueryTypeSystem => ArrowTypeSystem,
    route = BigQuerySource => ArrowDestination,
    mappings = {
        { Bool[bool] => Boolean[bool] | conversion auto }
        { Boolean[bool] => Boolean[bool] | conversion none }
        { Int64[i64] => Int64[i64] | conversion auto }
        { Integer[i64] => Int64[i64] | conversion none }
        { Float64[f64] => Float64[f64] | conversion auto }
        { Float[f64] => Float64[f64] | conversion none }
        { Numeric[f64] => Float64[f64] | conversion none }
        { Bignumeric[f64] => Float64[f64] | conversion none }
        { String[String] => LargeUtf8[String] | conversion auto }
        { Bytes[String] => LargeUtf8[String] | conversion none }
        { Date[NaiveDate] => Date32[NaiveDate] | conversion auto }
        { Datetime[NaiveDateTime] => Date64[NaiveDateTime] | conversion auto }
        { Time[NaiveTime] => Time64[NaiveTime] | conversion auto }
        { Timestamp[DateTime<Utc>] => DateTimeTz[DateTime<Utc>] | conversion auto }
    }
);
|
use core::parser::IrcMessage;
use core::net::TcpWriter;
use std::thread;
use std::collections::HashMap;
use chan;
use libloading::{Library, Symbol};
/// Signature of a message handler: gets the connection writer and the parsed message.
pub type IrcFn = fn(&mut TcpWriter, &IrcMessage);
/// Event-dispatch loop state: receives parsed IRC messages and fans them
/// out to handlers registered per IRC method/command.
pub struct HandlerThread<'ht> {
    writer: TcpWriter,
    receiver: chan::Receiver<IrcMessage>,
    // Method name (e.g. "PRIVMSG", "376") -> handlers to invoke.
    pub handlers: HashMap<&'ht str, Vec<IrcFn>>,
    // Loaded plugin libraries; kept alive so their function pointers stay valid.
    libraries: Vec<Library>
}
impl<'ht> HandlerThread<'ht> {
    /// Registers `func` to run whenever a message with the given IRC
    /// `method` (command/numeric) arrives.
    pub fn add_handler(&mut self, method: &'ht str, func: IrcFn) {
        // `or_default` avoids building the fallback Vec when the entry
        // already exists (clippy: or_fun_call).
        self.handlers.entry(method).or_default().push(func);
    }
    /// Loads the dynamic library `lib<name>.so` and lets its `initialize`
    /// symbol register handlers on this thread.
    ///
    /// NOTE(review): the `unwrap`s panic on a missing library or symbol;
    /// consider surfacing a Result instead.
    pub fn load_plugin(&mut self, name: &str) {
        unsafe {
            let lib = Library::new(format!("lib{}.so", name)).unwrap();
            {
                let initialize: Symbol<extern fn(&mut HandlerThread) -> ()> =
                    lib.get(b"initialize\0").unwrap(); // TODO: handle case for incorrect lib
                initialize(self);
            }
            // Keep the library loaded; dropping it would invalidate the
            // registered handler function pointers.
            self.libraries.push(lib);
        }
    }
    /// Main dispatch loop: auto-joins a channel at end-of-MOTD and invokes
    /// all handlers registered for each incoming message's method.
    pub fn run(&mut self) {
        self.load_plugin("say");
        loop {
            let message: IrcMessage = self.receiver.recv().unwrap();
            // 376 = RPL_ENDOFMOTD, 422 = ERR_NOMOTD: safe points to join.
            match message.method.as_ref() {
                "376" | "422" => { self.writer.write("JOIN #bots"); },
                _ => ()
            }
            if let Some(handlers) = self.handlers.get(&*message.method) {
                for handler in handlers.iter() {
                    handler(&mut self.writer, &message);
                }
            }
        }
    }
}
/// Builds a `HandlerThread` around the given writer/receiver pair and runs
/// its dispatch loop on the current thread (does not return while the
/// connection is alive).
pub fn new_thread(writer: TcpWriter, receiver: chan::Receiver<IrcMessage>) {
    HandlerThread {
        writer,
        receiver,
        handlers: HashMap::new(),
        libraries: Vec::new(),
    }
    .run();
}
|
#[macro_use]
extern crate log;
pub mod core;
pub mod file;
|
use std::cmp::min;
/// Reads `n`, `s`, and `t` from stdin (one per line) and prints each line
/// produced by `solve`.
fn main() {
    // Helper: read one stdin line with the trailing newline removed.
    let read_trimmed = || {
        let mut line = String::new();
        std::io::stdin().read_line(&mut line).unwrap();
        line.trim_end().to_owned()
    };
    let n: usize = read_trimmed().parse().unwrap();
    let s = read_trimmed();
    let t = read_trimmed();
    for line in solve(n, &s, &t) {
        println!("{}", line);
    }
}
/// Returns, as a single output line, the minimum combined length of `s`
/// followed by `t` (each of length `n`) when the longest suffix of `s`
/// matching a prefix of `t` is overlapped.
fn solve(n: usize, s: &str, t: &str) -> Vec<String> {
    // Largest k (capped at n) such that the last k chars of `s` equal the
    // first k chars of `t`; k = 0 always matches, so `find` cannot fail.
    let overlap = (0..=min(t.len(), n))
        .rev()
        .find(|&k| s[s.len() - k..] == t[..k])
        .unwrap_or(0);
    vec![format!("{}", 2 * n - overlap)]
}
// Partial overlap of one character ("abc" + "cde" -> "abcde").
#[test]
fn test_solve_1() {
    assert_eq!(solve(3, "abc", "cde"), vec!("5"));
}
// No overlap at all.
#[test]
fn test_solve_2() {
    assert_eq!(solve(1, "a", "z"), vec!("2"));
}
// Full overlap: identical strings collapse entirely.
#[test]
fn test_solve_3() {
    assert_eq!(solve(4, "expr", "expr"), vec!("4"));
}
// Longest overlap is not the first candidate checked.
#[test]
fn test_solve_4() {
    assert_eq!(solve(4, "aaba", "abcd"), vec!("7"));
}
// Multi-character overlap ("aaba" + "abad" overlap on "aba").
#[test]
fn test_solve_5() {
    assert_eq!(solve(4, "aaba", "abad"), vec!("5"));
}
|
extern crate extended_collections;
extern crate rand;
use self::rand::{thread_rng, Rng};
use extended_collections::skiplist::SkipList;
use extended_collections::skiplist::SkipMap;
use std::vec::Vec;
const NUM_OF_OPERATIONS: usize = 100_000;
/// Integration test: drives a `SkipMap` through random inserts, lookups,
/// updates, and removals, cross-checking every step against a sorted,
/// deduplicated shadow `Vec`.
#[test]
fn int_test_skip_map() {
    // Fixed seed so the sequence is reproducible across runs.
    let mut rng: rand::XorShiftRng = rand::SeedableRng::from_seed([1, 1, 1, 1]);
    let mut map = SkipMap::new();
    let mut expected = Vec::new();
    for _ in 0..NUM_OF_OPERATIONS {
        let key = rng.gen::<u32>();
        let val = rng.gen::<u32>();
        map.insert(key, val);
        expected.push((key, val));
    }
    // Reverse before the stable sort + dedup so that for duplicate keys the
    // LAST inserted value survives, matching map.insert overwrite semantics.
    expected.reverse();
    expected.sort_by(|l, r| l.0.cmp(&r.0));
    expected.dedup_by_key(|pair| pair.0);
    assert_eq!(map.len(), expected.len());
    assert_eq!(map.min(), Some(&expected[0].0));
    assert_eq!(map.max(), Some(&expected[expected.len() - 1].0));
    // Every surviving key must be present with ceil/floor equal to itself.
    for entry in &expected {
        assert!(map.contains_key(&entry.0));
        assert_eq!(map.get(&entry.0), Some(&entry.1));
        assert_eq!(map.ceil(&entry.0), Some(&entry.0));
        assert_eq!(map.floor(&entry.0), Some(&entry.0));
    }
    // Overwrite via insert, then mutate in place via get_mut.
    for entry in &mut expected {
        let val_1 = rng.gen::<u32>();
        let val_2 = rng.gen::<u32>();
        let old_entry = map.insert(entry.0, val_1);
        assert_eq!(old_entry, Some((entry.0, entry.1)));
        {
            let old_val = map.get_mut(&entry.0);
            *old_val.unwrap() = val_2;
        }
        entry.1 = val_2;
        assert_eq!(map.get(&entry.0), Some(&val_2));
    }
    // Remove everything in random order, checking len as we go.
    thread_rng().shuffle(&mut expected);
    let mut expected_len = expected.len();
    for entry in expected {
        let old_entry = map.remove(&entry.0);
        expected_len -= 1;
        assert_eq!(old_entry, Some((entry.0, entry.1)));
        assert_eq!(map.len(), expected_len);
    }
}
/// Integration test: drives a `SkipList` through random positional inserts,
/// indexed writes, and removals, mirrored against a shadow `Vec`.
#[test]
fn int_test_skip_list() {
    // Fixed seed so the sequence is reproducible across runs.
    let mut rng: rand::XorShiftRng = rand::SeedableRng::from_seed([1, 1, 1, 1]);
    let mut list = SkipList::new();
    let mut expected = Vec::new();
    // Insert at a random valid index each round (0..=current length).
    for i in 0..NUM_OF_OPERATIONS {
        let index = rng.gen_range(0, i + 1);
        let val = rng.gen::<u32>();
        list.insert(index, val);
        expected.insert(index, val);
    }
    assert_eq!(list.len(), expected.len());
    assert_eq!(
        list.iter().collect::<Vec<&u32>>(),
        expected.iter().collect::<Vec<&u32>>(),
    );
    // Overwrite then remove at random indices until both are empty.
    for i in (0..NUM_OF_OPERATIONS).rev() {
        let index = rng.gen_range(0, i + 1);
        let val = rng.gen::<u32>();
        list[index] = val;
        expected[index] = val;
        assert_eq!(list[index], expected[index]);
        assert_eq!(list.remove(index), expected.remove(index));
    }
}
|
use crate::{prelude::*, sql::CXQuery};
use arrow::record_batch::RecordBatch;
use datafusion::datasource::MemTable;
use datafusion::prelude::*;
use fehler::throws;
use log::debug;
use rayon::prelude::*;
use std::collections::HashMap;
use std::convert::TryFrom;
use std::sync::{mpsc::channel, Arc};
/// Executes a federated query: rewrites `sql` into per-database subqueries,
/// fetches each remote part in parallel into Arrow record batches, registers
/// them as DataFusion tables, and runs the remaining "LOCAL" SQL over them.
///
/// `db_map` maps database names to connection strings; `j4rs_base` is passed
/// through to the (Java-based) SQL rewriter.
#[throws(ConnectorXOutError)]
pub fn run(
    sql: String,
    db_map: HashMap<String, String>,
    j4rs_base: Option<&str>,
) -> Vec<RecordBatch> {
    debug!("federated input sql: {}", sql);
    // Parse each connection string into source info for the rewriter.
    let mut db_conn_map: HashMap<String, FederatedDataSourceInfo> = HashMap::new();
    for (k, v) in db_map.into_iter() {
        db_conn_map.insert(
            k,
            FederatedDataSourceInfo::new_from_conn_str(SourceConn::try_from(v.as_str())?, false),
        );
    }
    let fed_plan = rewrite_sql(sql.as_str(), &db_conn_map, j4rs_base)?;
    debug!("fetch queries from remote");
    // Fan out: each plan entry runs on a rayon worker and reports its result
    // (alias + optional table) through the channel.
    let (sender, receiver) = channel();
    fed_plan.into_par_iter().enumerate().try_for_each_with(
        sender,
        |s, (i, p)| -> Result<(), ConnectorXOutError> {
            match p.db_name.as_str() {
                // "LOCAL" entries carry the final SQL, not a remote fetch.
                "LOCAL" => {
                    s.send((p.sql, None)).expect("send error local");
                }
                _ => {
                    debug!("start query {}: {}", i, p.sql);
                    let queries = [CXQuery::naked(p.sql)];
                    let source_conn = &db_conn_map[p.db_name.as_str()]
                        .conn_str_info
                        .as_ref()
                        .unwrap();
                    let destination = get_arrow(source_conn, None, &queries)?;
                    let rbs = destination.arrow()?;
                    // Wrap the fetched batches as an in-memory DataFusion table.
                    let provider = MemTable::try_new(rbs[0].schema(), vec![rbs])?;
                    s.send((p.db_alias, Some(Arc::new(provider))))
                        .expect(&format!("send error {}", i));
                    debug!("query {} finished", i);
                }
            }
            Ok(())
        },
    )?;
    let ctx = SessionContext::new();
    let mut alias_names: Vec<String> = vec![];
    let mut local_sql = String::new();
    // Drain the channel: register remote tables, capture the local SQL.
    receiver
        .iter()
        .try_for_each(|(alias, provider)| -> Result<(), ConnectorXOutError> {
            match provider {
                Some(p) => {
                    ctx.register_table(alias.as_str(), p)?;
                    alias_names.push(alias);
                }
                None => local_sql = alias,
            }
            Ok(())
        })?;
    debug!("\nexecute query final...");
    let rt = Arc::new(tokio::runtime::Runtime::new().expect("Failed to create runtime"));
    // until datafusion fix the bug: https://github.com/apache/arrow-datafusion/issues/2147
    for alias in alias_names {
        local_sql = local_sql.replace(format!("\"{}\"", alias).as_str(), alias.as_str());
    }
    let df = rt.block_on(ctx.sql(local_sql.as_str()))?;
    rt.block_on(df.collect())?
}
|
// Copyright 2018 Fredrik Portström <https://portstrom.com>
// This is free software distributed under the terms specified in
// the file LICENSE at the top-level directory of this distribution.
use parse_wiki_text::Positioned;
/// Shared parsing state: the wiki text being parsed, the language (if any),
/// and the warnings accumulated so far.
pub struct Context<'a> {
    pub language: Option<::Language>,
    pub warnings: Vec<::Warning>,
    pub wiki_text: &'a str,
}
/// Records a warning spanning `node`'s position in the wiki text.
pub fn add_warning(context: &mut Context, node: &impl Positioned, message: ::WarningMessage) {
    // This panics when accidentally making an infinite loop that produces warnings. This sometimes happens during development. In release builds, loops are assumed to already be tested and work properly.
    debug_assert!(context.warnings.len() < 10000);
    context.warnings.push(::Warning {
        end: node.end(),
        language: context.language,
        message,
        start: node.start(),
    });
}
/// Records `warning_message` for `node` and returns the node's raw text as
/// an `Unknown` element (warning and unknown span are the same node).
#[must_use]
pub fn create_unknown<'a>(
    context: &mut Context<'a>,
    node: &::Node,
    warning_message: ::WarningMessage,
) -> ::Flowing<'a> {
    create_unknown2(context, node, node, warning_message)
}
/// Like `create_unknown`, but lets the warning location (`warning_node`)
/// differ from the node whose raw text becomes the `Unknown` value.
#[must_use]
pub fn create_unknown2<'a>(
    context: &mut Context<'a>,
    unknown_node: &::Node,
    warning_node: &impl Positioned,
    warning_message: ::WarningMessage,
) -> ::Flowing<'a> {
    add_warning(context, warning_node, warning_message);
    ::Flowing::Unknown {
        // Borrow the raw wiki text covered by the unknown node verbatim.
        value: ::Cow::Borrowed(&context.wiki_text[unknown_node.start()..unknown_node.end()]),
    }
}
/// Builds a `Link` element from `target` and the link's `text` nodes;
/// falls back to an `Unknown` element (with a warning) when the text
/// is not plain text.
#[must_use]
pub fn parse_link<'a>(
    context: &mut Context<'a>,
    node: &::Node,
    target: &'a str,
    text: &[::Node<'a>],
) -> ::Flowing<'a> {
    match parse_text(text) {
        None => create_unknown(context, node, ::WarningMessage::ValueUnrecognized),
        Some(text) => ::Flowing::Link {
            target: ::Cow::Borrowed(target),
            text,
        },
    }
}
/// Variant of `parse_list_generic` that parses each definition-list item
/// individually via `parse_list_item`, warning on (and skipping) items
/// whose type is not `Details`. Returns the number of `nodes` consumed.
#[must_use]
pub fn parse_list_items_generic<'a, T>(
    context: &mut Context<'a>,
    template_node: &::Node,
    parameters: &[::Parameter],
    nodes: &[::Node<'a>],
    output: &mut Option<Vec<T>>,
    mut parse_list_item: impl FnMut(&mut Context<'a>, &::DefinitionListItem<'a>) -> Option<T>,
) -> usize {
    parse_list_generic(
        context,
        template_node,
        parameters,
        nodes,
        output,
        |context, items| {
            items
                .iter()
                .filter_map(|item| {
                    if item.type_ == ::Details {
                        parse_list_item(context, item)
                    } else {
                        add_warning(context, item, ::WarningMessage::Unrecognized);
                        None
                    }
                })
                .collect()
        },
    )
}
/// Parse the definition list expected at the head of `nodes` with
/// `parse_list`, storing the result in `output`. Returns how many of
/// `nodes` were consumed (1 if a definition list was found, else 0).
///
/// On a duplicate template, unexpected parameters, or a missing list,
/// `output` is set to `T::default()` and a warning is recorded.
#[must_use]
pub fn parse_list_generic<'a, T: Default>(
    context: &mut Context<'a>,
    template_node: &::Node,
    parameters: &[::Parameter],
    nodes: &[::Node<'a>],
    output: &mut Option<T>,
    mut parse_list: impl FnMut(&mut Context<'a>, &[::DefinitionListItem<'a>]) -> T,
) -> usize {
    if output.is_some() {
        // Duplicate template: the earlier value is deliberately discarded
        // and replaced with the default.
        *output = Some(Default::default());
        add_warning(context, template_node, ::WarningMessage::Duplicate);
        return 0;
    }
    if !parameters.is_empty() {
        // These templates take no parameters; any parameter is an error.
        *output = Some(Default::default());
        add_warning(context, template_node, ::WarningMessage::ValueUnrecognized);
        return 0;
    }
    if let Some(::Node::DefinitionList { items, .. }) = nodes.get(0) {
        *output = Some(parse_list(context, items));
        return 1;
    }
    *output = Some(Default::default());
    add_warning(context, template_node, ::WarningMessage::SectionEmpty);
    0
}
/// Return the parameter's name when it consists of exactly one plain text
/// node; `None` for unnamed parameters or complex names.
#[must_use]
pub fn parse_parameter_name<'a>(parameter: &::Parameter<'a>) -> Option<&'a str> {
    let nodes = parameter.name.as_ref()?;
    match nodes.as_slice() {
        [::Node::Text { value, .. }] => Some(*value),
        _ => None,
    }
}
/// Return `output` for a template that must take no parameters; if any
/// parameter is present the template is rendered unknown with a warning.
#[must_use]
pub fn parse_simple_template<'a>(
    context: &mut Context<'a>,
    node: &::Node,
    parameters: &[::Parameter],
    output: ::Flowing<'a>,
) -> ::Flowing<'a> {
    // Guard clause: any parameter invalidates the simple template.
    if !parameters.is_empty() {
        return create_unknown(context, node, ::WarningMessage::ValueUnrecognized);
    }
    output
}
/// Flatten `nodes` to plain text. Borrows when possible (empty slice or a
/// single text node); otherwise builds an owned string from text and
/// character-entity nodes. Any other node kind yields `None`.
#[must_use]
pub fn parse_text<'a>(nodes: &[::Node<'a>]) -> Option<::Cow<'a, str>> {
    match nodes {
        [] => Some(::Cow::Borrowed("")),
        [::Node::Text { value, .. }] => Some(::Cow::Borrowed(value)),
        _ => {
            let mut combined = String::new();
            for node in nodes {
                match node {
                    ::Node::CharacterEntity { character, .. } => combined.push(*character),
                    ::Node::Text { value, .. } => combined.push_str(value),
                    // Anything else cannot be flattened to plain text.
                    _ => return None,
                }
            }
            Some(::Cow::Owned(combined))
        }
    }
}
/// Like [`parse_text`], but treats an empty result as `None`.
#[must_use]
pub fn parse_text_not_empty<'a>(nodes: &[::Node<'a>]) -> Option<::Cow<'a, str>> {
    match parse_text(nodes) {
        Some(text) if !text.is_empty() => Some(text),
        _ => None,
    }
}
/// `true` when `nodes` flattens (via [`parse_text`]) to exactly `text`.
#[must_use]
pub fn text_equals(nodes: &[::Node], text: &str) -> bool {
    parse_text(nodes).map_or(false, |value| value == text)
}
|
extern crate jni;
use jni::{
objects::JClass,
sys::jint,
JNIEnv
};
/// Returns the square of an integer.
///
/// Overflow follows the usual integer-multiplication rules (panic in
/// debug builds, wrap in release builds).
fn sqr_internal(a: i32) -> i32 {
    a.pow(2)
}
/// Returns result of division of two integers. Exported function, `no_mangle` is required.
///
/// # Panics
///
/// Panics if `b` is zero, or on overflow (`i32::MIN / -1`).
#[no_mangle]
pub fn div(a: i32, b: i32) -> i32 {
    a / b
}
// **********************************
// JNI API
// **********************************
/// JNI entry point: the mangled name `Java_Main_sqr` binds this to the
/// Java method `Main.sqr(int)`; it simply delegates to [`sqr_internal`].
#[no_mangle]
pub extern "system" fn Java_Main_sqr(_env: JNIEnv, _class: JClass, a: jint) -> jint {
    sqr_internal(a)
}
#[cfg(test)]
mod tests {
    use super::*;
    // Exercises the exported `div` wrapper with an exact quotient.
    #[test]
    fn third_div() {
        assert_eq!(div(42, 6), 7);
    }
    // Exercises the internal squaring helper on small values.
    #[test]
    fn third_sqr() {
        assert_eq!(sqr_internal(0), 0);
        assert_eq!(sqr_internal(1), 1);
        assert_eq!(sqr_internal(2), 4);
        assert_eq!(sqr_internal(4), 16);
    }
}
|
/// Advance `vect` to its next lexicographic permutation in place.
///
/// Panics with "last permutation!" when `vect` is already in descending
/// order (i.e. it is the final permutation).
fn next_perm(vect: &mut Vec<usize>) {
    let last = vect.len() - 1;
    // Pivot position: the right-most index whose left neighbour is
    // strictly smaller; 0 means the sequence is fully descending.
    let i = (1..=last)
        .rev()
        .find(|&i| vect[i - 1] < vect[i])
        .unwrap_or(0);
    if i == 0 {
        panic!("last permutation!");
    }
    // The suffix vect[i..] is non-increasing, so scanning from the right
    // finds the smallest element still greater than the pivot.
    let j = (i..=last)
        .rev()
        .find(|&j| vect[j] > vect[i - 1])
        .unwrap();
    // Swap pivot with that element, then restore ascending order in the
    // suffix by reversing it.
    vect.swap(j, i - 1);
    vect[i..].reverse();
}
fn main() {
let mut a: Vec<usize> = vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9];
for _ in 0..1_000_000 - 1 {
next_perm(&mut a);
}
println!("{:?}", a);
} |
// svd2rust-generated register API for SW_HS_N_SEL.
#[doc = "Reader of register SW_HS_N_SEL"]
pub type R = crate::R<u32, super::SW_HS_N_SEL>;
#[doc = "Writer for register SW_HS_N_SEL"]
pub type W = crate::W<u32, super::SW_HS_N_SEL>;
#[doc = "Register SW_HS_N_SEL `reset()`'s with value 0"]
impl crate::ResetValue for super::SW_HS_N_SEL {
    type Type = u32;
    // Hardware reset state: all fields cleared.
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0
    }
}
// Per-field reader aliases and write proxies. Each `bit`/`bits` method
// clears the field's mask in the cached register value and ORs in the
// new value at the field's offset.
#[doc = "Reader of field `SW_HCCC`"]
pub type SW_HCCC_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `SW_HCCC`"]
pub struct SW_HCCC_W<'a> {
    w: &'a mut W,
}
impl<'a> SW_HCCC_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Single-bit field at bit 16.
        self.w.bits = (self.w.bits & !(0x01 << 16)) | (((value as u32) & 0x01) << 16);
        self.w
    }
}
#[doc = "Reader of field `SW_HCCD`"]
pub type SW_HCCD_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `SW_HCCD`"]
pub struct SW_HCCD_W<'a> {
    w: &'a mut W,
}
impl<'a> SW_HCCD_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Single-bit field at bit 20.
        self.w.bits = (self.w.bits & !(0x01 << 20)) | (((value as u32) & 0x01) << 20);
        self.w
    }
}
#[doc = "Reader of field `SW_HCRH`"]
pub type SW_HCRH_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `SW_HCRH`"]
pub struct SW_HCRH_W<'a> {
    w: &'a mut W,
}
impl<'a> SW_HCRH_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // 3-bit field at bits 24:26; caller must keep `value` in range.
        self.w.bits = (self.w.bits & !(0x07 << 24)) | (((value as u32) & 0x07) << 24);
        self.w
    }
}
#[doc = "Reader of field `SW_HCRL`"]
pub type SW_HCRL_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `SW_HCRL`"]
pub struct SW_HCRL_W<'a> {
    w: &'a mut W,
}
impl<'a> SW_HCRL_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // 3-bit field at bits 28:30; caller must keep `value` in range.
        self.w.bits = (self.w.bits & !(0x07 << 28)) | (((value as u32) & 0x07) << 28);
        self.w
    }
}
// Read-side accessors: each extracts its field from the cached register
// value by shifting and masking.
impl R {
    #[doc = "Bit 16 - Set corresponding switch"]
    #[inline(always)]
    pub fn sw_hccc(&self) -> SW_HCCC_R {
        SW_HCCC_R::new(((self.bits >> 16) & 0x01) != 0)
    }
    #[doc = "Bit 20 - Set corresponding switch"]
    #[inline(always)]
    pub fn sw_hccd(&self) -> SW_HCCD_R {
        SW_HCCD_R::new(((self.bits >> 20) & 0x01) != 0)
    }
    #[doc = "Bits 24:26 - Select waveform for corresponding switch"]
    #[inline(always)]
    pub fn sw_hcrh(&self) -> SW_HCRH_R {
        SW_HCRH_R::new(((self.bits >> 24) & 0x07) as u8)
    }
    #[doc = "Bits 28:30 - Select waveform for corresponding switch"]
    #[inline(always)]
    pub fn sw_hcrl(&self) -> SW_HCRL_R {
        SW_HCRL_R::new(((self.bits >> 28) & 0x07) as u8)
    }
}
// Write-side accessors: each returns a proxy that mutates the pending
// register value in place.
impl W {
    #[doc = "Bit 16 - Set corresponding switch"]
    #[inline(always)]
    pub fn sw_hccc(&mut self) -> SW_HCCC_W {
        SW_HCCC_W { w: self }
    }
    #[doc = "Bit 20 - Set corresponding switch"]
    #[inline(always)]
    pub fn sw_hccd(&mut self) -> SW_HCCD_W {
        SW_HCCD_W { w: self }
    }
    #[doc = "Bits 24:26 - Select waveform for corresponding switch"]
    #[inline(always)]
    pub fn sw_hcrh(&mut self) -> SW_HCRH_W {
        SW_HCRH_W { w: self }
    }
    #[doc = "Bits 28:30 - Select waveform for corresponding switch"]
    #[inline(always)]
    pub fn sw_hcrl(&mut self) -> SW_HCRL_W {
        SW_HCRL_W { w: self }
    }
}
|
//! Module to control shutdown in lading.
//!
//! Lading manages at least one sub-process, possibly two and must coordinate
//! shutdown with an experimental regime in addition to the target sub-process'
//! potential failures. Controlling shutdown is the responsibility of the code
//! in this module, specifically [`Shutdown`].
use std::sync::Arc;
use tokio::{
sync::broadcast,
time::{interval, Duration},
};
use tracing::{error, info};
#[derive(Debug)]
/// Errors produced by [`Shutdown`]
pub enum Error {
    /// The mechanism underlying [`Shutdown`] failed catastrophically.
    Tokio(broadcast::error::SendError<()>),
}
#[derive(Debug)]
/// Mechanism to control shutdown in lading.
///
/// Lading will shutdown for two reasons: the experimental time set by the user
/// has elapsed or the target sub-process has exited too soon. Everything in
/// lading that participates in controlled shutdown does so by having a clone of
/// this struct.
pub struct Shutdown {
    /// The broadcast sender, singleton for all `Shutdown` instances derived
    /// from the same root `Shutdown`.
    sender: Arc<broadcast::Sender<()>>,
    /// The receive half of the channel used to listen for shutdown. One per
    /// instance.
    notify: broadcast::Receiver<()>,
    /// `true` if the shutdown signal has been received
    shutdown: bool,
}
impl Default for Shutdown {
    /// Equivalent to [`Shutdown::new`]: creates a fresh root instance.
    fn default() -> Self {
        Self::new()
    }
}
impl Shutdown {
    /// Create a new `Shutdown` instance. There should be only one call to this
    /// function and all subsequent instances should be created through clones.
    #[must_use]
    pub fn new() -> Self {
        // Capacity 1 is sufficient: the only message ever sent is one `()`.
        let (shutdown_snd, shutdown_rcv) = broadcast::channel(1);
        Self {
            sender: Arc::new(shutdown_snd),
            notify: shutdown_rcv,
            shutdown: false,
        }
    }
    /// Receive the shutdown notice. This function will block if a notice has
    /// not already been sent.
    pub async fn recv(&mut self) {
        // If the shutdown signal has already been received, then return
        // immediately.
        if self.shutdown {
            return;
        }
        // Cannot receive a "lag error" as only one value is ever sent.
        let _ = self.notify.recv().await;
        // Remember that the signal has been received.
        self.shutdown = true;
    }
    /// Send the shutdown signal through to this and all derived `Shutdown`
    /// instances. Returns the number of active instances, or error.
    ///
    /// # Errors
    ///
    /// Function will return an error if the underlying tokio broadcast
    /// mechanism fails.
    pub fn signal(&self) -> Result<usize, Error> {
        self.sender.send(()).map_err(Error::Tokio)
    }
    /// Wait for all `Shutdown` instances to properly shut down, polling once
    /// per second and giving up with an error log after `max_delay`. This
    /// function is safe to call from multiple instances of a `Shutdown`.
    ///
    /// # Panics
    ///
    /// None known.
    pub async fn wait(self, max_delay: Duration) {
        // Tidy up our own `notify`, avoiding a situation where we infinitely wait
        // to shut down.
        drop(self.notify);
        let mut check_pulse = interval(Duration::from_secs(1));
        let mut max_delay = interval(max_delay);
        // Move past the first delay. If we fail to avoid this 0th interval the
        // program shuts down with an error incorrectly.
        max_delay.tick().await;
        loop {
            tokio::select! {
                _ = check_pulse.tick() => {
                    // The broadcast receiver count doubles as a liveness
                    // count: each live clone still holds a receiver.
                    let remaining: usize = self.sender.receiver_count();
                    if remaining == 0 {
                        info!("all tasks shut down");
                        return;
                    }
                    // For reasons that are obscure to me if we sleep here it's
                    // _possible_ for the runtime to fully lock up when the splunk_heck
                    // -- at least -- generator is running. See note below. This only
                    // seems to happen if we have a single-threaded runtime or a low
                    // number of worker threads available. I've reproduced the issue
                    // reliably with 2.
                    info!("waiting for {} tasks to shutdown", remaining);
                }
                _ = max_delay.tick() => {
                    let remaining: usize = self.sender.receiver_count();
                    error!("shutdown wait completing with {} remaining tasks", remaining);
                    return;
                }
            }
        }
    }
}
impl Clone for Shutdown {
    /// Derive a new `Shutdown` sharing the same broadcast channel; the
    /// clone receives its own fresh receiver subscription.
    fn clone(&self) -> Self {
        Self {
            sender: Arc::clone(&self.sender),
            notify: self.sender.subscribe(),
            shutdown: self.shutdown,
        }
    }
}
|
#![deny(clippy::all, clippy::pedantic)]
use unicode_segmentation::UnicodeSegmentation;
/// Reverse `input` by extended grapheme clusters, so multi-codepoint
/// characters (combining accents, emoji sequences) stay intact.
pub fn reverse(input: &str) -> String {
    // Same byte length as the input, so reserve it up front.
    let mut reversed = String::with_capacity(input.len());
    for grapheme in input.graphemes(true).rev() {
        reversed.push_str(grapheme);
    }
    reversed
}
|
use std::{collections::VecDeque, iter::FromIterator};
#[derive(Debug)]
struct LazyBufferEdge<T> {
    // index of left-most element in *resulting* buffer
    left_idx: usize,
    // size of this piece of buffer (including sizes of kids)
    size: usize,
    vertex: Box<LazyBuffer<T>>,
}
#[derive(Debug)]
/// A rope-like buffer: a base `VecDeque` plus child buffers ("kids")
/// recording insertions that have not yet been flattened in.
struct LazyBuffer<T> {
    buf: VecDeque<T>,
    kids: Vec<LazyBufferEdge<T>>,
}
impl<T: std::fmt::Debug> LazyBuffer<T> {
    /// Build a leaf buffer holding the elements of `buf` and no kids.
    fn new<B>(buf: B) -> Self
    where
        B: IntoIterator<Item = T>,
    {
        Self {
            buf: VecDeque::from_iter(buf),
            kids: Default::default(),
        }
    }
    /// Record an insertion of `data` so it will appear starting at
    /// `target_idx` of the buffer that results from flattening this tree.
    /// Kids are kept sorted by `left_idx`.
    fn insert<I>(&mut self, target_idx: usize, data: I)
    where
        I: Iterator<Item = T> + ExactSizeIterator,
    {
        // Take the length once: `data` is consumed below, and the original
        // queried `data.len()` repeatedly.
        let added = data.len();
        match self
            .kids
            .binary_search_by(|edge| edge.left_idx.cmp(&target_idx))
        {
            Ok(exact_idx) => {
                // Insertion point coincides with an existing kid: the new
                // data becomes the very front of that kid. Later siblings
                // shift right by the inserted length.
                self.kids[exact_idx].size += added;
                for edge in &mut self.kids[exact_idx + 1..] {
                    edge.left_idx += added;
                }
                self.kids[exact_idx].vertex.insert(0, data);
            }
            Err(0) => {
                // New left-most kid; every existing kid shifts right.
                for edge in &mut self.kids {
                    edge.left_idx += added;
                }
                self.kids.insert(
                    0,
                    LazyBufferEdge {
                        left_idx: target_idx,
                        size: added,
                        vertex: Box::new(LazyBuffer::new(data)),
                    },
                );
            }
            // approx_idx > 0
            Err(approx_idx) => {
                // Kids at and after the insertion point shift right.
                for edge in &mut self.kids[approx_idx..] {
                    edge.left_idx += added;
                }
                let parent = &mut self.kids[approx_idx - 1];
                if parent.left_idx + parent.size <= target_idx {
                    // Preceding kid ends before the target: new sibling edge.
                    self.kids.insert(
                        approx_idx,
                        LazyBufferEdge {
                            left_idx: target_idx,
                            size: added,
                            vertex: Box::new(LazyBuffer::new(data)),
                        },
                    );
                } else {
                    // Target falls inside the preceding kid: recurse into it.
                    parent.size += added;
                    parent.vertex.insert(target_idx - parent.left_idx, data);
                }
            }
        }
    }
}
/// Demo: build a buffer and record two lazy insertions, then dump the tree.
fn main() {
    let mut buffer: LazyBuffer<u8> = LazyBuffer::new("Hello world!".to_owned().into_bytes());
    buffer.insert(11, " war".bytes());
    buffer.insert(4, " n".bytes());
    // buffer.apply();
    println!("{:#?}", buffer);
}
|
// Regression test for fractional-size rounding: child sizes and locations
// must round to whole pixels while still tiling the (rounded) container
// exactly (59 + 30 + 24 == 113).
#[test]
fn rounding_total_fractial() {
    let layout = stretch::node::Node::new(
        stretch::style::Style {
            flex_direction: stretch::style::FlexDirection::Column,
            size: stretch::geometry::Size {
                width: stretch::style::Dimension::Points(87.4f32),
                height: stretch::style::Dimension::Points(113.4f32),
                ..Default::default()
            },
            ..Default::default()
        },
        vec![
            &stretch::node::Node::new(
                stretch::style::Style {
                    flex_grow: 0.7f32,
                    flex_basis: stretch::style::Dimension::Points(50.3f32),
                    size: stretch::geometry::Size {
                        height: stretch::style::Dimension::Points(20.3f32),
                        ..Default::default()
                    },
                    ..Default::default()
                },
                vec![],
            ),
            &stretch::node::Node::new(
                stretch::style::Style {
                    flex_grow: 1.6f32,
                    size: stretch::geometry::Size {
                        height: stretch::style::Dimension::Points(10f32),
                        ..Default::default()
                    },
                    ..Default::default()
                },
                vec![],
            ),
            &stretch::node::Node::new(
                stretch::style::Style {
                    flex_grow: 1.1f32,
                    size: stretch::geometry::Size {
                        height: stretch::style::Dimension::Points(10.7f32),
                        ..Default::default()
                    },
                    ..Default::default()
                },
                vec![],
            ),
        ],
    )
    .compute_layout(stretch::geometry::Size::undefined())
    .unwrap();
    // Container rounds 87.4 x 113.4 down to whole pixels.
    assert_eq!(layout.size.width, 87f32);
    assert_eq!(layout.size.height, 113f32);
    assert_eq!(layout.location.x, 0f32);
    assert_eq!(layout.location.y, 0f32);
    assert_eq!(layout.children[0usize].size.width, 87f32);
    assert_eq!(layout.children[0usize].size.height, 59f32);
    assert_eq!(layout.children[0usize].location.x, 0f32);
    assert_eq!(layout.children[0usize].location.y, 0f32);
    assert_eq!(layout.children[1usize].size.width, 87f32);
    assert_eq!(layout.children[1usize].size.height, 30f32);
    assert_eq!(layout.children[1usize].location.x, 0f32);
    assert_eq!(layout.children[1usize].location.y, 59f32);
    assert_eq!(layout.children[2usize].size.width, 87f32);
    assert_eq!(layout.children[2usize].size.height, 24f32);
    assert_eq!(layout.children[2usize].location.x, 0f32);
    assert_eq!(layout.children[2usize].location.y, 89f32);
}
|
// svd2rust-generated accessors for RCC_LPTIM1CKSELR (LPTIM1 kernel clock
// selection register).
#[doc = "Register `RCC_LPTIM1CKSELR` reader"]
pub type R = crate::R<RCC_LPTIM1CKSELR_SPEC>;
#[doc = "Register `RCC_LPTIM1CKSELR` writer"]
pub type W = crate::W<RCC_LPTIM1CKSELR_SPEC>;
#[doc = "Field `LPTIM1SRC` reader - LPTIM1SRC"]
pub type LPTIM1SRC_R = crate::FieldReader;
#[doc = "Field `LPTIM1SRC` writer - LPTIM1SRC"]
pub type LPTIM1SRC_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 3, O>;
impl R {
    #[doc = "Bits 0:2 - LPTIM1SRC"]
    #[inline(always)]
    pub fn lptim1src(&self) -> LPTIM1SRC_R {
        // Field occupies the low three bits.
        LPTIM1SRC_R::new((self.bits & 7) as u8)
    }
}
impl W {
    #[doc = "Bits 0:2 - LPTIM1SRC"]
    #[inline(always)]
    #[must_use]
    pub fn lptim1src(&mut self) -> LPTIM1SRC_W<RCC_LPTIM1CKSELR_SPEC, 0> {
        LPTIM1SRC_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "This register is used to control the selection of the kernel clock for the LPTIM1 block.\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`rcc_lptim1ckselr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`rcc_lptim1ckselr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct RCC_LPTIM1CKSELR_SPEC;
impl crate::RegisterSpec for RCC_LPTIM1CKSELR_SPEC {
    // 32-bit register.
    type Ux = u32;
}
#[doc = "`read()` method returns [`rcc_lptim1ckselr::R`](R) reader structure"]
impl crate::Readable for RCC_LPTIM1CKSELR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`rcc_lptim1ckselr::W`](W) writer structure"]
impl crate::Writable for RCC_LPTIM1CKSELR_SPEC {
    // No write-1-to-clear / write-0-to-clear fields in this register.
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets RCC_LPTIM1CKSELR to value 0"]
impl crate::Resettable for RCC_LPTIM1CKSELR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
use crate::node::Node;
use anyhow::{bail, Context, Result};
use bit_vec::BitVec;
use std::cmp::Reverse;
use std::collections::{BinaryHeap, HashMap, VecDeque};
use std::fs::File;
use std::io::prelude::*;
use std::io::{BufReader, BufWriter, ErrorKind, SeekFrom};
use std::path::Path;
const BUFFER_SIZE: usize = 4096;
type BitMap = HashMap<u8, BitVec>;
/// Huffman-compress the file at `file_name`, writing `<file_name>.huf`
/// alongside it.
///
/// The input is read twice: once to count byte frequencies and build the
/// code tree, then again (after seeking back to the start) to emit the
/// encoded output preceded by the serialized tree.
pub fn compress(file_name: &str) -> Result<()> {
    let in_path = Path::new(file_name);
    let in_file = File::open(&in_path).with_context(|| format!("Error opening `{}`", file_name))?;
    // Get the "bitmap" from the file, based on byte counts
    let mut buffer = BufReader::new(in_file);
    let counts = get_counts(buffer.by_ref()).with_context(|| "Error compressing file")?;
    let heap = counts_to_heap(counts);
    let root = heap_to_tree(heap).with_context(|| "Error compressing file")?;
    // Write out a new file with the "bitmap"
    // Rewind: the same reader is consumed again for the encoding pass.
    buffer.seek(SeekFrom::Start(0))?;
    let out_file_name = format!("{}.huf", file_name);
    let out_path = Path::new(&out_file_name);
    let out_file = File::create(&out_path)
        .with_context(|| format!("Error opening `{}` for writing", file_name))?;
    write(out_file, root, buffer).with_context(|| "Error compressing file")?;
    Ok(())
}
/// Count occurrences of every byte value in `buffer`, retrying on
/// interrupted reads.
fn get_counts(mut buffer: impl Read) -> Result<[usize; 256]> {
    let mut counts = [0usize; 256];
    let mut chunk = [0u8; BUFFER_SIZE];
    loop {
        let n = match buffer.read(&mut chunk) {
            Ok(0) => break,
            Ok(n) => n,
            Err(ref e) if e.kind() == ErrorKind::Interrupted => continue,
            Err(e) => bail!("{:?}", e),
        };
        // Only the first `n` bytes of the chunk are valid input.
        for &byte in &chunk[..n] {
            counts[usize::from(byte)] += 1;
        }
    }
    Ok(counts)
}
/// Build a min-heap (via `Reverse`) of leaf nodes, one per byte value
/// that occurs at least once in `counts`.
fn counts_to_heap(counts: [usize; 256]) -> BinaryHeap<Reverse<Node>> {
    counts
        .iter()
        .enumerate()
        .filter(|&(_, &count)| count > 0)
        .map(|(idx, &weight)| {
            Reverse(Node {
                value: Some(idx as u8),
                weight,
                left: None,
                right: None,
            })
        })
        .collect()
}
/// Collapse the heap into a single Huffman tree by repeatedly merging the
/// two lightest subtrees until one root remains.
fn heap_to_tree(mut heap: BinaryHeap<Reverse<Node>>) -> Result<Node> {
    loop {
        match (heap.pop(), heap.pop()) {
            (Some(Reverse(first)), Some(Reverse(second))) => {
                heap.push(Reverse(Node {
                    value: None,
                    weight: first.weight + second.weight,
                    left: Some(Box::new(first)),
                    right: Some(Box::new(second)),
                }));
            }
            // Exactly one element left: that is the finished root.
            (Some(Reverse(root)), None) => return Ok(root),
            // Empty heap (zero-length input): nothing to build.
            (None, _) => bail!("Unable to convert heap to tree"),
        }
    }
}
/// Walk the tree breadth-first, assigning every leaf byte the bit path
/// taken to reach it (false = left branch, true = right branch).
fn tree_to_bit_hash_map(head: Node) -> BitMap {
    let mut map = HashMap::new();
    let mut queue = VecDeque::new();
    queue.push_back((head, BitVec::new()));
    while let Some((node, bits)) = queue.pop_front() {
        match node.value {
            // Leaf: the accumulated path is this byte's code.
            Some(value) => {
                map.insert(value, bits);
            }
            None => {
                if let Some(left) = node.left {
                    let mut path = bits.clone();
                    path.push(false);
                    queue.push_back((*left, path));
                }
                if let Some(right) = node.right {
                    let mut path = bits.clone();
                    path.push(true);
                    queue.push_back((*right, path));
                }
            }
        }
    }
    map
}
/// Encode `buffer` into `file`: the serialized tree first, then the
/// Huffman-coded payload, flushing whole bytes as they fill and padding
/// the final partial byte with zero bits.
fn write(file: File, root: Node, mut buffer: impl Read) -> Result<()> {
    let mut bytes = [0; BUFFER_SIZE];
    let mut stream = BufWriter::new(file);
    // Start the output with the exported tree
    let mut bit_vec = root.export();
    let bitmap = tree_to_bit_hash_map(root);
    loop {
        match buffer.read(&mut bytes) {
            Ok(0) => break,
            Ok(n) => {
                // Only the first `n` bytes are valid. The previous code
                // iterated the whole array (re-encoding stale data from
                // earlier, longer reads) and skipped `byte == 0`, silently
                // dropping NUL bytes that get_counts had put in the tree.
                for &byte in &bytes[..n] {
                    bit_vec.extend(&bitmap[&byte]);
                }
                // Flush whole bytes; carry the trailing partial byte over
                // to the next iteration.
                let remainder = bit_vec.len() % 8;
                let new_bit_vec = bit_vec.split_off(bit_vec.len() - remainder);
                stream.write_all(&bit_vec.to_bytes())?;
                bit_vec.truncate(0);
                bit_vec.extend(new_bit_vec);
            }
            Err(ref e) if e.kind() == ErrorKind::Interrupted => continue,
            Err(e) => bail!("{:?}", e),
        };
    }
    // Final partial byte, zero-padded by `to_bytes`.
    if !bit_vec.is_empty() {
        stream.write_all(&bit_vec.to_bytes())?;
    }
    stream.flush()?;
    Ok(())
}
#[cfg(test)]
mod tests {
    use super::*;
    use std::io::Cursor;
    // Empty input: all counts stay zero.
    #[test]
    fn test_get_counts_no_bytes() {
        let file = Vec::new();
        let cursor = Cursor::new(file);
        let buffer = BufReader::new(cursor);
        let counts = get_counts(buffer).unwrap();
        let expected = [0; 256];
        assert_eq!(counts, expected);
    }
    // Mixed bytes, including NULs, are all counted.
    #[test]
    fn test_get_counts_few_bytes() {
        let file = Vec::from([0, 255, 0, 5, 255, 0]);
        let cursor = Cursor::new(file);
        let buffer = BufReader::new(cursor);
        let counts = get_counts(buffer).unwrap();
        let mut expected = [0; 256];
        expected[0] = 3;
        expected[255] = 2;
        expected[5] = 1;
        assert_eq!(counts, expected);
    }
    #[test]
    fn test_counts_to_queue_empty_queue() {
        let counts = [0; 256];
        let heap = counts_to_heap(counts);
        assert_eq!(heap.len(), 0);
    }
    #[test]
    fn test_counts_to_single_value() {
        let mut counts = [0; 256];
        counts[0] = 10;
        let mut heap = counts_to_heap(counts);
        assert_eq!(heap.len(), 1);
        let Reverse(node) = heap.pop().unwrap();
        assert_eq!(node.value, Some(0));
        assert_eq!(node.weight, 10);
    }
    // `Reverse` makes the BinaryHeap behave as a min-heap by weight.
    #[test]
    fn test_counts_to_multiple_values_has_lowest_weight_first() {
        let mut counts = [0; 256];
        counts[0] = 10;
        counts[1] = 3;
        counts[2] = 25;
        let mut heap = counts_to_heap(counts);
        assert_eq!(heap.len(), 3);
        let Reverse(node) = heap.pop().unwrap();
        assert_eq!(node.value, Some(1));
        assert_eq!(node.weight, 3);
        let Reverse(node) = heap.pop().unwrap();
        assert_eq!(node.value, Some(0));
        assert_eq!(node.weight, 10);
        let Reverse(node) = heap.pop().unwrap();
        assert_eq!(node.value, Some(2));
        assert_eq!(node.weight, 25);
    }
    #[test]
    fn test_heap_to_tree_with_no_values() {
        let heap = BinaryHeap::new();
        let head = heap_to_tree(heap).err().unwrap();
        assert_eq!(head.to_string(), "Unable to convert heap to tree");
    }
    #[test]
    fn test_heap_to_tree_with_one_node() {
        let mut heap = BinaryHeap::new();
        heap.push(Reverse(Node {
            value: Some(5),
            weight: 10,
            left: None,
            right: None,
        }));
        let head = heap_to_tree(heap);
        assert_eq!(head.unwrap().value, Some(5));
    }
    #[test]
    fn test_heap_to_tree_with_with_multiple_values() {
        let mut heap = BinaryHeap::new();
        heap.push(Reverse(Node {
            value: Some(5),
            weight: 10,
            left: None,
            right: None,
        }));
        heap.push(Reverse(Node {
            value: Some(1),
            weight: 3,
            left: None,
            right: None,
        }));
        heap.push(Reverse(Node {
            value: Some(2),
            weight: 6,
            left: None,
            right: None,
        }));
        heap.push(Reverse(Node {
            value: Some(9),
            weight: 8,
            left: None,
            right: None,
        }));
        /* Expected tree:
         *
         *       *:27
         *      /    \
         *   5:10    *:17
         *          /    \
         *        9:8    *:9
         *              /   \
         *            1:3   2:6
         */
        let head = heap_to_tree(heap).unwrap();
        assert_eq!(head.value, None);
        assert_eq!(head.weight, 27);
        let head_left = head.left.unwrap();
        assert_eq!(head_left.value, Some(5));
        assert_eq!(head_left.weight, 10);
        let head_right = head.right.unwrap();
        assert_eq!(head_right.value, None);
        assert_eq!(head_right.weight, 17);
        let head_right_left = head_right.left.unwrap();
        assert_eq!(head_right_left.value, Some(9));
        assert_eq!(head_right_left.weight, 8);
        let head_right_right = head_right.right.unwrap();
        assert_eq!(head_right_right.value, None);
        assert_eq!(head_right_right.weight, 9);
        let head_right_right_left = head_right_right.left.unwrap();
        assert_eq!(head_right_right_left.value, Some(1));
        assert_eq!(head_right_right_left.weight, 3);
        let head_right_right_right = head_right_right.right.unwrap();
        assert_eq!(head_right_right_right.value, Some(2));
        assert_eq!(head_right_right_right.weight, 6);
    }
    // Left branch encodes as `false`, right branch as `true`.
    #[test]
    fn test_tree_to_bit_hash_map() {
        let head = Node {
            value: None,
            weight: 27,
            left: Some(Box::new(Node {
                value: Some(3),
                weight: 17,
                left: None,
                right: None,
            })),
            right: Some(Box::new(Node {
                value: Some(8),
                weight: 10,
                left: None,
                right: None,
            })),
        };
        let bitmap = tree_to_bit_hash_map(head);
        assert!(bitmap.get(&3u8).unwrap().eq_vec(&[false]));
        assert!(bitmap.get(&8u8).unwrap().eq_vec(&[true]));
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.