text stringlengths 8 4.13M |
|---|
// https://rustcc.gitbooks.io/rustprimer/content/module/module.html
pub mod a;
/// Entry point: demonstrates three ways of reaching the same item —
/// a fully qualified path, a re-exported path and a locally `use`d path.
fn main() {
    println!("Hello, world!");
    // Full path through the nested module chain.
    a::b::c::d::print_ddd();
    test_reexport();
    test_use();
}
fn test_reexport() {
a::d::print_ddd();
}
/// Calls `print_ddd` by spelling out the full nested path directly.
fn test_use() {
    a::b::c::d::print_ddd();
}
|
#![no_std]
/// A pair combinator holding two values of independent types.
pub struct And<A, B>(pub A, pub B);
/// Defines one or more `#[repr(transparent)]` newtype wrappers, each paired
/// with a free function that reinterprets a reference to the inner type as a
/// reference to the wrapper.
macro_rules! define {
    ( $( pub struct $name:ident / $lower:ident ( pub $ty:ty ) ; )* ) => {
        $(
            #[repr(transparent)]
            #[derive(Clone)]
            pub struct $name(pub $ty);
            /// Reinterprets a `&$ty` as a `&$name` without copying.
            pub fn $lower(target: &$ty) -> &$name {
                // unsafe { ::core::mem::transmute(target) }
                // SAFETY: the wrapper is `#[repr(transparent)]` over `$ty`,
                // so both types share the same layout and the pointer cast
                // is valid.
                unsafe { &*(target as *const $ty as *const $name) }
            }
        )*
    };
}
pub mod sha2;
pub mod hmac;
pub mod poly1305;
pub mod chacha20;
pub mod salsa20;
pub mod chacha20poly1305;
pub mod ed25519;
pub mod curve25519;
pub mod nacl;
|
// Variables in rust are immutable by default
// Rust is a block-scoped language
/// Demonstrates variable bindings: immutability, `mut`, constants and
/// destructuring a tuple into multiple bindings. Prints its results.
pub fn run() {
    let name = "Haardik";
    // The following will lead to an error
    // cannot assign twice to immutable variable
    // let age = 21;
    // age = 22
    let mut age = 21;
    println!("My name is {} and I am {}", name, age);
    age = 22;
    println!("My name is {} and I am {}", name, age);
    // Define constants
    // need to explicitly define a type
    // NOTE: was `001` — leading zeros trip clippy::zero_prefixed_literal
    // (reads like octal) without changing the value; `1` is identical.
    const ID: i32 = 1;
    println!("ID: {}", ID);
    // Assign multiple variables
    let (my_name, my_age) = ("Haardik", 21);
    println!("{} is {}", my_name, my_age);
    println!("-----------------------");
}
|
// Reader/writer proxy types generated for the EXTSCR register.
#[doc = "Register `EXTSCR` reader"]
pub type R = crate::R<EXTSCR_SPEC>;
#[doc = "Register `EXTSCR` writer"]
pub type W = crate::W<EXTSCR_SPEC>;
// Write-only values for the C1CSSF field: only writing 1 (`Clear`) has an
// effect, so no variant is generated for 0.
#[doc = "Clear CPU1 Stop Standby flags\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum C1CSSFW_AW {
    #[doc = "1: Setting this bit clears the C1STOPF and C1SBF bits"]
    Clear = 1,
}
impl From<C1CSSFW_AW> for bool {
#[inline(always)]
fn from(variant: C1CSSFW_AW) -> Self {
variant as u8 != 0
}
}
// Proxy for writing the single-bit C1CSSF field at bit offset `O`.
#[doc = "Field `C1CSSF` writer - Clear CPU1 Stop Standby flags"]
pub type C1CSSF_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, C1CSSFW_AW>;
impl<'a, REG, const O: u8> C1CSSF_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "Setting this bit clears the C1STOPF and C1SBF bits"]
    #[inline(always)]
    pub fn clear(self) -> &'a mut crate::W<REG> {
        // Convenience wrapper: writes the `Clear` (1) value to the field.
        self.variant(C1CSSFW_AW::Clear)
    }
}
// Reader proxy for the single-bit C1SBF field.
#[doc = "Field `C1SBF` reader - System Standby flag for CPU1. (no core states retained)"]
pub type C1SBF_R = crate::BitReader<C1SBF_A>;
// Enumerated values for the read-only C1SBF bit.
#[doc = "System Standby flag for CPU1. (no core states retained)\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum C1SBF_A {
    #[doc = "0: System has not been in Standby mode"]
    NoStandby = 0,
    #[doc = "1: System has been in Standby mode"]
    Standby = 1,
}
impl From<C1SBF_A> for bool {
#[inline(always)]
fn from(variant: C1SBF_A) -> Self {
variant as u8 != 0
}
}
impl C1SBF_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> C1SBF_A {
        // A set bit means the system has been in Standby mode.
        if self.bits {
            C1SBF_A::Standby
        } else {
            C1SBF_A::NoStandby
        }
    }
    #[doc = "System has not been in Standby mode"]
    #[inline(always)]
    pub fn is_no_standby(&self) -> bool {
        matches!(self.variant(), C1SBF_A::NoStandby)
    }
    #[doc = "System has been in Standby mode"]
    #[inline(always)]
    pub fn is_standby(&self) -> bool {
        matches!(self.variant(), C1SBF_A::Standby)
    }
}
// Reader proxy for the single-bit C1STOP2F field.
#[doc = "Field `C1STOP2F` reader - System Stop2 flag for CPU1. (partial core states retained)"]
pub type C1STOP2F_R = crate::BitReader<C1STOP2F_A>;
// Enumerated values for the read-only C1STOP2F bit.
#[doc = "System Stop2 flag for CPU1. (partial core states retained)\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum C1STOP2F_A {
    #[doc = "0: System has not been in Stop 2 mode"]
    NoStop = 0,
    #[doc = "1: System has been in Stop 2 mode"]
    Stop = 1,
}
impl From<C1STOP2F_A> for bool {
#[inline(always)]
fn from(variant: C1STOP2F_A) -> Self {
variant as u8 != 0
}
}
impl C1STOP2F_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> C1STOP2F_A {
        // A set bit means the system has been in Stop 2 mode.
        if self.bits {
            C1STOP2F_A::Stop
        } else {
            C1STOP2F_A::NoStop
        }
    }
    #[doc = "System has not been in Stop 2 mode"]
    #[inline(always)]
    pub fn is_no_stop(&self) -> bool {
        matches!(self.variant(), C1STOP2F_A::NoStop)
    }
    #[doc = "System has been in Stop 2 mode"]
    #[inline(always)]
    pub fn is_stop(&self) -> bool {
        matches!(self.variant(), C1STOP2F_A::Stop)
    }
}
// Reader proxy for the single-bit C1STOPF field.
#[doc = "Field `C1STOPF` reader - System Stop0, 1 flag for CPU1. (All core states retained)"]
pub type C1STOPF_R = crate::BitReader<C1STOPF_A>;
// Enumerated values for the read-only C1STOPF bit.
#[doc = "System Stop0, 1 flag for CPU1. (All core states retained)\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum C1STOPF_A {
    #[doc = "0: System has not been in Stop 0 or 1 mode"]
    NoStop = 0,
    #[doc = "1: System has been in Stop 0 or 1 mode"]
    Stop = 1,
}
impl From<C1STOPF_A> for bool {
#[inline(always)]
fn from(variant: C1STOPF_A) -> Self {
variant as u8 != 0
}
}
impl C1STOPF_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> C1STOPF_A {
        // A set bit means the system has been in Stop 0 or 1 mode.
        if self.bits {
            C1STOPF_A::Stop
        } else {
            C1STOPF_A::NoStop
        }
    }
    #[doc = "System has not been in Stop 0 or 1 mode"]
    #[inline(always)]
    pub fn is_no_stop(&self) -> bool {
        matches!(self.variant(), C1STOPF_A::NoStop)
    }
    #[doc = "System has been in Stop 0 or 1 mode"]
    #[inline(always)]
    pub fn is_stop(&self) -> bool {
        matches!(self.variant(), C1STOPF_A::Stop)
    }
}
// Reader proxy for the single-bit C1DS field.
#[doc = "Field `C1DS` reader - CPU1 deepsleep mode"]
pub type C1DS_R = crate::BitReader<C1DS_A>;
// Enumerated values for the read-only C1DS bit.
#[doc = "CPU1 deepsleep mode\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum C1DS_A {
    #[doc = "0: CPU is running or in sleep"]
    RunningOrSleep = 0,
    #[doc = "1: CPU is in Deep-Sleep"]
    DeepSleep = 1,
}
impl From<C1DS_A> for bool {
#[inline(always)]
fn from(variant: C1DS_A) -> Self {
variant as u8 != 0
}
}
impl C1DS_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> C1DS_A {
        // A set bit means the CPU is in Deep-Sleep.
        if self.bits {
            C1DS_A::DeepSleep
        } else {
            C1DS_A::RunningOrSleep
        }
    }
    #[doc = "CPU is running or in sleep"]
    #[inline(always)]
    pub fn is_running_or_sleep(&self) -> bool {
        matches!(self.variant(), C1DS_A::RunningOrSleep)
    }
    #[doc = "CPU is in Deep-Sleep"]
    #[inline(always)]
    pub fn is_deep_sleep(&self) -> bool {
        matches!(self.variant(), C1DS_A::DeepSleep)
    }
}
impl R {
    #[doc = "Bit 8 - System Standby flag for CPU1. (no core states retained)"]
    #[inline(always)]
    pub fn c1sbf(&self) -> C1SBF_R {
        // Test the bit with a mask instead of shift-and-and.
        C1SBF_R::new((self.bits & (1 << 8)) != 0)
    }
    #[doc = "Bit 9 - System Stop2 flag for CPU1. (partial core states retained)"]
    #[inline(always)]
    pub fn c1stop2f(&self) -> C1STOP2F_R {
        C1STOP2F_R::new((self.bits & (1 << 9)) != 0)
    }
    #[doc = "Bit 10 - System Stop0, 1 flag for CPU1. (All core states retained)"]
    #[inline(always)]
    pub fn c1stopf(&self) -> C1STOPF_R {
        C1STOPF_R::new((self.bits & (1 << 10)) != 0)
    }
    #[doc = "Bit 14 - CPU1 deepsleep mode"]
    #[inline(always)]
    pub fn c1ds(&self) -> C1DS_R {
        C1DS_R::new((self.bits & (1 << 14)) != 0)
    }
}
impl W {
    #[doc = "Bit 0 - Clear CPU1 Stop Standby flags"]
    #[inline(always)]
    #[must_use]
    pub fn c1cssf(&mut self) -> C1CSSF_W<EXTSCR_SPEC, 0> {
        C1CSSF_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        // Unsafe: bypasses the typed per-field writers; the caller must
        // ensure the raw value is valid for this register.
        self.bits = bits;
        self
    }
}
#[doc = "Power extended status and status clear register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`extscr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`extscr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct EXTSCR_SPEC;
impl crate::RegisterSpec for EXTSCR_SPEC {
    // The register is accessed as a 32-bit word.
    type Ux = u32;
}
#[doc = "`read()` method returns [`extscr::R`](R) reader structure"]
impl crate::Readable for EXTSCR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`extscr::W`](W) writer structure"]
impl crate::Writable for EXTSCR_SPEC {
    // Both modify bitmaps are zero for this register.
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets EXTSCR to value 0"]
impl crate::Resettable for EXTSCR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
// This file is part of Substrate.
// Copyright (C) 2017-2020 Parity Technologies (UK) Ltd.
// SPDX-License-Identifier: GPL-3.0-or-later WITH Classpath-exception-2.0
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
//! Substrate RPC servers.
#![warn(missing_docs)]
mod middleware;
use jsonrpc_core::{IoHandlerExtension, MetaIoHandler};
use log::error;
use pubsub::PubSubMetadata;
use std::io;
/// Maximal payload accepted by RPC servers.
// 15 MiB; applied to both HTTP request bodies and WS frames below.
const MAX_PAYLOAD: usize = 15 * 1024 * 1024;
/// Default maximum number of connections for WS RPC servers.
const WS_MAX_CONNECTIONS: usize = 100;
/// The RPC IoHandler containing all requested APIs.
pub type RpcHandler<T> = pubsub::PubSubHandler<T, RpcMiddleware>;
// Re-export the platform-dependent server constructors and middleware types.
pub use self::inner::*;
pub use middleware::{RpcMetrics, RpcMiddleware};
/// Construct rpc `IoHandler`
///
/// Wraps the middleware into a pubsub handler, applies the given extension
/// and registers a `rpc_methods` endpoint listing every method that was
/// registered before it.
pub fn rpc_handler<M: PubSubMetadata>(
    extension: impl IoHandlerExtension<M>,
    rpc_middleware: RpcMiddleware,
) -> RpcHandler<M> {
    let mut io = pubsub::PubSubHandler::new(MetaIoHandler::with_middleware(rpc_middleware));
    extension.augment(&mut io);
    // Snapshot the method names registered so far (does not include
    // `rpc_methods` itself) and serialize them once, up front.
    let mut methods: Vec<String> = io.iter().map(|(name, _)| name.clone()).collect();
    methods.sort();
    let methods = serde_json::to_value(&methods)
        .expect("Serialization of Vec<String> is infallible; qed");
    // add an endpoint to list all available methods.
    io.add_method("rpc_methods", move |_| {
        Ok(serde_json::json!({
            "version": 1,
            "methods": methods.clone(),
        }))
    });
    io
}
#[cfg(not(target_os = "unknown"))]
mod inner {
    use super::*;
    /// Type alias for ipc server
    pub type IpcServer = ipc::Server;
    /// Type alias for http server
    pub type HttpServer = http::Server;
    /// Type alias for ws server
    pub type WsServer = ws::Server;
    /// Start HTTP server listening on given address.
    ///
    /// **Note**: Only available if `not(target_os = "unknown")`.
    pub fn start_http<M: pubsub::PubSubMetadata + Default>(
        addr: &std::net::SocketAddr,
        cors: Option<&Vec<String>>,
        io: RpcHandler<M>,
    ) -> io::Result<http::Server> {
        http::ServerBuilder::new(io)
            .threads(4)
            .health_api(("/health", "system_health"))
            // Host filtering and the secure REST API are only enabled when a
            // CORS list was supplied.
            .allowed_hosts(hosts_filtering(cors.is_some()))
            .rest_api(if cors.is_some() { http::RestApi::Secure } else { http::RestApi::Unsecure })
            .cors(map_cors::<http::AccessControlAllowOrigin>(cors))
            .max_request_body_size(MAX_PAYLOAD)
            .start_http(addr)
    }
    /// Start IPC server listening on given path.
    ///
    /// **Note**: Only available if `not(target_os = "unknown")`.
    pub fn start_ipc<M: pubsub::PubSubMetadata + Default>(
        addr: &str,
        io: RpcHandler<M>,
    ) -> io::Result<ipc::Server> {
        let builder = ipc::ServerBuilder::new(io);
        // NOTE(review): `target_os = "unix"` is not a valid `target_os` value
        // (valid values are e.g. "linux", "macos"), so this cfg never matches
        // and the 0o600 socket permissions below are never applied. The
        // intended predicate is presumably `#[cfg(unix)]` — confirm; enabling
        // it may also require rebinding `builder` if `set_security_attributes`
        // takes `self` by value (verify against the ipc crate API).
        #[cfg(target_os = "unix")]
        builder.set_security_attributes({
            let security_attributes = ipc::SecurityAttributes::empty();
            security_attributes.set_mode(0o600)?;
            security_attributes
        });
        builder.start(addr)
    }
    /// Start WS server listening on given address.
    ///
    /// **Note**: Only available if `not(target_os = "unknown")`.
    pub fn start_ws<
        M: pubsub::PubSubMetadata + From<jsonrpc_core::futures::sync::mpsc::Sender<String>>,
    >(
        addr: &std::net::SocketAddr,
        max_connections: Option<usize>,
        cors: Option<&Vec<String>>,
        io: RpcHandler<M>,
    ) -> io::Result<ws::Server> {
        // The meta extractor turns each connection's sender into the
        // handler's metadata type via the `From` bound above.
        ws::ServerBuilder::with_meta_extractor(io, |context: &ws::RequestContext| {
            context.sender().into()
        })
        .max_payload(MAX_PAYLOAD)
        .max_connections(max_connections.unwrap_or(WS_MAX_CONNECTIONS))
        .allowed_origins(map_cors(cors))
        .allowed_hosts(hosts_filtering(cors.is_some()))
        .start(addr)
        // Collapse the ws error type into `io::Error` for a uniform return.
        .map_err(|err| match err {
            ws::Error::Io(io) => io,
            ws::Error::ConnectionClosed => io::ErrorKind::BrokenPipe.into(),
            e => {
                error!("{}", e);
                io::ErrorKind::Other.into()
            },
        })
    }
    /// Converts an optional list of CORS origins into the server's domain
    /// validation type (`None` disables the validation).
    fn map_cors<T: for<'a> From<&'a str>>(
        cors: Option<&Vec<String>>,
    ) -> http::DomainsValidation<T> {
        cors.map(|x| x.iter().map(AsRef::as_ref).map(Into::into).collect::<Vec<_>>()).into()
    }
    /// Returns a host filter that is either disabled or restricted to the
    /// listening address.
    fn hosts_filtering(enable: bool) -> http::DomainsValidation<http::Host> {
        if enable {
            // NOTE The listening address is whitelisted by default.
            // Setting an empty vector here enables the validation
            // and allows only the listening address.
            http::DomainsValidation::AllowOnly(vec![])
        } else {
            http::DomainsValidation::Disabled
        }
    }
}
// On `target_os = "unknown"` (e.g. wasm) no servers can be spawned, so the
// module is empty and none of the server constructors are available.
#[cfg(target_os = "unknown")]
mod inner {}
|
//! This module handles the parsing of a [template](`super::Template`).
#[cfg(test)]
mod tests;
use color_eyre::eyre::{eyre, Result};
use color_eyre::Report;
use super::block::{Block, BlockHint, If, IfExpr, IfOp, Var, VarEnv, VarEnvSet};
use super::diagnostic::{Diagnostic, DiagnosticBuilder, DiagnosticLevel};
use super::session::Session;
use super::source::Source;
use super::span::{ByteSpan, Pos, Spanned};
use super::Template;
use crate::template::block::BlockKind;
/// This is the parser which converts a [source](`super::source::Source`) into
/// [blocks](`super::block::Block`), which make up a
/// [template](`super::Template`).
#[derive(Debug, Clone)]
pub struct Parser<'a> {
    /// The source which will be parsed.
    source: Source<'a>,
    /// The session where parsing errors/diagnostics will be recorded to.
    session: Session,
    /// An iterator of all blocks found within `source`.
    /// (`BlockIter` is `Copy`, which [`Parser::peek_block_hint`] relies on.)
    blocks: BlockIter<'a>,
}
impl<'a> Parser<'a> {
/// Creates a new parser for the given `source`.
pub const fn new(source: Source<'a>) -> Self {
    Self {
        // The iterator borrows the source content; `source` itself is
        // moved into the struct afterwards.
        blocks: BlockIter::new(source.content),
        source,
        session: Session::new(),
    }
}
/// Consumes self and tries to resolve each block found within
/// [`Parser::source`].
///
/// If no errors occurred it will return a [template](`super::Template`).
pub fn parse(mut self) -> Result<Template<'a>> {
    let mut blocks = Vec::new();
    loop {
        match self.next_top_level_block() {
            Some(Ok(block)) => blocks.push(block),
            Some(Err(builder)) => self.report_diagnostic(builder.build()),
            None => break,
        }
    }
    // Emit all recorded diagnostics, then bail out if the session failed.
    self.session.emit(&self.source);
    self.session.try_finish()?;
    Ok(Template {
        source: self.source,
        blocks,
    })
}
/// Adds a diagnostic to the session.
///
/// If [Diagnostic::level](`super::diagnostic::Diagnostic::level`) is
/// [DiagnosticLevel::Error](`super::diagnostic::DiagnosticLevel::Error`)
/// it will also mark the session as failed.
fn report_diagnostic(&mut self, diagnostic: Diagnostic) {
if diagnostic.level() == &DiagnosticLevel::Error {
self.session.mark_failed();
}
self.session.report(diagnostic);
}
/// Tries to resolve the next "top-level" block.
///
/// Top-level block in this context means, a block which can appear on its
/// own without needing another block preceding it.
///
/// # Examples
///
/// - Top-level block: [BlockHint::Print](`super::block::BlockHint::Print`) as it can stand on its own.
/// - **NON** top-level block: [BlockHint::ElIf](`super::block::BlockHint::ElIf`) as it needs to be after a preceding [BlockHint::IfStart](`super::block::BlockHint::IfStart`) block.
///
/// # Errors
///
/// Returns an error if [`Parser::blocks`] failed to get the next block.
/// Returns an error if a non-top-level block was found.
fn next_top_level_block(&mut self) -> Option<Result<Block, DiagnosticBuilder>> {
    let Spanned { span, value: hint } = match self.blocks.next()? {
        Ok(x) => x,
        Err(err) => return Some(Err(err)),
    };
    log::trace!("{:?}: {}", hint, &self.source[span]);
    // Dispatch on the scanned hint; the `parse_*` helpers refine the span
    // into a concrete block kind.
    let block = match hint {
        BlockHint::Text => Ok(self.parse_text(span)),
        BlockHint::Comment => Ok(self.parse_comment(span)),
        BlockHint::Escaped => Ok(self.parse_escaped(span)),
        BlockHint::Var => self
            .parse_variable(span)
            .map(|var| Block::new(span, BlockKind::Var(var))),
        BlockHint::Print => Ok(self.parse_print(span)),
        BlockHint::IfStart => self
            .parse_if(span)
            .map(|Spanned { span, value }| Block::new(span, BlockKind::If(value))),
        // Illegal top level blocks
        BlockHint::ElIf => Err(DiagnosticBuilder::new(DiagnosticLevel::Error)
            .message("top-level `elif` block")
            .description("an `elif` block must always come after an `if` block")
            .primary_span(span)),
        BlockHint::Else => Err(DiagnosticBuilder::new(DiagnosticLevel::Error)
            .message("top-level `else` block")
            // Fixed typo in the user-facing message (was "`elfi`").
            .description("an `else` block must always come after an `if` or `elif` block")
            .primary_span(span)),
        BlockHint::IfEnd => Err(DiagnosticBuilder::new(DiagnosticLevel::Error)
            .message("top-level `fi` block")
            .description("an `fi` can only be used to close an open `if` block")
            .primary_span(span)),
    };
    Some(block)
}
/// Resolves the `span` to a block with
/// [BlockKind::Text](`super::block::BlockKind::Text`).
///
/// Infallible: any span can be treated as raw text.
const fn parse_text(&self, span: ByteSpan) -> Block {
    Block::new(span, BlockKind::Text)
}
/// Resolves the `span` to a block with
/// [BlockKind::Comment](`super::block::BlockKind::Comment`).
///
/// Infallible; the span keeps the delimiters.
const fn parse_comment(&self, span: ByteSpan) -> Block {
    // {{!-- ... --}}
    Block::new(span, BlockKind::Comment)
}
/// Resolves the `span` to a block with
/// [BlockKind::Escaped](`super::block::BlockKind::Escaped`).
fn parse_escaped(&self, span: ByteSpan) -> Block {
    // {{{ ... }}}
    // Trim the 3-byte delimiters on both sides so the kind only carries
    // the inner content span.
    Block::new(span, BlockKind::Escaped(span.offset_low(3).offset_high(-3)))
}
/// Tries to resolve the `span` to a block with
/// [BlockKind::Var](`super::block::BlockKind::Var`).
///
/// # Errors
///
/// Returns an error if the call to [`parse_var`] fails.
fn parse_variable(&self, span: ByteSpan) -> Result<Var, DiagnosticBuilder> {
    // Strip the `{{` / `}}` delimiters.
    let span_inner = span.offset_low(2).offset_high(-2);
    let content_inner = &self.source[span_inner];
    // +2 for block opening
    let offset = span.low().as_usize() + 2;
    parse_var(content_inner, offset).map_err(|err| {
        DiagnosticBuilder::new(DiagnosticLevel::Error)
            .message("failed to parse variable block")
            .description(err.to_string())
            .primary_span(span)
    })
}
/// Resolves the `span` to a block with
/// [BlockKind::Print](`super::block::BlockKind::Print`).
fn parse_print(&self, span: ByteSpan) -> Block {
    // {{@print ... }}
    // Skip the 9-byte `{{@print ` prefix and the trailing `}}`.
    Block::new(span, BlockKind::Print(span.offset_low(9).offset_high(-2)))
}
/// Tries to resolve the `span` to a block with
/// [BlockKind::If](`super::block::BlockKind::If`).
///
/// During this operation it will also try to parse all other blocks
/// contained between the if related blocks.
///
/// # Examples
///
/// ```text
/// {{@if ...}}
/// {{@print ...}} <-- contained block
/// {{@else}}
/// {{ ... }} <-- another contained block
/// {{@fi}}
/// ```
///
/// # Errors
///
/// Returns an error if a call to [`parse_var`] fails.
/// Returns an error if no closing [BlockHint::IfEnd](`super::block::BlockHint::IfEnd`) was found.
/// Bubbles up any error which may occur during the subsequent calls to
/// [`Parser::parse_if_enclosed_blocks`].
fn parse_if(&mut self, span: ByteSpan) -> Result<Spanned<If>, DiagnosticBuilder> {
    // Parse the `{{@if ...}}` head expression.
    let head = span.span(
        self.parse_if_start(span)
            .map_err(|build| build.label_span(span, "while parsing this `if` block"))?,
    );
    // collect all nested blocks
    // (failures inside the body are reported but do not abort the `if`)
    let head_nested = self
        .parse_if_enclosed_blocks()
        .into_iter()
        .filter_map(|res| match res {
            Ok(block) => Some(block),
            Err(builder) => {
                self.report_diagnostic(
                    builder
                        .label_span(*head.span(), "while parsing this `if` block")
                        .build(),
                );
                None
            }
        })
        .collect();
    // Advance to the next if-related block (`elif`, `else` or `fi`).
    let Spanned {
        mut span,
        value: mut hint,
    } = self
        .blocks
        .next()
        .ok_or_else(|| {
            DiagnosticBuilder::new(DiagnosticLevel::Error)
                .message("unexpected end of `if` block")
                .description("close the `if` block with `{{@fi}}`")
                .primary_span(span)
                .label_span(*head.span(), "While parsing this `if` block")
        })?
        .map_err(|build| build.label_span(*head.span(), "while parsing this `if` block"))?;
    // check for elif
    // (zero or more `{{@elif ...}}` arms, each with its own nested body)
    let mut elifs = Vec::new();
    while hint == BlockHint::ElIf {
        let elif = span.span(self.parse_elif(span).map_err(|build| {
            build.label_span(*head.span(), "while parsing this `if` block")
        })?);
        let elif_nested = self
            .parse_if_enclosed_blocks()
            .into_iter()
            .filter_map(|res| match res {
                Ok(block) => Some(block),
                Err(builder) => {
                    self.report_diagnostic(
                        builder
                            .label_span(span, "while parsing this `elif` block")
                            .build(),
                    );
                    None
                }
            })
            .collect();
        elifs.push((elif, elif_nested));
        // Fetch the block that follows this `elif` arm.
        let Spanned {
            span: _span,
            value: _hint,
        } = self
            .blocks
            .next()
            .ok_or_else(|| {
                DiagnosticBuilder::new(DiagnosticLevel::Error)
                    .message("unexpected end of `elif` block")
                    .description("close the `if` block with `{{@fi}}`")
                    .primary_span(span)
                    .label_span(*head.span(), "While parsing this `if` block")
            })?
            .map_err(|build| build.label_span(*head.span(), "while parsing this `if` block"))?;
        span = _span;
        hint = _hint;
    }
    // Optional `{{@else}}` arm.
    let els = if hint == BlockHint::Else {
        let els = self
            .parse_else(span)
            .map_err(|build| build.label_span(*head.span(), "while parsing this `if` block"))?;
        let els_nested = self
            .parse_if_enclosed_blocks()
            .into_iter()
            .filter_map(|res| match res {
                Ok(block) => Some(block),
                Err(builder) => {
                    self.report_diagnostic(
                        builder
                            .label_span(span, "while parsing this `else` block")
                            .build(),
                    );
                    None
                }
            })
            .collect();
        // Fetch the block that follows the `else` arm.
        let Spanned {
            span: _span,
            value: _hint,
        } = self
            .blocks
            .next()
            .ok_or_else(|| {
                DiagnosticBuilder::new(DiagnosticLevel::Error)
                    .message("unexpected end of `else` block")
                    .description("close the `if` block with `{{@fi}}`")
                    .primary_span(span)
                    .label_span(*head.span(), "While parsing this `if` block")
            })?
            .map_err(|build| build.label_span(*head.span(), "while parsing this `if` block"))?;
        span = _span;
        hint = _hint;
        Some((els, els_nested))
    } else {
        None
    };
    // The current block must now be the closing `{{@fi}}`.
    let end = if hint == BlockHint::IfEnd {
        self.parse_if_end(span)
            .map_err(|build| build.label_span(*head.span(), "while parsing this `if` block"))?
    } else {
        return Err(DiagnosticBuilder::new(DiagnosticLevel::Error)
            .message("unexpected end of `if` block")
            .description("close the `if` block with `{{@fi}}`")
            .primary_span(span)
            .label_span(*head.span(), "While parsing this `if` block"));
    };
    // Span covering everything from `{{@if` to `{{@fi}}`.
    let whole_if = head.span.union(&end);
    Ok(whole_if.span(If {
        head: (head, head_nested),
        elifs,
        els,
        end,
    }))
}
/// Tries to resolve the `span` which contains a whole
/// [BlockHint::IfStart](`super::block::BlockHint::IfStart`) to a
/// [IfExpr](`super::block::IfExpr`).
///
/// # Errors
///
/// An error is returned if it fails to resolve the expression (related:
/// [`Parser::parse_if_expr`]).
fn parse_if_start(&self, span: ByteSpan) -> Result<IfExpr, DiagnosticBuilder> {
    // {{@if {{VAR}} (!=|==) "LIT" }}
    // Skip the 6-byte `{{@if ` prefix and the trailing `}}`.
    let expr_span = span.offset_low(6).offset_high(-2);
    self.parse_if_expr(expr_span)
}
/// Tries to resolve the `span` which contains a whole
/// [BlockHint::ElIf](`super::block::BlockHint::ElIf`) to a
/// [IfExpr](`super::block::IfExpr`).
///
/// # Errors
///
/// An error is returned if it fails to resolve the expression (related:
/// [`Parser::parse_if_expr`]).
fn parse_elif(&self, span: ByteSpan) -> Result<IfExpr, DiagnosticBuilder> {
    // {{@elif {{VAR}} (!=|==) "LIT" }}
    // Skip the 8-byte `{{@elif ` prefix and the trailing `}}`.
    let expr_span = span.offset_low(8).offset_high(-2);
    self.parse_if_expr(expr_span)
}
/// Tries to validate that `span` contains exactly the literal `{{@else}}`
/// ([BlockHint::Else](`super::block::BlockHint::Else`)).
///
/// # Errors
///
/// An error is returned if `span` does not contain `{{@else}}`.
fn parse_else(&self, span: ByteSpan) -> Result<ByteSpan, DiagnosticBuilder> {
    if &self.source[span] == "{{@else}}" {
        Ok(span)
    } else {
        Err(DiagnosticBuilder::new(DiagnosticLevel::Error)
            .message("expected a `else` block")
            .primary_span(span))
    }
}
/// Tries to validate that `span` contains a valid
/// [BlockHint::IfEnd](`super::block::BlockHint::IfEnd`) (`{{@fi}}`).
///
/// # Errors
///
/// An error is returned if `span` does not contain exactly `{{@fi}}`.
fn parse_if_end(&self, span: ByteSpan) -> Result<ByteSpan, DiagnosticBuilder> {
    if &self.source[span] == "{{@fi}}" {
        Ok(span)
    } else {
        Err(DiagnosticBuilder::new(DiagnosticLevel::Error)
            .message("expected a `fi` block")
            .primary_span(span))
    }
}
/// Tries to resolve all components that make up an if expression.
///
/// These currently come in two forms:
///
/// - {{VAR}} (!=|==) "OTHER": Compare value of VAR with the literal OTHER
/// - (!){{VAR}}: Checks if the variable is (not) present/can (not) be resolved.
///
/// # Errors
///
/// An error is returned if `span` can not be interpreted as an if
/// expression.
fn parse_if_expr(&self, span: ByteSpan) -> Result<IfExpr, DiagnosticBuilder> {
    // {{VAR}} (!=|==) "OTHER" OR (!){{VAR}}
    let content = &self.source[span];
    // Read optional `!` for not_exists
    let hat_not_present_prefix = content.trim().as_bytes().starts_with(b"!");
    // read var
    let var_block_start = content.find("{{").ok_or_else(|| {
        DiagnosticBuilder::new(DiagnosticLevel::Error)
            .message("expected a variable block")
            .description("add a variable block with `{{VARIABLE_NAME}}`")
            .primary_span(span)
    })?;
    // NOTE(review): the error span below is built from indices relative to
    // `content`, not offset by `span.low()` like `var_block_span` — confirm
    // whether this is intentional.
    let var_block_end = content.find("}}").ok_or_else(|| {
        DiagnosticBuilder::new(DiagnosticLevel::Error)
            .message("variable block not closed")
            .description("add `}}` to the close the open variable block")
            .primary_span(ByteSpan::new(var_block_start, var_block_start + 2))
    })? + 2;
    // Absolute span (within the whole source) of the `{{VAR}}` block.
    let var_block_span = ByteSpan::new(
        span.low().as_usize() + var_block_start,
        span.low().as_usize() + var_block_end,
    );
    let var = self.parse_variable(var_block_span)?;
    // check if it is an exists expr
    // (`var_block_end` already includes the closing `}}`)
    let remainder = &content[var_block_end..];
    if remainder.trim().is_empty() {
        if hat_not_present_prefix {
            Ok(IfExpr::NotExists { var })
        } else {
            Ok(IfExpr::Exists { var })
        }
    } else {
        // NOTE(review): a leading `!` is silently ignored for compare
        // expressions — confirm this is the intended behavior.
        let op = parse_ifop(&content[var_block_end..]).map_err(|_| {
            DiagnosticBuilder::new(DiagnosticLevel::Error)
                .message("failed to find if operation")
                .description("add either `==` or `!=` after the variable block")
                .primary_span(var_block_span)
        })?;
        let other = parse_other(
            &content[var_block_end..],
            span.low().as_usize() + var_block_end,
        )
        .map_err(|_| {
            DiagnosticBuilder::new(DiagnosticLevel::Error)
                .message("failed to find right hand side of the if operation")
                .description("add a literal to compare against with `\"LITERAL\"`")
                .primary_span(var_block_span)
        })?;
        Ok(IfExpr::Compare { var, op, other })
    }
}
/// Eagerly tries to parse all "non-if blocks" (related:
/// [BlockHint::is_if_subblock](`super::block::BlockHint::is_if_subblock`))
/// and collects them into a vector. If an "if-subblock" is found it
/// returns all blocks found before it. The next block [`Parser::blocks`]
/// will return is the found "if-subblock".
fn parse_if_enclosed_blocks(&mut self) -> Vec<Result<Block, DiagnosticBuilder>> {
    let mut enclosed = Vec::new();
    loop {
        match self.peek_block_hint() {
            // Keep consuming blocks until an if-subblock (or the end of
            // the input / a peek error) shows up.
            Some(hint) if !hint.is_if_subblock() => {
                let next_block = self
                    .next_top_level_block()
                    .expect("Some block to be present after peek");
                enclosed.push(next_block);
            }
            _ => break,
        }
    }
    enclosed
}
/// Peeks at the next block hint. This does not affect any state of the
/// resolver.
fn peek_block_hint(&self) -> Option<BlockHint> {
    // Create a copy of the block iter to not mess up the state while peeking
    // (`BlockIter` is `Copy`, so this duplicates the cursor cheaply).
    let mut peek = self.blocks;
    // Scan errors while peeking are treated as "no hint".
    peek.next()?.ok().map(|spanned| spanned.into_value())
}
}
/// A span together with an optional block hint, describing the type of the
/// block contained by the span.
/// (`None` means the scanner could not classify the block further.)
type NextBlock = (ByteSpan, Option<BlockHint>);
/// An error together with the amount of bytes to skip to continue parsing. The
/// amount tries to skip the erroneous part.
type NextBlockError = (Option<usize>, Report);
/// Tries to find the next block contained in `s`.
///
/// It first tries to search for the "special" blocks and if none match, the
/// block is of type [BlockHint::Text](`super::block::BlockHint::Text`). It
/// does not skip any part of `s`, as the next block will always start at index
/// `0` of `s`.
///
/// # Errors
///
/// An error is returned if the start of a block was detected but no closing
/// counterpart was found.
fn next_block(s: &str) -> Option<Result<NextBlock, NextBlockError>> {
    if s.is_empty() {
        return None;
    }
    if let Some(low) = s.find("{{") {
        if low > 0 {
            // found text block
            // (everything before the first `{{` is plain text)
            Some(Ok((ByteSpan::new(0usize, low), Some(BlockHint::Text))))
        } else if let Some(b'{') = s.as_bytes().get(low + 2) {
            // block is an escaped block
            if let Some(high) = s.find("}}}") {
                Some(Ok((ByteSpan::new(low, high + 3), Some(BlockHint::Escaped))))
            } else {
                // skip the `{{{` so parsing can continue after the error
                Some(Err((
                    Some(3),
                    eyre!("Found opening for an escaped block but no closing"),
                )))
            }
        } else if let Some(b"!--") = s.as_bytes().get(low + 2..low + 5) {
            // block is a comment block
            if let Some(high) = s.find("--}}") {
                Some(Ok((ByteSpan::new(low, high + 4), Some(BlockHint::Comment))))
            } else {
                // skip the `{{!--` so parsing can continue after the error
                Some(Err((
                    Some(5),
                    eyre!("Found opening for a comment block but no closing"),
                )))
            }
        } else {
            // check depth
            // (skip over nested `{{ ... }}` pairs to find the `}}` that
            // actually matches the opening at `low`)
            let mut openings = s[low + 1..].match_indices("{{").map(|(idx, _)| idx);
            let closings = s[low + 1..].match_indices("}}").map(|(idx, _)| idx);
            for high in closings {
                // check if there is an opening.
                if let Some(opening) = openings.next() {
                    // check if opening comes before the closing.
                    if opening < high {
                        // opening lies before the closing. Continue to search
                        // for the matching closing of low.
                        continue;
                    }
                }
                // translate the relative index back into an index into `s`
                let high = high + 2 + (low + 1);
                return Some(Ok((ByteSpan::new(low, high), None)));
            }
            Some(Err((
                Some(2),
                eyre!("Found opening for a block but no closing"),
            )))
        }
    } else {
        // Found text block
        Some(Ok((ByteSpan::new(0usize, s.len()), Some(BlockHint::Text))))
    }
}
/// Tries to parse `inner` as a [`Var`](`super::block::Var`).
///
/// The offset is used to correctly locate `inner` in a bigger parent string,
/// as `inner` is supposed to be only a small slice from a bigger string. This
/// means, that all calculated indices are offset by `offset`.
///
/// # Note
///
/// `inner` must be without the `{{` and `}}`.
/// `offset` must include the starting `{{`.
///
/// # Errors
///
/// An error is returned if a [variable environment](`super::block::VarEnv`)
/// was found more than once.
/// An error is returned if the name of the variable is not valid (related:
/// [`is_var_name_symbol`]).
fn parse_var(inner: &str, mut offset: usize) -> Result<Var> {
    // save original length to keep track of the offset
    let orig_len = inner.len();
    // remove preceding white spaces
    let inner = inner.trim_start();
    // increase offset to account for removed white spaces
    offset += orig_len - inner.len();
    // remove trailing white spaces. Offset doesn't need to change.
    let mut inner = inner.trim_end();
    // check for envs
    // (`$` = environment, `#` = profile, `&` = dotfile prefix symbols)
    let envs = if matches!(
        inner.as_bytes().first(),
        Some(b'$') | Some(b'#') | Some(b'&')
    ) {
        let mut env_set = VarEnvSet::empty();
        // try to read all available envs
        // (at most `capacity()` prefix symbols are considered)
        for idx in 0..env_set.capacity() {
            let env = match inner.as_bytes().get(idx) {
                Some(b'$') => VarEnv::Environment,
                Some(b'#') => VarEnv::Profile,
                Some(b'&') => VarEnv::Dotfile,
                _ => break,
            };
            // a duplicate env symbol (failed `add`) is a hard error
            if !env_set.add(env) {
                return Err(eyre!(
                    "Specified duplicate variable environments at {}",
                    offset
                ));
            }
        }
        // adjust offset
        // (skip past the consumed env prefix symbols)
        offset += env_set.len();
        inner = &inner[env_set.len()..];
        env_set
    } else {
        VarEnvSet::default()
    };
    // check var name
    // - len > 0
    // - only ascii + _
    if inner.is_empty() {
        Err(eyre!("Empty variable name at {}", offset))
    } else if let Some(invalid) = inner.as_bytes().iter().find(|&&b| !is_var_name_symbol(b)) {
        Err(eyre!(
            "Found invalid symbol in variable name: (b`{}`; c`{}`)",
            invalid,
            // non-ASCII bytes are rendered as NUL in the char form
            if invalid.is_ascii() {
                *invalid as char
            } else {
                '\0'
            }
        ))
    } else {
        Ok(Var {
            envs,
            name: ByteSpan::new(offset, offset + inner.len()),
        })
    }
}
/// Tries to parse the content of `inner` as an [IfOp](`super::block::IfOp`).
///
/// If both operators occur, the one appearing first in `inner` wins.
///
/// # Errors
///
/// An error is returned if `inner` could not be interpreted as an if operand.
fn parse_ifop(inner: &str) -> Result<IfOp> {
    let eq = inner.find("==");
    let noteq = inner.find("!=");
    match (eq, noteq) {
        (Some(eq_idx), Some(noteq_idx)) if eq_idx < noteq_idx => Ok(IfOp::Eq),
        (Some(_), Some(_)) => Ok(IfOp::NotEq),
        (Some(_), None) => Ok(IfOp::Eq),
        (None, Some(_)) => Ok(IfOp::NotEq),
        (None, None) => Err(eyre!("Failed to find a if operand")),
    }
}
/// Parses the right-hand side of an if/elif comparison, which is a string
/// literal. The surrounding `"` characters are not part of the returned span.
///
/// # Errors
///
/// An error is returned if no `"` was found, or if an opening `"` has no
/// matching closing `"`.
fn parse_other(inner: &str, offset: usize) -> Result<ByteSpan> {
    let mut quote_positions = inner.match_indices('"').map(|(idx, _)| idx);
    let first = quote_positions.next();
    let second = quote_positions.next();

    match (first, second) {
        // Span covers the text strictly between the two quotes.
        (Some(open), Some(close)) => Ok(ByteSpan::new(offset + open + 1, offset + close)),
        (Some(open), None) => Err(eyre!(
            "Found opening `\"` at {} but no closing",
            offset + open
        )),
        _ => Err(eyre!("Found no other")),
    }
}
/// Returns `true` if `b` is a valid byte for a [variable](`super::block::Var`)
/// identifier: ASCII letters, ASCII digits, and the underscore.
const fn is_var_name_symbol(b: u8) -> bool {
    matches!(b, b'a'..=b'z' | b'A'..=b'Z' | b'0'..=b'9' | b'_')
}
/// An iterator over all [blocks](`super::block::BlockHint`) of a string.
///
/// Yields one `Spanned<BlockHint>` per block (text, var, if, comment, ...)
/// or a diagnostic when a block fails to parse.
#[derive(Debug, Clone, Copy)]
struct BlockIter<'a> {
    /// Content to iterate over.
    content: &'a str,
    /// Current index into `content`; everything before it has been consumed.
    index: usize,
}

impl<'a> BlockIter<'a> {
    /// Creates a new instance for `content`, starting at the beginning.
    const fn new(content: &'a str) -> Self {
        Self { content, index: 0 }
    }
}
impl<'a> Iterator for BlockIter<'a> {
    type Item = Result<Spanned<BlockHint>, DiagnosticBuilder>;

    fn next(&mut self) -> Option<Self::Item> {
        // `next_block` works on the unconsumed suffix, so its span (and any
        // error skip amount) is relative to `self.index`.
        let (mut span, hint) = match next_block(&self.content[self.index..])? {
            Ok(x) => x,
            Err((skip, err)) => {
                // skip erroneous part to allow recovery and avoid infinite loops
                let span = ByteSpan::new(self.index, self.index);
                if let Some(skip) = skip {
                    self.index += skip;
                    log::trace!("Skipping: {} ({})", skip, &self.content[self.index..]);
                } else {
                    // No recovery point known: consume the rest of the input.
                    self.index = self.content.len();
                }
                // The error span covers everything that was skipped.
                let span = span.with_high(self.index);
                log::trace!("Span: {}/{}", span, err);
                return Some(Err(DiagnosticBuilder::new(DiagnosticLevel::Error)
                    .message("failed to parse block")
                    .description(err.to_string())
                    .primary_span(span)));
            }
        };

        // Convert the relative span into an absolute one, then advance past it.
        span = span.offset(self.index as i32);
        self.index = span.high().as_usize();

        // `next_block` may already have classified the block.
        if let Some(hint) = hint {
            return Some(Ok(span.span(hint)));
        }

        let content = &self.content[span];

        // Check if its a text block (no opening and closing `{{\}}`)
        if !content.starts_with("{{") {
            return Some(Ok(span.span(BlockHint::Text)));
        }

        // Content without block opening and closing
        let content = &content[2..content.len() - 2];

        // Check for escaped
        // e.g. `{{{ Escaped }}}`
        if content.starts_with('{') && content.ends_with('}') {
            return Some(Ok(span.span(BlockHint::Escaped)));
        }

        // Check for comment
        // e.g. `{{!-- Comment --}}`
        if content.starts_with("!--") && content.ends_with("--") {
            return Some(Ok(span.span(BlockHint::Comment)));
        }

        // Check for print
        // e.g. `{{@print ... }}`
        if content.starts_with("@print ") {
            return Some(Ok(span.span(BlockHint::Print)));
        }

        // Check for if
        // e.g. `{{@if {{VAR}} == "LITERAL"}}`
        if content.starts_with("@if ") {
            return Some(Ok(span.span(BlockHint::IfStart)));
        }

        // Check for elif
        // e.g. `{{@elif {{VAR}} == "LITERAL"}}`
        if content.starts_with("@elif ") {
            return Some(Ok(span.span(BlockHint::ElIf)));
        }

        // Check for else
        // e.g. `{{@else}}`
        if content.starts_with("@else") {
            return Some(Ok(span.span(BlockHint::Else)));
        }

        // Check for fi
        // e.g. `{{@fi}}`
        if content.starts_with("@fi") {
            return Some(Ok(span.span(BlockHint::IfEnd)));
        }

        // Anything else inside `{{ }}` is treated as a variable block.
        Some(Ok(span.span(BlockHint::Var)))
    }
}
|
//! Copied from `hyper::header::shared`.
pub use self::charset::Charset;
pub use self::encoding::Encoding;
pub use self::entity::EntityTag;
pub use self::httpdate::HttpDate;
pub use language_tags::LanguageTag;
pub use self::quality_item::{Quality, QualityItem, qitem, q};
mod charset;
mod entity;
mod encoding;
mod httpdate;
mod quality_item;
|
use std::io;
use std::future::Future;
use std::net::SocketAddr;
use std::pin::Pin;
use std::sync::Arc;
use std::task::{Context, Poll};
use rustls::internal::pemfile;
use rustls::{Certificate, PrivateKey, ServerConfig};
use tokio::net::{TcpListener, TcpStream};
use tokio_rustls::{TlsAcceptor, Accept, server::TlsStream};
use tokio_rustls::rustls;
use crate::listener::{Connection, Listener};
/// Reads every PEM-encoded certificate from `reader`.
///
/// `pemfile` does not expose error details, so any parse failure is
/// reported as a generic `io::Error`.
fn load_certs(reader: &mut dyn io::BufRead) -> io::Result<Vec<Certificate>> {
    match pemfile::certs(reader) {
        Ok(certs) => Ok(certs),
        Err(_) => Err(io::Error::new(io::ErrorKind::Other, "invalid certificate")),
    }
}
/// Reads a single PEM-encoded private key (PKCS#1 RSA or PKCS#8) from
/// `reader` and verifies that rustls can use it for signing.
///
/// Fails if the PEM header is unrecognized, if the file contains zero or
/// more than one key, or if the parsed key is unusable.
fn load_private_key(reader: &mut dyn io::BufRead) -> io::Result<PrivateKey> {
    use std::io::{Cursor, Error, Read, ErrorKind::Other};

    // "rsa" (PKCS1) PEM files have a different first-line header than PKCS8
    // PEM files, use that to determine the parse function to use.
    let mut first_line = String::new();
    reader.read_line(&mut first_line)?;

    let private_keys_fn = match first_line.trim_end() {
        "-----BEGIN RSA PRIVATE KEY-----" => pemfile::rsa_private_keys,
        "-----BEGIN PRIVATE KEY-----" => pemfile::pkcs8_private_keys,
        _ => return Err(Error::new(Other, "invalid key header"))
    };

    // The first line was consumed above just to sniff the format, so chain
    // it back in front of the rest of `reader` before parsing.
    let key = private_keys_fn(&mut Cursor::new(first_line).chain(reader))
        .map_err(|_| Error::new(Other, "invalid key file"))
        .and_then(|mut keys| match keys.len() {
            0 => Err(Error::new(Other, "no valid keys found; is the file malformed?")),
            1 => Ok(keys.remove(0)),
            n => Err(Error::new(Other, format!("expected 1 key, found {}", n))),
        })?;

    // Ensure we can use the key.
    rustls::sign::any_supported_type(&key)
        .map_err(|_| Error::new(Other, "key parsed but is unusable"))
        .map(|_| key)
}
/// A TLS-wrapped TCP listener: accepts raw TCP connections and drives the
/// TLS handshake before yielding the encrypted stream.
pub struct TlsListener {
    /// Underlying TCP listener accepting raw connections.
    listener: TcpListener,
    /// Performs the server-side TLS handshake for accepted sockets.
    acceptor: TlsAcceptor,
    /// Tracks whether we are waiting for a TCP accept or a TLS handshake.
    state: TlsListenerState,
}

/// State machine for [`TlsListener::poll_accept`].
enum TlsListenerState {
    /// Waiting for the next TCP connection.
    Listening,
    /// A TCP connection was accepted; its TLS handshake is in flight.
    Accepting(Accept<TcpStream>),
}
impl Listener for TlsListener {
    type Connection = TlsStream<TcpStream>;

    /// Local address the TCP listener is bound to, if available.
    fn local_addr(&self) -> Option<SocketAddr> {
        self.listener.local_addr().ok()
    }

    /// Polls for the next fully-established TLS connection.
    ///
    /// Two-phase state machine: first wait for a TCP accept, then drive that
    /// socket's TLS handshake to completion. The loop lets a completed
    /// accept immediately start its handshake within the same poll call.
    fn poll_accept(
        mut self: Pin<&mut Self>,
        cx: &mut Context<'_>
    ) -> Poll<io::Result<Self::Connection>> {
        loop {
            match self.state {
                TlsListenerState::Listening => {
                    match self.listener.poll_accept(cx) {
                        Poll::Pending => return Poll::Pending,
                        Poll::Ready(Err(e)) => return Poll::Ready(Err(e)),
                        Poll::Ready(Ok((stream, _addr))) => {
                            // Start the handshake; it is polled on the next
                            // loop iteration.
                            let fut = self.acceptor.accept(stream);
                            self.state = TlsListenerState::Accepting(fut);
                        }
                    }
                }
                TlsListenerState::Accepting(ref mut fut) => {
                    match Pin::new(fut).poll(cx) {
                        Poll::Pending => return Poll::Pending,
                        Poll::Ready(result) => {
                            // Handshake finished (ok or err); go back to
                            // listening either way so one failed handshake
                            // does not wedge the listener.
                            self.state = TlsListenerState::Listening;
                            return Poll::Ready(result);
                        }
                    }
                }
            }
        }
    }
}
/// Binds a TCP listener on `address` and configures it for TLS using the
/// PEM-encoded `cert_chain` and `private_key` readers. ALPN advertises
/// HTTP/2 and HTTP/1.1.
///
/// # Errors
///
/// Returns an error if the certificate chain or private key is malformed,
/// if the key and certificate chain do not form a valid pair, or if binding
/// the socket fails.
pub async fn bind_tls<C: io::BufRead + Send, K: io::BufRead + Send>(
    address: SocketAddr,
    mut cert_chain: C,
    mut private_key: K,
) -> io::Result<TlsListener> {
    let cert_chain = load_certs(&mut cert_chain).map_err(|e| {
        let msg = format!("malformed TLS certificate chain: {}", e);
        io::Error::new(e.kind(), msg)
    })?;

    let key = load_private_key(&mut private_key).map_err(|e| {
        let msg = format!("malformed TLS private key: {}", e);
        io::Error::new(e.kind(), msg)
    })?;

    let listener = TcpListener::bind(address).await?;

    let client_auth = rustls::NoClientAuth::new();
    let mut tls_config = ServerConfig::new(client_auth);
    let cache = rustls::ServerSessionMemoryCache::new(1024);
    tls_config.set_persistence(cache);
    tls_config.ticketer = rustls::Ticketer::new();
    // A mismatched key/cert pair is user-supplied configuration, not a
    // programming bug: report it as an error instead of panicking.
    tls_config.set_single_cert(cert_chain, key).map_err(|e| {
        io::Error::new(io::ErrorKind::InvalidInput, format!("invalid TLS key/cert: {}", e))
    })?;
    tls_config.set_protocols(&[b"h2".to_vec(), b"http/1.1".to_vec()]);

    let acceptor = TlsAcceptor::from(Arc::new(tls_config));
    let state = TlsListenerState::Listening;
    Ok(TlsListener { listener, acceptor, state })
}
impl Connection for TlsStream<TcpStream> {
    /// Peer address of the underlying TCP stream, if known.
    fn remote_addr(&self) -> Option<SocketAddr> {
        let (tcp_stream, _session) = self.get_ref();
        tcp_stream.remote_addr()
    }
}
#[cfg(test)]
mod test {
    use super::*;
    use std::io::Cursor;

    /// Loads a fixture file from the repository's TLS example directory.
    macro_rules! tls_example_key {
        ($k:expr) => {
            include_bytes!(concat!(env!("CARGO_MANIFEST_DIR"), "/../../examples/tls/private/", $k))
        }
    }

    #[test]
    fn verify_load_private_keys_of_different_types() -> io::Result<()> {
        // One fixture per supported key flavor: PKCS#1 RSA, plus PKCS#8
        // ECDSA (P-256/P-384) and Ed25519.
        let keys: Vec<&[u8]> = vec![
            tls_example_key!("rsa_sha256_key.pem"),
            tls_example_key!("ecdsa_nistp256_sha256_key_pkcs8.pem"),
            tls_example_key!("ecdsa_nistp384_sha384_key_pkcs8.pem"),
            tls_example_key!("ed25519_key.pem"),
        ];

        for key in keys {
            load_private_key(&mut Cursor::new(key))?;
        }

        Ok(())
    }

    #[test]
    fn verify_load_certs_of_different_types() -> io::Result<()> {
        // Matching certificate fixtures for each key type above.
        let certs: Vec<&[u8]> = vec![
            tls_example_key!("rsa_sha256_cert.pem"),
            tls_example_key!("ecdsa_nistp256_sha256_cert.pem"),
            tls_example_key!("ecdsa_nistp384_sha384_cert.pem"),
            tls_example_key!("ed25519_cert.pem"),
        ];

        for cert in certs {
            load_certs(&mut Cursor::new(cert))?;
        }

        Ok(())
    }
}
|
//! Wrapper around the raw Lua context. These define safe methods that ensure
//! correct use of the Lua stack.
use lua_sys::*;
use std::path::PathBuf;
use std::ffi::{CString, CStr};
use std::ops::{Deref, DerefMut};
/// Return value of `luaL_newmetatable` indicating that the name was already
/// present in the Lua registry.
const ALREADY_DEFINED: i32 = 0;

/// Wrapper around the raw Lua context. When necessary, the raw Lua context can
/// be retrieved.
#[derive(Debug)]
pub struct Lua(pub *mut lua_State);

// SAFETY: NOTE(review) — a raw `*mut lua_State` is not inherently safe to
// send or share across threads; these impls assume all Lua access is
// externally synchronized. Confirm against the callers.
unsafe impl Send for Lua {}
unsafe impl Sync for Lua {}
/// Errors while interacting with Lua
#[derive(Debug, Clone, Eq, PartialEq)]
pub enum LuaErr {
    /// There was an error loading the configuration file
    Load(FFIErr),
    /// Evaluation error from Lua
    Eval(String),
    /// There was an FFI error during evaluation
    EvalFFI(FFIErr),
    /// There was an error loading in arguments from the Lua call
    ArgumentInvalid,
    /// A variable was already defined.
    AlreadyDefined(String),
    /// Could not find configuration file with the given path.
    /// Reason given from Lua as a string.
    FileNotFound(PathBuf, String)
}

/// Errors while interfacing with C
#[derive(Debug, Clone, Eq, PartialEq)]
pub enum FFIErr {
    /// String had invalid UTF-8 encoding.
    InvalidUTF(String),
    /// String contained a null byte.
    NullByte(String),
    /// Could not convert from C string to UTF-8 encoded one
    Conversion(CString)
}
impl Lua {
    /// Creates a fresh Lua state with the standard libraries opened and the
    /// awesome object registry installed.
    ///
    /// # Panics
    ///
    /// Panics if `luaL_newstate` returns NULL (out of memory).
    pub fn new() -> Self {
        unsafe {
            let lua = luaL_newstate();
            if lua.is_null() {
                panic!("luaL_newstate returned NULL");
            }
            luaL_openlibs(lua);
            luaA::object_setup(lua);
            Lua(lua)
        }
    }

    /// Constructs the Lua object from an already initialized Lua context.
    /// # Safety
    /// You should not use the passed in Lua state after this
    /// except through the `Lua` interface (and that includes
    /// using the raw pointer directly)
    pub unsafe fn from_ptr(lua: *mut lua_State) -> Self {
        Lua(lua)
    }

    /// Loads and runs the Lua file that the path points to.
    ///
    /// Returns `FileNotFound` if the file cannot be loaded and `Eval` if it
    /// loads but fails while running.
    pub fn load_and_run(&self, path: PathBuf) -> Result<(), LuaErr> {
        // Convert the path to a NUL-terminated C string, reporting UTF-8 or
        // embedded-NUL problems as `Load` errors.
        let path_str = path.to_str()
            .ok_or_else(||
                LuaErr::Load(FFIErr::InvalidUTF(format!("{:?}", path))))
            .and_then(|s| CString::new(s)
                .map_err(|_|
                    LuaErr::Load(FFIErr::NullByte(format!("{:?}", path)))))?;
        unsafe {
            let lua = &mut *self.0;
            let mut status = luaL_loadfile(lua, path_str.as_ptr());
            if status != 0 {
                // If something went wrong, error message is at the top of
                // the stack.
                let error = lua_tostring(lua, -1);
                let error = CStr::from_ptr(error).to_string_lossy().into_owned();
                Err(LuaErr::FileNotFound(path.clone(), error))?
            }
            // Run the file
            status = lua_pcallk(lua, 0, LUA_MULTRET, 0, 0, None);
            if status != 0 {
                let error = lua_tostring(lua, -1);
                let error = CStr::from_ptr(error).to_string_lossy().into_owned();
                Err(LuaErr::Eval(error))?
            }
        }
        // NOTE(review): this leaks the CString on every call; `luaL_loadfile`
        // has already copied the path, so a normal drop looks sufficient —
        // confirm there is no dangling use before removing.
        ::std::mem::forget(path_str);
        Ok(())
    }

    /// Registers the methods in the array to the given variable name.
    ///
    /// The requirement for the name to be static is to ensure that memory
    /// does not leak. The mechanism to ensure that names can be dynamically
    /// allocated is not available at this time.
    ///
    /// NOTE(review): `CStr::from_bytes_with_nul` requires `name` to already
    /// contain a trailing `\0` byte (e.g. `"button\0"`); a plain `&str`
    /// without one always yields the `NullByte` error. Confirm callers.
    pub fn register_methods(&self, name: &'static str, methods: &[luaL_Reg])
                            -> Result<(), LuaErr> {
        unsafe {
            let l = self.0;
            // NOTE: This is safe because we guarantee that name is static
            let c_name = CStr::from_bytes_with_nul(name.as_bytes())
                .map_err(|_| LuaErr::EvalFFI(FFIErr::NullByte(name.into())))?;
            let result = luaL_newmetatable(l, c_name.as_ptr());
            // `c_name` is a borrowed &CStr, so this forget is a no-op.
            ::std::mem::forget(c_name);
            if result == ALREADY_DEFINED {
                // variable is still pushed to the stack
                lua_pop(l, 1);
                return Err(LuaErr::AlreadyDefined(name.into()))
            }

            /* Set __index to be the metatable */
            // move meta table to top of stack
            lua_pushvalue(l, -1);
            // Set the __index to be the metatable
            // NOTE Pops the value from the stack
            lua_setfield(l, -2, c_str!("__index"));

            /* Add the methods to the table */
            lua_newtable(l);
            luaL_setfuncs(l, methods.as_ptr(), 0);
            lua_pushvalue(l, -1);
            // NOTE Pops the value from the stack
            lua_setglobal(l, c_name.as_ptr());

            /* Set "self" to be the metatable */
            lua_pushvalue(l, -1);
            lua_setmetatable(l, -2);

            // Pop the table we made, as well as the metatable
            lua_pop(l, 2);
        }
        Ok(())
    }

    /// Loads the library given by `lib_path` and stores the result
    /// in a global named `name`.
    ///
    /// # Panics
    ///
    /// Panics if `name` contains an interior NUL byte.
    pub fn load_library(&self, name: &str, lib_path: PathBuf)
                        -> Result<(), LuaErr> {
        unsafe {
            let c_name = CString::new(name)
                .expect("Name contained null bytes");
            self.load_and_run(lib_path)?;
            println!("Setting global to {:#?}", c_name);
            lua_setglobal(self.0, c_name.as_ptr());
            // NOTE(review): leaks the CString; `lua_setglobal` copies the
            // key, so this forget looks unnecessary — confirm.
            ::std::mem::forget(c_name);
        }
        Ok(())
    }

    /// Adds a variable number of paths to the lookup path within lua.
    /// This lookup path is used to find libraries.
    ///
    /// Note that these values are appended on to the current lookup path.
    ///
    /// NOTE(review): uses absolute stack index 1 for the `package` table,
    /// which assumes the stack is empty when this is called — confirm.
    pub fn add_lib_lookup_path(&self, paths: &[PathBuf]) -> Result<(), LuaErr> {
        let len = paths.len();
        unsafe {
            let lua = self.0;
            // Push these on to the stack
            lua_getglobal(lua, c_str!("package"));
            lua_getfield(lua, 1, c_str!("path"));
            for path in paths {
                let c_path = path.to_str()
                    .ok_or_else(||
                        LuaErr::EvalFFI(FFIErr::InvalidUTF(path.to_str().unwrap().into())))
                    .and_then(|s| CString::new(s)
                        .map_err(|_|
                            LuaErr::EvalFFI(FFIErr::NullByte(path.to_str().unwrap().into()))))?;
                lua_pushfstring(lua, c_path.as_ptr());
                // NOTE(review): leaks the CString; Lua copies pushed strings.
                ::std::mem::forget(c_path);
            }
            // concatenate with thing on top of the stack when we were called
            // + 1 because we want to include the path.
            lua_concat(lua, (len + 1) as i32);
            // Now set the path to that value
            lua_setfield(lua, 1, c_str!("path"));
            // pop "package"
            lua_pop(lua, 1);
        }
        Ok(())
    }

    /// Sets up the lookup path to include the default awesome libs.
    /// These libraries are found in `/usr/share/awesome/lib`.
    ///
    /// Note that these values are added to the current lookup path,
    /// so if you want them to take precedence call this method earlier than
    /// other methods that modify the path
    /// (e.g [add_lib_lookup_path](add_lib_lookup_path)).
    pub fn add_default_awesome_libs(&self) -> Result<(), LuaErr>{
        self.add_lib_lookup_path(&[";/usr/share/awesome/lib/?.lua;".into(),
                                   ";/usr/share/awesome/lib/?/init.lua;".into()
        ])
    }
}
impl Deref for Lua {
    type Target = lua_State;

    /// Borrows the raw Lua state immutably.
    fn deref(&self) -> &Self::Target {
        // SAFETY: NOTE(review) — assumes the wrapped pointer is non-null and
        // valid for the lifetime of `self` (upheld by `new`/`from_ptr`).
        unsafe {
            &*self.0
        }
    }
}

impl DerefMut for Lua {
    /// Borrows the raw Lua state mutably.
    fn deref_mut(&mut self) -> &mut lua_State {
        // SAFETY: NOTE(review) — same validity assumption as `deref`.
        unsafe {
            &mut *self.0
        }
    }
}
/// These are methods that are straight Rust-version copies of their equivalents
/// defined in the Awesome library.
///
/// They should not be used directly and instead you should use [Lua](./Lua).
#[allow(non_snake_case)]
pub mod luaA {
use lua_sys::*;
use libc;
use std::process::Command;
use std::cell::Cell;
use std::ffi::{CString, CStr};
use std::collections::LinkedList;
use std::collections::hash_map::DefaultHasher;
use std::hash::Hasher;
use std::sync::{Mutex, RwLock};
use ::object::Property;
use ::object::class::{Class, Object, AllocatorF, CheckerF, CollectorF,
PropF};
use callbacks::button::ButtonState;
// This weird line is so that I can use luaA namespace explicitly here.
use super::luaA;
// Global button class definitions
    // Global button class definitions
    lazy_static! {
        /// Class object backing the Lua `button` class.
        pub static ref BUTTON_CLASS: RwLock<Class> =
            RwLock::new(Class::default());
        /// Class object backing the Lua `client` class.
        pub static ref CLIENT_CLASS: RwLock<Class> =
            RwLock::new(Class::default());
        /// Class object backing the Lua `drawin` class.
        pub static ref DRAWIN_CLASS: RwLock<Class> =
            RwLock::new(Class::default());
        /// Class object backing the Lua `drawable` class.
        pub static ref DRAWABLE_CLASS: RwLock<Class> =
            RwLock::new(Class::default());
        /// Class object backing the Lua `screen` class.
        pub static ref SCREEN_CLASS: RwLock<Class> =
            RwLock::new(Class::default());
    }

    /// NULL pointer constant used for C interop.
    const NULL: *mut libc::c_void = 0 as _;

    /// Thread-safe wrapper around a raw `*mut Class` so class pointers can
    /// be stored in the global `CLASSES` list.
    pub struct ClassWrapper(*mut Class);
    // SAFETY: NOTE(review) — sharing a raw Class pointer across threads
    // assumes all access is externally synchronized; confirm.
    unsafe impl Send for ClassWrapper {}
    unsafe impl Sync for ClassWrapper {}

    impl ClassWrapper {
        /// Wraps a raw class pointer.
        pub fn new(class: *mut Class) -> Self {
            ClassWrapper(class)
        }
    }

    lazy_static! {
        /// Lua function to call on dofunction() error
        pub static ref ERROR_FUNC: RwLock<lua_CFunction> =
            RwLock::new(None);
        /// All classes registered via `class_setup`.
        pub static ref CLASSES: Mutex<LinkedList<ClassWrapper>> =
            Mutex::new(LinkedList::new());
    }

    // TODO move this somewhere else...
    /// C-compatible rectangle (position + size), mirroring awesome's
    /// `area_t`.
    #[repr(C)]
    pub struct area_t {
        pub x: i16,
        pub y: i16,
        pub width: u16,
        pub height: u16
    }
    /// Registers a library: creates a metatable named `name`, installs
    /// `meta` on it, exposes `methods` as a global table, and makes the
    /// metatable its own `__index` and metatable.
    pub unsafe fn openlib(lua: *mut lua_State, name: *const libc::c_char,
                          methods: &[luaL_Reg], meta: &[luaL_Reg]) {
        luaL_newmetatable(lua, name);
        lua_pushvalue(lua, -1); // dup meta table
        lua_setfield(lua, -2, c_str!("__index")); // metatable.__index = metatable
        luaA::registerlib(lua, ::std::ptr::null_mut(), meta);
        luaA::registerlib(lua, name, methods);
        lua_pushvalue(lua, -1); // dup self as meta table
        lua_setmetatable(lua, -2); // set self as meta table
        lua_pop(lua, 2);
    }

    /// Registers the functions in `l`. With a non-null `libname` they go
    /// into a new global table of that name (left on the stack); otherwise
    /// they are added to the table currently on top of the stack.
    pub unsafe fn registerlib(lua: *mut lua_State, libname: *const libc::c_char,
                              l: &[luaL_Reg]) {
        if ! libname.is_null() {
            lua_newtable(lua);
            luaL_setfuncs(lua, l.as_ptr(), 0);
            lua_pushvalue(lua, -1);
            lua_setglobal(lua, libname);
        } else {
            luaL_setfuncs(lua, l.as_ptr(), 0);
        }
    }
pub unsafe fn typeerror(lua: *mut lua_State, narg: libc::c_int,
tname: *const libc::c_char) -> libc::c_int {
let msg = lua_pushfstring(lua, c_str!("%s expected, got %s"),
tname, lua_typename(lua, narg));
luaL_traceback(lua, lua, ::std::ptr::null_mut(), 2);
lua_concat(lua, 2);
return luaL_argerror(lua, narg, msg);
}
    /// Raises a Lua argument error reporting that the number at stack index
    /// `narg` is outside `[min, max]`. Does not return normally
    /// (`luaL_argerror` longjmps).
    pub unsafe fn rangeerror(lua: *mut lua_State, narg: libc::c_int,
                             min: libc::c_double, max: libc::c_double)
                             -> libc::c_int {
        let msg = lua_pushfstring(lua, c_str!("value in [%f, %f] expected, got %f"),
                                  min, max, lua_tonumber(lua, narg) as libc::c_double);
        luaL_traceback(lua, lua, ::std::ptr::null_mut(), 2);
        lua_concat(lua, 2);
        return luaL_argerror(lua, narg, msg);
    }
pub unsafe fn getuservalue(lua: *mut lua_State, idx: libc::c_int) {
lua_getuservalue(lua, idx);
}
pub unsafe fn setuservalue(lua: *mut lua_State, idx: libc::c_int) {
lua_setuservalue(lua, idx);
}
pub unsafe fn rawlen(lua: *mut lua_State, idx: libc::c_int) -> libc::size_t {
return lua_rawlen(lua, idx) as libc::size_t ;
}
    /// Raises a type error (via [`typeerror`], which longjmps) unless the
    /// value at `idx` is a function.
    pub unsafe fn checkfunction(lua: *mut lua_State, idx: libc::c_int) {
        let lua_t = lua_type(lua, idx);
        let is_function = lua_t == LUA_TFUNCTION as i32;
        if ! is_function {
            luaA::typeerror(lua, idx, c_str!("function"));
        }
    }

    /// Returns the boolean at index `n`, raising a type error (longjmp) if
    /// the value is not a boolean.
    pub unsafe fn checkboolean(lua: *mut lua_State, n: libc::c_int) -> libc::c_int {
        if lua_type(lua, n) != LUA_TBOOLEAN as i32 {
            luaA::typeerror(lua, n, c_str!("boolean"));
        }
        return lua_toboolean(lua, n);
    }
    /// Reads field `name` from the table at `idx` as a number, returning
    /// `def` when the field is missing (nil). The fetched field is popped.
    pub unsafe fn getopt_number(lua: *mut lua_State, idx: libc::c_int,
                                name: *const libc::c_char, mut def: lua_Number)
                                -> lua_Number {
        lua_getfield(lua, idx, name);
        let is_nil = lua_type(lua, -1) == LUA_TNIL as i32;
        let is_num = lua_isnumber(lua, -1) != 0;
        if is_nil || is_num {
            // luaL_optnumber handles the nil case by returning `def`.
            def = luaL_optnumber(lua, -1, def);
        }
        lua_pop(lua, 1);
        return def;
    }

    /// Returns the number at index `n`, raising a range error (longjmp) if
    /// it lies outside `[min, max]`.
    pub unsafe fn checknumber_range(lua: *mut lua_State, n: libc::c_int,
                                    min: lua_Number, max: lua_Number)
                                    -> lua_Number {
        let result = lua_tonumber(lua, n);
        if result < min || result > max {
            luaA::rangeerror(lua, n, min, max);
        }
        return result;
    }

    /// Like [`checknumber_range`] but returns `def` when the argument is
    /// nil or absent.
    pub unsafe fn optnumber_range(lua: *mut lua_State, narg: libc::c_int,
                                  def: lua_Number, min: lua_Number,
                                  max: lua_Number) -> lua_Number {
        let lua_t = lua_type(lua, narg);
        let is_none_or_nil = lua_t == LUA_TNIL as i32 || lua_t == LUA_TNONE;
        if is_none_or_nil {
            return def;
        }
        return luaA::checknumber_range(lua, narg, min, max);
    }

    /// Reads field `name` from the table at `idx` as a number constrained
    /// to `[min, max]`, returning `def` when the field is missing.
    pub unsafe fn getopt_number_range(lua: *mut lua_State, idx: libc::c_int,
                                      name: *const libc::c_char,
                                      mut def: lua_Number, min: lua_Number,
                                      max: lua_Number) -> lua_Number {
        lua_getfield(lua, idx, name);
        let is_nil = lua_type(lua, -1) == LUA_TNIL as i32;
        let is_number = lua_isnumber(lua, -1) != 0;
        if is_nil || is_number {
            def = luaA::optnumber_range(lua, -1, def, min, max);
        }
        lua_pop(lua, 1);
        return def;
    }
    /// Returns the value at index `n` as an integer, raising a type error
    /// (longjmp) if the number has a fractional part.
    pub unsafe fn checkinteger(lua: *mut lua_State, n: libc::c_int) -> libc::c_int {
        let d = lua_tonumber(lua, n);
        // Round-trip through c_int: if the value changes, it wasn't integral.
        let need_to_round = d != (d as libc::c_int as lua_Number);
        if need_to_round {
            luaA::typeerror(lua, n, c_str!("integer"));
        }
        return d as libc::c_int;
    }

    /// Like [`checkinteger`] but returns `def` when the argument is nil or
    /// absent.
    pub unsafe fn optinteger(lua: *mut lua_State, narg: libc::c_int, def: lua_Integer)
                             -> lua_Integer {
        return ::utils::luaL_opt(lua, |lua, n| luaA::checkinteger(lua, n) as lua_Integer, narg, def);
    }

    /// Reads field `name` from the table at `idx` as an integer, returning
    /// `def` when the field is missing.
    /// NOTE(review): the return narrows lua_Integer to c_int via `as`,
    /// which silently truncates large values — confirm this is intended.
    pub unsafe fn getopt_integer(lua: *mut lua_State, idx: libc::c_int,
                                 name: *const libc::c_char,
                                 mut def: lua_Integer) -> libc::c_int {
        lua_getfield(lua, idx, name);
        let lua_t = lua_type(lua, -1);
        let is_nil = lua_t == LUA_TNIL as i32;
        let is_number = lua_t == LUA_TNUMBER as i32;
        if is_nil || is_number {
            def = luaA::optinteger(lua, -1, def);
        }
        lua_pop(lua, 1);
        return def as libc::c_int;
    }

    /// Returns the integer at index `n`, raising a range error (longjmp) if
    /// it lies outside `[min, max]`.
    pub unsafe fn checkinteger_range(lua: *mut lua_State, n: libc::c_int,
                                     min: lua_Number, max: lua_Number)
                                     -> libc::c_int {
        let result = luaA::checkinteger(lua, n) as lua_Number;
        if result < min || result > max {
            luaA::rangeerror(lua, n, min, max);
        }
        return result as libc::c_int;
    }

    /// Like [`checkinteger_range`] but returns `def` when the argument is
    /// nil or absent.
    pub unsafe fn optinteger_range(lua: *mut lua_State, narg: libc::c_int,
                                   def: lua_Integer, min: lua_Number,
                                   max: lua_Number) -> lua_Integer {
        let lua_t = lua_type(lua, narg);
        let is_none_or_nil = lua_t == LUA_TNIL as i32 || lua_t == LUA_TNONE;
        if is_none_or_nil {
            return def;
        }
        return luaA::checkinteger_range(lua, narg, min, max) as lua_Integer;
    }

    /// Reads field `name` from the table at `idx` as an integer constrained
    /// to `[min, max]`, returning `def` when the field is missing.
    /// NOTE(review): same narrowing `as` cast as `getopt_integer`.
    pub unsafe fn getopt_integer_range(lua: *mut lua_State, idx: libc::c_int,
                                       name: *const libc::c_char,
                                       mut def: lua_Integer,
                                       min: lua_Number, max: lua_Number)
                                       -> libc::c_int {
        lua_getfield(lua, idx, name);
        let lua_t = lua_type(lua, -1);
        let is_nil = lua_t == LUA_TNIL as i32;
        let is_number = lua_t == LUA_TNUMBER as i32;
        if is_nil || is_number {
            def = luaA::optinteger_range(lua, -1, def, min, max);
        }
        lua_pop(lua, 1);
        return def as libc::c_int;
    }
    /// Pushes `geo` as a Lua table with `x`, `y`, `width`, `height` fields.
    /// Returns the number of pushed values (always 1).
    pub unsafe fn pusharea(lua: *mut lua_State, geo: area_t) -> libc::c_int {
        lua_createtable(lua, 0, 4);
        lua_pushinteger(lua, geo.x as lua_Integer);
        lua_setfield(lua, -2, c_str!("x"));
        lua_pushinteger(lua, geo.y as lua_Integer);
        lua_setfield(lua, -2, c_str!("y"));
        lua_pushinteger(lua, geo.width as lua_Integer);
        lua_setfield(lua, -2, c_str!("width"));
        lua_pushinteger(lua, geo.height as lua_Integer);
        lua_setfield(lua, -2, c_str!("height"));
        return 1;
    }
    /// Stores the value at `idx` in the Lua registry, releasing any previous
    /// reference held in `*ptr`, and writes the new reference id to `*ptr`.
    pub unsafe fn register(lua: *mut lua_State, idx: libc::c_int,
                           ptr: *mut libc::c_int) -> libc::c_int {
        lua_pushvalue(lua, idx);
        if *ptr != LUA_REFNIL {
            // Release the previously held registry reference first.
            luaL_unref(lua, LUA_REGISTRYINDEX, *ptr);
        }
        *ptr = luaL_ref(lua, LUA_REGISTRYINDEX);
        return 0;
    }

    /// Releases the registry reference in `*ptr` and resets it to
    /// `LUA_REFNIL`.
    pub unsafe fn unregister(lua: *mut lua_State, ptr: *mut libc::c_int) {
        luaL_unref(lua, LUA_REGISTRYINDEX, *ptr);
        *ptr = LUA_REFNIL;
    }

    /// Like [`register`] but first verifies the value at `idx` is a
    /// function (longjmps otherwise).
    pub unsafe fn registerfct(lua: *mut lua_State, idx: libc::c_int,
                              fct: *mut libc::c_int) -> libc::c_int {
        luaA::checkfunction(lua, idx);
        return luaA::register(lua, idx, fct);
    }

    /// Default `__index` handler: delegates to the miss-property signal.
    pub unsafe fn default_index(lua: *mut lua_State) -> libc::c_int {
        return luaA::class_index_miss_property(lua, ::std::ptr::null_mut());
    }

    /// Default `__newindex` handler: delegates to the miss-property signal.
    pub unsafe fn default_newindex(lua: *mut lua_State) -> libc::c_int {
        return luaA::class_newindex_miss_property(lua, ::std::ptr::null_mut());
    }
    /// Emits the global `debug::index::miss` signal when a property lookup
    /// finds no matching property. Always returns 0 (no Lua results).
    pub unsafe fn class_index_miss_property(lua: *mut lua_State,
                                            _object: *mut Object)
                                            -> libc::c_int {
        use object::{GLOBAL_SIGNALS, signal_object_emit};
        let global_signals = GLOBAL_SIGNALS.lock().unwrap();
        signal_object_emit(lua, &*global_signals, "debug::index::miss", 2);
        return 0
    }

    /// Emits the global `debug::newindex::miss` signal when a property
    /// assignment finds no matching property. Always returns 0.
    pub unsafe fn class_newindex_miss_property(lua: *mut lua_State,
                                               _object: *mut Object)
                                               -> libc::c_int {
        use object::{GLOBAL_SIGNALS, signal_object_emit};
        let global_signals = GLOBAL_SIGNALS.lock().unwrap();
        signal_object_emit(lua, &*global_signals, "debug::newindex::miss", 3);
        return 0
    }
    /// Pushes the Lua object associated with `ptr` from the object registry
    /// onto the stack. Returns the number of pushed values (always 1).
    pub unsafe fn object_push(lua: *mut lua_State, ptr: *mut libc::c_void) -> libc::c_int {
        luaA::object_registry_push(lua);
        lua_pushlightuserdata(lua, ptr);
        lua_rawget(lua, -2);
        // Drop the registry table, leaving only the looked-up object.
        ::lua::lua_remove(lua, -2);
        return 1;
    }

    /// Pushes the `awesome.object.registry` table (created by
    /// [`object_setup`]) onto the stack.
    pub unsafe fn object_registry_push(lua: *mut lua_State) {
        lua_pushstring(lua, c_str!("awesome.object.registry"));
        lua_rawget(lua, LUA_REGISTRYINDEX);
    }
    /// Calls the function on top of the stack (below its `nargs` arguments)
    /// with the shared error handler installed. Returns 1 on success and 0
    /// if the call raised an error.
    pub unsafe fn dofunction(lua: *mut lua_State, nargs: libc::c_int,
                             nret: libc::c_int) -> libc::c_int {
        /* Move function before arguments */
        ::lua::lua_insert(lua, - nargs - 1);
        /* Push error handling function */
        lua_pushcfunction(lua, Some(luaA::dofunction_error));
        /* Move error handling function before args and functions */
        ::lua::lua_insert(lua, - nargs -2);
        // Remember the handler's absolute index so it can be removed after
        // a successful call (pcall pops args + function, shifting the top).
        let error_func_pos = lua_gettop(lua) - nargs -1;
        if lua_pcallk(lua, nargs, nret, - nargs -2, 0, None) != 0{
            // NOTE(review): this prints the raw `*const c_char` pointer, not
            // the error text; converting via CStr would print the message.
            eprintln!("{:?}", lua_tostring(lua, -1));
            /* Remove error function and error string */
            lua_pop(lua, 2);
            return 0;
        }
        /* Remove error function */
        ::lua::lua_remove(lua, error_func_pos);
        return 1;
    }

    /// Error handler installed by [`dofunction`]: forwards to the function
    /// stored in `ERROR_FUNC`, or does nothing if none is set (or the lock
    /// is poisoned).
    pub unsafe extern fn dofunction_error(lua: *mut lua_State) -> libc::c_int {
        match ERROR_FUNC.read() {
            Ok(error_f_guard) => {
                if let Some(error_f) = *error_f_guard {
                    return error_f(lua)
                }
            },
            _ => {}
        }
        0
    }
    /// Raises a type error (via [`typeerror`], which longjmps) unless the
    /// value at `idx` is a table.
    pub unsafe fn checktable(lua: *mut lua_State, idx: libc::c_int) {
        let istable = lua_type(lua, idx) == LUA_TTABLE as i32;
        if !istable {
            luaA::typeerror(lua, idx, c_str!("table"));
        }
    }
    /// Looks up the property named by the string at `fieldidx` on `class`,
    /// walking up the parent chain. Returns a null pointer if no class in
    /// the chain defines the property.
    pub unsafe fn class_property_get(lua: *mut lua_State, mut class: *const Class,
                                     fieldidx: libc::c_int) -> *const Property {
        /* Lookup the property using token */
        let attr = CStr::from_ptr(
            luaL_checklstring(lua, fieldidx,
                              ::std::ptr::null_mut()) as *mut _)
            .to_string_lossy().to_owned();
        /* Look for the property in the class; if not found, go in the parent class. */
        while ! class.is_null() {
            if let Some(prop) = (*class).properties.iter()
                .find(|prop| prop.name == attr) {
                return prop as *const _;
            }
            class = (*class).parent;
        }
        // NOTE(review): `attr` is a Cow borrowed from Lua-owned memory;
        // forgetting it leaks only when the Cow is owned, and the early
        // `return` above skips it entirely — confirm this is intentional.
        ::std::mem::forget(attr);
        return ::std::ptr::null_mut();
    }
    /// Lua constructor for the `button` class: delegates to [`class_new`]
    /// with the global button class.
    pub unsafe extern fn button_new(lua: *mut lua_State) -> libc::c_int {
        luaA::class_new(lua, &*BUTTON_CLASS)
    }

    /// Generic class constructor: allocates a new object of `global_class`
    /// and applies the property table at stack index 2. Returns 1 (the new
    /// object is left on the stack by the allocator).
    pub unsafe fn class_new(lua: *mut lua_State, global_class: &RwLock<Class>)
                            -> libc::c_int {
        /* Check we have a table that should contains some properties */
        luaA::checktable(lua, 2);
        /* Create a new object */
        let class = global_class.try_read().unwrap();
        let object_ptr = (class.allocator.unwrap())(lua);
        /* Push the first key before iterating */
        lua_pushnil(lua);
        /* Iterate over the property keys */
        while lua_next(lua, 2) != 0 {
            /* Check that the key is a string.
             * We cannot call tostring blindly or Lua will convert a key that is a
             * number TO A STRING, confusing lua_next() */
            let is_string = lua_type(lua, -2) == LUA_TSTRING as i32;
            if is_string {
                let prop = luaA::class_property_get(lua, &*class as _, -2);
                // Apply the property's constructor hook, if it has one.
                if !prop.is_null() && (*prop).new.is_some() {
                    (*prop).new.unwrap()(lua, object_ptr);
                }
            }
            /* Remove value */
            lua_pop(lua, 1);
        }
        1
    }
    /// Returns the `Class` pointer registered for the userdata at `idx`, or
    /// null if the value is not a userdata or has no registered class.
    pub unsafe fn class_get(lua: *mut lua_State, idx: libc::c_int)
                            -> *mut Class {
        let ty = lua_type(lua, idx);
        if ty == LUA_TUSERDATA as i32 && lua_getmetatable(lua, idx) != 0 {
            /* Use the metatable as key to get the class from registry */
            lua_rawget(lua, LUA_REGISTRYINDEX);
            let class = lua_touserdata(lua, -1) as *mut Class;
            lua_pop(lua, 1);
            return class;
        }
        return ::std::ptr::null_mut();
    }

    /// `__newindex` handler for collected objects: always raises an error.
    pub unsafe extern fn class_newindex_invalid(lua: *mut lua_State)
                                                -> libc::c_int {
        return luaL_error(lua, c_str!("attempt to index an object that \
                                       was already garbage collected"))
    }

    /// `__index` handler for collected objects: `obj.valid` yields `false`;
    /// any other access raises an error.
    pub unsafe extern fn class_index_invalid(lua: *mut lua_State)
                                             -> libc::c_int {
        // NOTE(review): `CString::from_raw` takes ownership of memory owned
        // by the Lua runtime and will free it on drop — this looks like it
        // should be a borrowing `CStr::from_ptr` instead. Confirm.
        let attr = CString::from_raw(
            luaL_checklstring(lua, 2, ::std::ptr::null_mut()) as _)
            .into_string().unwrap();
        if &*attr == "valid" {
            lua_pushboolean(lua, 0);
            return 1;
        }
        return luaA::class_newindex_invalid(lua);
    }
pub unsafe extern fn class_gc(lua: *mut lua_State) -> libc::c_int {
let item = lua_touserdata(lua, 1) as *mut Object;
(*item).signals.clear();
/* Get the object class */
let class = luaA::class_get(lua, 1);
let old_instances = (*class).instances.get();
(*class).instances.set(old_instances - 1);
/* Call the collector function of the class, and all its parent classes */
let mut cur_class = class;
while ! cur_class.is_null() {
if let Some(collector) = (*class).collector {
collector(item);
}
cur_class = (*cur_class).parent
}
/* Unset its metatable so that e.g. luaA_toudata() will no longer accept
* this object. This is needed since other __gc methods can still use this.
* We also make sure that `item.valid == false`.
*/
lua_newtable(lua);
lua_pushcfunction(lua, Some(luaA::class_index_invalid));
lua_setfield(lua, -2, c_str!("__index"));
lua_pushcfunction(lua, Some(luaA::class_newindex_invalid));
lua_setfield(lua, -2, c_str!("__newindex"));
lua_setmetatable(lua, 1);
return 0;
}
    /// Creates the `awesome.object.registry` table (with an empty metatable
    /// for tracking reference counts) inside the Lua registry.
    pub unsafe fn object_setup(lua: *mut lua_State) {
        /* Push identification string */
        lua_pushstring(lua, c_str!("awesome.object.registry"));
        /* Create an empty table */
        lua_newtable(lua);
        /* Create an empty metatable */
        lua_newtable(lua);
        /* Set this empty table as the registry metatable.
         * It's used to store the number of reference on stored objects. */
        lua_setmetatable(lua, -2);
        /* Register table inside registry */
        lua_rawset(lua, LUA_REGISTRYINDEX);
    }
    /// Initializes `class`: builds its object metatable, registers the
    /// bidirectional class-pointer <-> metatable mapping in the Lua
    /// registry, installs `methods`/`meta`, fills in the class fields, and
    /// records the class in the global `CLASSES` list.
    pub unsafe fn class_setup(lua: *mut lua_State, class: *mut Class,
                              name: *const libc::c_char,
                              parent: *mut Class,
                              allocator: AllocatorF,
                              collector: Option<CollectorF>,
                              checker: Option<CheckerF>,
                              index_miss_property: Option<PropF>,
                              newindex_miss_property: Option<PropF>,
                              methods: &[luaL_Reg],
                              meta: &[luaL_Reg]) {
        /* Create the object metatable */
        lua_newtable(lua);
        /* Register it with class pointer as key in the registry
         * class-pointer -> metatable */
        lua_pushlightuserdata(lua, class as _);
        /* Duplicate the object metatable */
        lua_pushvalue(lua, -2);
        lua_rawset(lua, LUA_REGISTRYINDEX);
        /* Now register class pointer with metatable as key in the registry
         * metatable -> class-pointer */
        lua_pushvalue(lua, -1);
        lua_pushlightuserdata(lua, class as _);
        lua_rawset(lua, LUA_REGISTRYINDEX);
        /* Duplicate objects metatable */
        lua_pushvalue(lua, -1);
        /* Set garbage collector in the metatable */
        lua_pushcfunction(lua, Some(luaA::class_gc));
        lua_setfield(lua, -2, c_str!("__gc"));
        lua_setfield(lua, -2, c_str!("__index")); /* metatable.__index = metatable 1 */
        luaA::registerlib(lua, ::std::ptr::null_mut(), meta); /* 1 */
        luaA::registerlib(lua, name, methods); /* 2 */
        lua_pushvalue(lua, -1); /* dup self as metatable 3 */
        lua_setmetatable(lua, -2); /* set self as metatable 2 */
        lua_pop(lua, 2);
        (*class).collector = collector;
        (*class).allocator = Some(allocator);
        // NOTE(review): `CString::from_raw` takes ownership of the caller's
        // string and frees it on drop — safe only if callers deliberately
        // leak the name (e.g. `CString::into_raw`). Confirm against callers.
        (*class).name = CString::from_raw(name as _).into_string().unwrap();
        (*class).index_miss_prop = index_miss_property;
        (*class).newindex_miss_prop = newindex_miss_property;
        (*class).checker = checker;
        (*class).parent = parent;
        (*class).tostring = None;
        (*class).instances = Cell::new(0);
        (*class).index_miss_handler = LUA_REFNIL;
        (*class).newindex_miss_handler = LUA_REFNIL;
        luaA::CLASSES.lock().unwrap().push_back(ClassWrapper::new(class));
    }
    /// `__tostring` metamethod: builds a string of the form
    /// `Child/Parent/...: 0xADDR`, letting each class in the chain append
    /// its own `tostring` output in parentheses. Returns 1 (the string).
    pub unsafe extern fn object_tostring(lua: *mut lua_State) -> libc::c_int {
        let mut lua_class = luaA::class_get(lua, 1);
        if lua_class.is_null() {
            eprintln!("lua class was null!");
            return 0;
        }
        let object = luaA::checkudata(lua, 1, lua_class);
        // `offset` counts how many string fragments have been pushed; each
        // new fragment is inserted *below* the previous ones so that the
        // final `lua_concat` joins them in child-to-parent order.
        let mut offset = 0;
        while ! lua_class.is_null() {
            if offset != 0 {
                // Separator between class names in the chain.
                lua_pushstring(lua, c_str!("/"));
                ::lua::lua_insert(lua, -{offset += 1; offset});
            }
            let name = CString::new((*lua_class).name.clone()).unwrap();
            lua_pushstring(lua, name.as_ptr());
            ::lua::lua_insert(lua, -{offset += 1; offset});
            // NOTE(review): `lua_pushstring` copies the bytes, so this
            // forget leaks the CString — confirm before removing.
            ::std::mem::forget(name);
            if let Some(tostring) = (*lua_class).tostring {
                lua_pushstring(lua, c_str!("("));
                let n = 2 + tostring(lua, object as _);
                lua_pushstring(lua, c_str!(")"));
                for _ in 0..n {
                    ::lua::lua_insert(lua, -offset);
                }
                offset += n;
            }
            lua_class = (*lua_class).parent;
        }
        lua_pushfstring(lua, c_str!(": %p"), object);
        lua_concat(lua, offset + 1);
        1
    }
/// Converts the Lua value at index `ud` to a userdata pointer, returning
/// it only when its metatable maps (via the registry) to `class` or to a
/// class whose parent chain contains `class`. Returns null otherwise.
pub unsafe fn toudata(lua: *mut lua_State, ud: libc::c_int,
                      class: *mut Class) -> *mut libc::c_void {
    let p = lua_touserdata(lua, ud);
    /* does it have a metatable? */
    if ! p.is_null() && lua_getmetatable(lua, ud) != 0 {
        /* Get the lua_class_t that matches this metatable */
        lua_rawget(lua, LUA_REGISTRYINDEX);
        let mut metatable_class = lua_touserdata(lua, -1) as *mut Class;
        /* remove lightuserdata (lua_class pointer) */
        lua_pop(lua, 1);
        /* Now, check that the class given in argument is the same as the
         * metatable's object, or one of its parent (inheritance) */
        while ! metatable_class.is_null() {
            if metatable_class == class {
                return p;
            }
            metatable_class = (*metatable_class).parent;
        }
    }
    return ::std::ptr::null_mut();
}
/// Checks that the value at `ud` is an instance of `class` (or of a
/// subclass), raising a Lua type error otherwise. When the class has a
/// `checker`, it is consulted and a Lua error is raised if the object is
/// reported invalid.
pub unsafe fn checkudata(lua: *mut lua_State, ud: libc::c_int,
                         class: *mut Class) -> *mut libc::c_void {
    let p = luaA::toudata(lua, ud, class);
    if p.is_null() {
        let name = CString::new((*class).name.clone()).unwrap();
        luaA::typeerror(lua, ud, name.as_ptr());
        ::std::mem::forget(name);
    } else if let Some(checker) = (*class).checker {
        // BUG FIX: only raise an error when the checker actually reports
        // the object as invalid. Previously the error fired whenever a
        // checker existed, regardless of its result (the bool return is
        // established by the `valid` handling in class_index).
        if !checker(p as _) {
            luaL_error(lua, c_str!("invalid object"));
        }
    }
    return p;
}
/// Like `checkudata`, but a nil value at `udx` is accepted and yields a
/// null pointer instead of raising a type error.
pub unsafe fn checkudataornil(lua: *mut lua_State, udx: libc::c_int,
                              class: *mut Class) -> *mut libc::c_void {
    match lua_type(lua, udx) {
        t if t == LUA_TNIL as _ => ::std::ptr::null_mut(),
        _ => checkudata(lua, udx, class),
    }
}
/// Stores the value at `oud` into the table at `tud`, keyed by its
/// pointer, and bumps the per-pointer reference count kept in that
/// table's metatable. The referenced value is removed from the stack.
/// Returns the pointer used as key, or null if the value has no pointer
/// identity (numbers, booleans, ...).
pub unsafe fn object_incref(lua: *mut lua_State, tud: libc::c_int,
                            oud: libc::c_int) -> *mut libc::c_void {
    /* Get pointer value of the item */
    let pointer = lua_topointer(lua, oud) as *mut libc::c_void;
    /* Not reference able. */
    if pointer.is_null() {
        ::lua::lua_remove(lua, oud);
        return ::std::ptr::null_mut();
    }
    /* Push the pointer (key) */
    lua_pushlightuserdata(lua, pointer);
    /* Push the data (value) */
    lua_pushvalue(lua, if oud < 0 {oud - 1} else {oud});
    /* table.lightudata = data */
    // Relative indices are shifted to account for the values just pushed.
    lua_rawset(lua, if tud < 0 {tud - 2} else {tud});
    /* refcount++ */
    /* Get the metatable */
    lua_getmetatable(lua, tud);
    /* Push the pointer (key) */
    lua_pushlightuserdata(lua, pointer);
    /* Get the number of references */
    lua_rawget(lua, -2);
    /* Get the number of references and increment it */
    // lua_tointeger returns 0 for nil, so a first reference yields 1.
    let count = lua_tointeger(lua, -1) + 1;
    lua_pop(lua, 1);
    /* Push the pointer (key) */
    lua_pushlightuserdata(lua, pointer);
    /* Push count (value) */
    lua_pushinteger(lua, count);
    /* Set metatable[pointer] = count */
    lua_rawset(lua, -3);
    /* Pop metatable */
    lua_pop(lua, 1);
    /* Remove referenced item */
    ::lua::lua_remove(lua, oud);
    return pointer;
}
/// Decrements the reference count of `pointer` in the metatable of the
/// table at `tud`; when the count reaches zero the value is also removed
/// from the table itself. A null pointer is a no-op.
pub unsafe fn object_decref(lua: *mut lua_State, tud: libc::c_int,
                            pointer: *mut libc::c_void) {
    if pointer.is_null() {
        return;
    }
    /* First, refcount-- */
    /* Get the metatable */
    lua_getmetatable(lua, tud);
    /* Push the pointer (key) */
    lua_pushlightuserdata(lua, pointer);
    /* Get the number of references */
    lua_rawget(lua, -2);
    /* Get the number of references and decrement it */
    let count = lua_tointeger(lua, -1) - 1;
    /* Did we find the item in our table? (tointeger(nil)-1) is -1 */
    if count < 0 {
        eprintln!("BUG: Reference not found");
        /* Pop reference count and metatable */
        lua_pop(lua, 2);
        return;
    }
    lua_pop(lua, 1);
    /* Push the pointer (key) */
    lua_pushlightuserdata(lua, pointer);
    /* Hasn't the ref reached 0? */
    if count != 0 {
        lua_pushinteger(lua, count);
    }
    else {
        /* Yup, delete it, set nil as value */
        lua_pushnil(lua);
    }
    /* Set meta[pointer] = count/nil */
    lua_rawset(lua, -3);
    /* Pop metatable */
    lua_pop(lua, 1);
    /* Wait, no more ref? */
    if count == 0 {
        /* Yes? So remove it from table */
        lua_pushlightuserdata(lua, pointer);
        /* Push nil as value */
        lua_pushnil(lua);
        /* table[pointer] = nil */
        // Shift a relative table index past the two values just pushed.
        lua_rawset(lua, if tud < 0 {tud - 2} else {tud});
    }
}
/// References the value at `iud` inside the environment table of the
/// object at `ud`; the value is removed from the stack and its pointer
/// key is returned.
pub unsafe fn object_ref_item(lua: *mut lua_State, ud: libc::c_int,
                              mut iud: libc::c_int) -> *mut libc::c_void {
    /* Get the env table from the object */
    luaA::getuservalue(lua, ud);
    // Shift a relative index to account for the env table just pushed.
    iud = if iud < 0 { iud - 1} else { iud };
    let pointer = luaA::object_incref(lua, -1, iud);
    /* Remove env table */
    lua_pop(lua, 1);
    return pointer;
}
/// Drops a reference previously taken with `object_ref_item` from the
/// environment table of the object at `ud`.
pub unsafe fn object_unref_item(lua: *mut lua_State, ud: libc::c_int,
                                ptr: *mut libc::c_void) {
    /* Get the env table from the object */
    luaA::getuservalue(lua, ud);
    /* Decrement */
    luaA::object_decref(lua, -1, ptr);
    /* Remove env table */
    lua_pop(lua, 1);
}
/// Connects the function at stack index `ud` to signal `name` on the
/// object at `oud`, keeping a reference to the function in the object's
/// environment table. Raises a Lua error if `ud` is not a function.
pub unsafe fn object_connect_signal_simple_from_stack(lua: *mut lua_State,
                                                      oud: libc::c_int,
                                                      name: *mut libc::c_char,
                                                      ud: libc::c_int) {
    luaA::checkfunction(lua, ud);
    let obj = lua_touserdata(lua, oud) as *mut Object;
    let ref_item = luaA::object_ref_item(lua, oud, ud);
    ::object::signal::signal_connect(&mut (*obj).signals, name, ref_item);
}
/// Lua entry point: `object:connect_signal(name, func)` — argument 2 is
/// the signal name, argument 3 the handler function.
pub unsafe extern fn object_connect_signal_simple(lua: *mut lua_State)
                                                  -> libc::c_int {
    let check_string = luaL_checklstring(lua, 2, ::std::ptr::null_mut());
    luaA::object_connect_signal_simple_from_stack(lua,
                                                  1,
                                                  check_string as _,
                                                  3);
    0
}
/// Disconnects the function at `ud` from signal `name` on the object at
/// `oud`; when a handler was actually removed, its env-table reference
/// is dropped too. The function is removed from the stack either way.
pub unsafe fn object_disconnect_signal_simple_from_stack(
    lua: *mut lua_State, oud: libc::c_int, name: *const libc::c_char,
    ud: libc::c_int) {
    luaA::checkfunction(lua, ud);
    let obj = lua_touserdata(lua, oud) as *mut Object;
    let ptr = lua_topointer(lua, ud) as _;
    if ::object::signal::signal_disconnect(&mut (*obj).signals,
                                           name,
                                           ptr) != 0 {
        luaA::object_unref_item(lua, oud, ptr);
    }
    ::lua::lua_remove(lua, ud);
}
/// Lua entry point: `object:disconnect_signal(name, func)` — argument 2
/// is the signal name, argument 3 the handler to remove.
pub unsafe extern fn object_disconnect_signal_simple(
    lua: *mut lua_State) -> libc::c_int {
    let check_string = luaL_checklstring(lua, 2, ::std::ptr::null_mut());
    luaA::object_disconnect_signal_simple_from_stack(lua, 1,
                                                     check_string as _,
                                                     3);
    0
}
/// Converts a relative stack index into an absolute one. Positive
/// indices and pseudo-indices (registry index and below) are returned
/// unchanged.
pub unsafe extern fn absindex(lua: *mut lua_State, ud: libc::c_int)
                              -> libc::c_int {
    let already_absolute = ud > 0 || ud <= LUA_REGISTRYINDEX;
    if already_absolute {
        return ud;
    }
    lua_gettop(lua) + ud + 1
}
/// Pushes onto the stack the value stored under key `pointer` in the
/// environment table of the object at `ud`. Returns the number of
/// pushed values (always 1; nil when the key is absent).
pub unsafe fn object_push_item(lua: *mut lua_State, ud: libc::c_int,
                               pointer: *mut libc::c_void) -> libc::c_int {
    /* Get env table of the object */
    luaA::getuservalue(lua, ud);
    /* Push key */
    lua_pushlightuserdata(lua, pointer);
    /* Get env.pointer */
    lua_rawget(lua, -2);
    /* Remove env table */
    ::lua::lua_remove(lua, -2);
    return 1;
}
/// Emits signal `name` with `nargs` stack arguments on the signal set of
/// `class`. Panics if `name` is not valid UTF-8.
pub unsafe fn class_emit_signal(lua: *mut lua_State, class: *const Class,
                                name: *const libc::c_char,
                                nargs: libc::c_int) {
    let signal_name = CStr::from_ptr(name).to_str().unwrap();
    ::object::signal::signal_object_emit(lua, &(*class).signals, signal_name, nargs)
}
/// Emits signal `name` on the object at `oud`: every connected handler
/// is called with the object plus the `nargs` arguments currently on top
/// of the stack, then the signal is re-emitted on the object's class
/// (with the object itself prepended as an extra argument).
pub unsafe extern fn object_emit_signal(lua: *mut lua_State,
                                        oud: libc::c_int,
                                        name: *const libc::c_char,
                                        nargs: libc::c_int) {
    let oud_abs = luaA::absindex(lua, oud);
    let lua_class = luaA::class_get(lua, oud);
    let obj = luaA::toudata(lua, oud, lua_class) as *mut Class;
    if obj.is_null() {
        eprintln!("Trying to emit signal '{:?}' on non object", name);
        return;
    } else if let Some(checker) = (*lua_class).checker {
        // BUG FIX: only bail out when the checker reports the object as
        // invalid; previously any class with a checker always aborted,
        // so no signal was ever emitted on its objects.
        if !checker(obj as _) {
            eprintln!("Trying to emit signal '{:?}' on invalid object", name);
            return;
        }
    }
    // Signals are looked up by the hash of their name.
    let mut hasher = DefaultHasher::new();
    hasher.write(CStr::from_ptr(name).to_str().unwrap().as_bytes());
    let id = hasher.finish();
    if let Some(sig) = (*obj).signals.iter_mut().find(|sig| sig.id == id) {
        let nbfunc = sig.sigfuncs.len() as i32;
        luaL_checkstack(lua, nbfunc + nargs + 2, c_str!("too much signal"));
        /* Push all functions and then execute, because this list can change
         * while executing funcs. */
        for sigfunc in &mut sig.sigfuncs {
            luaA::object_push_item(lua, oud_abs, sigfunc as *mut _ as *mut _);
        }
        for i in 0..nbfunc {
            /* push object */
            lua_pushvalue(lua, oud_abs);
            /* push all args */
            for _ in 0..nargs {
                lua_pushvalue(lua, - nargs - nbfunc - 1 + i);
            }
            /* push first function */
            lua_pushvalue(lua, - nargs - nbfunc - 1 + i);
            /* remove this first function */
            ::lua::lua_remove(lua, - nargs - nbfunc - 2 + i);
            luaA::dofunction(lua, nargs + 1, 0);
        }
    }
    /* Then emit signal on the class */
    lua_pushvalue(lua, oud);
    ::lua::lua_insert(lua, - nargs - 1);
    luaA::class_emit_signal(lua, luaA::class_get(lua, - nargs - 1), name, nargs + 1);
}
/// Lua entry point: `object:emit_signal(name, ...)` — all stack values
/// after the name are forwarded as signal arguments.
pub unsafe extern fn object_emit_signal_simple(lua: *mut lua_State)
                                               -> libc::c_int {
    let check_string = luaL_checklstring(lua, 2, ::std::ptr::null_mut());
    luaA::object_emit_signal(lua, 1, check_string, lua_gettop(lua) -2);
    0
}
/// `__newindex` metamethod for class instances: tries the metatable
/// chain first, then the matching property's `new_index` callback, then
/// the class-wide newindex-miss handler/callback.
pub unsafe extern fn class_newindex(lua: *mut lua_State) -> libc::c_int {
    /* Try to use metatable first. */
    if luaA::usemetatable(lua, 1, 2) != 0 {
        return 1;
    }
    let class = luaA::class_get(lua, 1);
    let prop = luaA::class_property_get(lua, class, 2);
    /* Property does exist and has a newindex callback */
    if !prop.is_null()
    {
        if let Some(newindex) = (*prop).new_index {
            return newindex(lua, luaA::checkudata(lua, 1, class) as _);
        }
    } else {
        /* Unknown property: fall back to the class miss handlers. */
        if (*class).newindex_miss_handler != LUA_REFNIL {
            return luaA::class_call_handler(lua, (*class).newindex_miss_handler);
        }
        if let Some(propF) = (*class).newindex_miss_prop {
            return propF(lua, luaA::checkudata(lua, 1, class) as _);
        }
    }
    return 0;
}
/// `__index` metamethod for class instances: tries the metatable chain,
/// handles the special `valid` and `data` properties, then dispatches to
/// the matching property's `index` callback or the class-wide
/// index-miss handler/callback.
pub unsafe extern fn class_index(lua: *mut lua_State) -> libc::c_int {
    /* Try to use metatable first. */
    if luaA::usemetatable(lua, 1, 2) != 0 {
        return 1;
    }
    let class = luaA::class_get(lua, 1);
    /* Is this the special 'valid' property? This is the only property
     * accessible for invalid objects and thus needs special handling. */
    let attr = luaL_checklstring(lua, 2, NULL as _);
    let attr_str = CStr::from_ptr(attr).to_str().unwrap();
    if attr_str == "valid" {
        let p = luaA::toudata(lua, 1, class) as _;
        if let Some(checker) = (*class).checker {
            let res = {
                if p != NULL as _ {
                    if checker(p) {1} else {0}
                } else {
                    0
                }
            };
            lua_pushboolean(lua, res);
        } else {
            lua_pushboolean(lua, if p != NULL as _ {1} else {0});
        }
        return 1;
    }
    let prop = luaA::class_property_get(lua, class, 2);
    /* Is this the special 'data' property? This is available on all objects and
     * thus not implemented as a lua_class_property_t.
     */
    if attr_str == "data" {
        luaA::checkudata(lua, 1, class);
        luaA::getuservalue(lua, 1);
        lua_getfield(lua, -1, c_str!("data"));
        return 1;
    }
    /* Property does exist and has an index callback */
    if ! prop.is_null() {
        if let Some(indexF) = (*prop).index {
            // BUG FIX: propagate the callback's return count. It was
            // previously discarded and 0 returned, so the value the
            // callback pushed never reached the Lua caller (compare the
            // symmetric `return` in class_newindex).
            return indexF(lua, luaA::checkudata(lua, 1, class) as _);
        }
    } else {
        if (*class).index_miss_handler != LUA_REFNIL {
            return luaA::class_call_handler(lua, (*class).index_miss_handler);
        }
        if let Some(propF) = (*class).index_miss_prop {
            return propF(lua, luaA::checkudata(lua, 1, class) as _);
        }
    }
    return 0;
}
/// Looks up the field at `idxfield` in the registered metatable of the
/// object's class and of each parent class in turn. On a hit the value
/// is left on the stack and 1 is returned; otherwise 0.
pub unsafe fn usemetatable(lua: *mut lua_State, idxobj: libc::c_int,
                           idxfield: libc::c_int) -> libc::c_int {
    let mut class = luaA::class_get(lua, idxobj);
    while ! class.is_null() {
        /* Push the class */
        lua_pushlightuserdata(lua, class as _);
        /* Get its metatable from registry */
        lua_rawget(lua, LUA_REGISTRYINDEX);
        /* Push the field */
        lua_pushvalue(lua, idxfield);
        /* Get the field in the metatable */
        lua_rawget(lua, -2);
        /* Do we have a field like that? */
        let is_nil = lua_type(lua, -1) == LUA_TNIL as i32;
        if !is_nil {
            /* Yes, so remove the metatable and return it! */
            ::lua::lua_remove(lua, -2);
            return 1;
        }
        /* No, so remove the metatable and its value */
        lua_pop(lua, 2);
        class = (*class).parent;
    }
    return 0;
}
/// Calls the registry-referenced function `handler` with everything
/// currently on the stack as arguments, allowing multiple return values.
/// Returns the number of values the handler left on the stack, or 0 on
/// error (the error is printed and the stack cleaned).
pub unsafe fn class_call_handler(lua: *mut lua_State, handler: libc::c_int)
                                 -> libc::c_int {
    /* This is based on luaA_dofunction, but allows multiple return values */
    assert!(handler != LUA_REFNIL);
    let nargs = lua_gettop(lua);
    /* Push error handling function and move it before args */
    lua_pushcfunction(lua, Some(luaA::dofunction_error));
    ::lua::lua_insert(lua, - nargs - 1);
    // The error handler sits at the bottom of the stack after the insert.
    let error_func_pos = 1;
    /* push function and move it before args */
    lua_rawgeti(lua, LUA_REGISTRYINDEX, handler as _);
    ::lua::lua_insert(lua, - nargs - 1);
    if lua_pcallk(lua, nargs, LUA_MULTRET, error_func_pos, 0, None) != 0 {
        eprintln!("{:?}", lua_tostring(lua, -1));
        /* Remove error function and error string */
        lua_pop(lua, 2);
        return 0;
    }
    /* Remove error function */
    ::lua::lua_remove(lua, error_func_pos);
    return lua_gettop(lua);
}
/// Sets the registered metatable of `class` on the value at the top of
/// the stack. Returns 1; the value stays on the stack.
pub unsafe fn settype(lua: *mut lua_State, class: *const Class)
                      -> libc::c_int {
    // The registry maps the class pointer (lightuserdata) to its metatable.
    lua_pushlightuserdata(lua, class as _);
    lua_rawget(lua, LUA_REGISTRYINDEX);
    lua_setmetatable(lua, -2);
    return 1;
}
/// Connects the C function `func` to the class signal `name` by pushing
/// it on the stack and delegating to `class_connect_signal_from_stack`.
pub unsafe fn class_connect_signal(lua: *mut lua_State, class: *mut Class,
                                   name: *const libc::c_char,
                                   func: lua_CFunction) {
    lua_pushcfunction(lua, func);
    luaA::class_connect_signal_from_stack(lua, class, name, -1);
}
/// References the value at `oud` in the global object registry and
/// returns its pointer key; the value is removed from the stack.
pub unsafe fn object_ref(lua: *mut lua_State, oud: libc::c_int)
                         -> *mut libc::c_void {
    luaA::object_registry_push(lua);
    // Shift a relative index past the registry table just pushed.
    let p = luaA::object_incref(lua, -1, if oud < 0 {oud - 1} else {oud});
    lua_pop(lua, 1);
    return p as _;
}
/// Drops a reference previously taken with `object_ref` from the global
/// object registry.
pub unsafe fn object_unref(lua: *mut lua_State, ptr: *mut libc::c_void) {
    luaA::object_registry_push(lua);
    luaA::object_decref(lua, -1, ptr as _);
    lua_pop(lua, 1);
}
/// Disconnects the function at `ud` from the class signal `name`; when a
/// handler was actually removed its registry reference is dropped. The
/// function is removed from the stack either way.
pub unsafe fn class_disconnect_signal_from_stack(lua: *mut lua_State,
                                                 class: *mut Class,
                                                 name: *const libc::c_char,
                                                 ud: libc::c_int) {
    use ::object::signal::signal_disconnect;
    luaA::checkfunction(lua, ud);
    let ptr = lua_topointer(lua, ud);
    if (signal_disconnect(&mut (*class).signals, name, ptr as _)) != 0 {
        luaA::object_unref(lua, ptr as _);
    }
    ::lua::lua_remove(lua, ud);
}
/// Connects the function at `ud` to the class signal `name`, keeping a
/// registry reference to it. Raises a Lua error if `ud` is not a
/// function; the function is consumed from the stack by `object_ref`.
pub unsafe fn class_connect_signal_from_stack(lua: *mut lua_State,
                                              class: *mut Class,
                                              name: *const libc::c_char,
                                              ud: libc::c_int) {
    luaA::checkfunction(lua, ud);
    ::object::signal::signal_connect(&mut (*class).signals,
                                     name,
                                     luaA::object_ref(lua, ud))
}
/// Registers a property called `name` on `class`, with optional
/// creation (`new`), getter (`index`) and setter (`new_index`) callbacks.
pub unsafe fn class_add_property<S>(class: *mut Class,
                                    name: S,
                                    new: Option<PropF>,
                                    index: Option<PropF>,
                                    new_index: Option<PropF>)
    where S: Into<String>
{
    (*class).properties.push(Property {
        name: name.into(),
        new,
        index,
        new_index,
    });
}
/// Reads a Lua array of modifier-name strings at `ud` and ORs the
/// corresponding X key masks together. Raises a Lua error if `ud` is not
/// a table or an entry is not a string.
pub unsafe fn tomodifiers(lua: *mut lua_State, ud: libc::c_int) -> u16 {
    use xcb::ffi::base::XCB_NONE;
    luaA::checktable(lua, ud);
    let len = luaA::rawlen(lua, ud);
    let mut mask = XCB_NONE;
    // Lua arrays are 1-indexed.
    for idx in 1..=(len as i64) {
        lua_rawgeti(lua, ud, idx);
        let key = luaL_checklstring(lua, -1, NULL as _);
        let key_str = CStr::from_ptr(key).to_str().unwrap();
        mask |= super::xutil_key_mask_fromstr(key_str) as _;
        lua_pop(lua, 1);
    }
    mask as _
}
/// Pushes a Lua array containing the name of every modifier bit set in
/// `modifiers`, scanning the masks from Shift up to (but excluding) Any.
/// Returns 1 (the table on the stack).
pub unsafe fn pushmodifiers(lua: *mut lua_State, modifiers: u16)
                            -> libc::c_int {
    use xcb::xproto::*;
    lua_newtable(lua);
    let mut slot = 1;
    let mut mask = MOD_MASK_SHIFT;
    while mask != MOD_MASK_ANY {
        if (mask & modifiers as u32) != 0 {
            let name = super::xutil_key_mask_tostr(mask);
            let name_c = CString::new(name).unwrap();
            lua_pushlstring(lua, name_c.as_ptr() as _, name.len());
            // NOTE(review): lua_pushlstring copies the bytes, so this
            // forget appears to leak `name_c` — confirm before removing.
            ::std::mem::forget(name_c);
            lua_rawseti(lua, -2, slot);
            slot += 1;
        }
        mask <<= 1;
    }
    1
}
/// Property setter: stores the integer at the top of the stack into the
/// button's `button` field and emits `property::button` on the object at
/// stack index -3.
pub unsafe fn button_set_button(lua: *mut lua_State, obj: *mut Object)
                                -> libc::c_int {
    let b: *mut ButtonState = obj as _;
    (*b).button = luaL_checkinteger(lua, -1) as _;
    luaA::object_emit_signal(lua, -3, c_str!("property::button"), 0);
    0
}
/// Property setter: reads a modifier table from the top of the stack
/// into the button's `modifiers` mask and emits `property::modifiers` on
/// the object at stack index -3.
pub unsafe fn button_set_modifiers(lua: *mut lua_State, obj: *mut Object)
                                   -> libc::c_int {
    let b: *mut ButtonState = obj as _;
    (*b).modifiers = luaA::tomodifiers(lua, -1);
    luaA::object_emit_signal(lua, -3, c_str!("property::modifiers"), 0);
    0
}
/// Lua entry point meant to quit the window manager; currently panics
/// because the main-loop teardown is not implemented yet.
#[allow(unused_variables)]
pub unsafe fn quit(lua: *mut lua_State) -> libc::c_int {
    // TODO FIXME Kill g_main_loop
    unimplemented!()
}
/// Lua entry point: replaces the current process with the command given
/// as argument 1 (via `sh -c`, so the string may contain arguments).
/// Only returns (0) if the exec itself fails.
pub unsafe fn exec(lua: *mut lua_State) -> libc::c_int {
    use std::os::unix::process::CommandExt;
    let cmd_c = luaL_checklstring(lua, 1, NULL as _);
    let cmd = CStr::from_ptr(cmd_c).to_string_lossy().into_owned();
    // BUG FIX: the original spawned a child (and panicked via `expect`
    // when spawning failed) even though its own TODO noted the call is
    // supposed to replace the current process. `exec` never returns on
    // success.
    let err = Command::new("/bin/sh").arg("-c").arg(cmd).exec();
    eprintln!("exec failed: {}", err);
    0
}
/// Lua entry point: connects the function at stack index 2 to the global
/// signal named by argument 1, keeping a registry reference to it.
pub unsafe fn awesome_connect_signal(lua: *mut lua_State) -> libc::c_int {
    use object::signal::{signal_connect, GLOBAL_SIGNALS};
    let name = luaL_checklstring(lua, 1, NULL as _);
    luaA::checkfunction(lua, 2);
    // NOTE(review): try_lock().unwrap() panics on contention, not just
    // poisoning — confirm single-threaded access is guaranteed here.
    let mut global_signals = GLOBAL_SIGNALS.try_lock().unwrap();
    signal_connect(&mut *global_signals, name, luaA::object_ref(lua, 2));
    0
}
/// Lua entry point: disconnects the function at stack index 2 from the
/// global signal named by argument 1, dropping its registry reference
/// when a handler was actually removed.
pub unsafe fn awesome_disconnect_signal(lua: *mut lua_State) -> libc::c_int {
    use object::signal::{signal_disconnect, GLOBAL_SIGNALS};
    let name = luaL_checklstring(lua, 1, NULL as _);
    luaA::checkfunction(lua, 2);
    let func = lua_topointer(lua, 2) as *mut libc::c_void;
    // NOTE(review): try_lock().unwrap() panics on contention, not just
    // poisoning — confirm single-threaded access is guaranteed here.
    let mut global_signals = GLOBAL_SIGNALS.try_lock().unwrap();
    if signal_disconnect(&mut *global_signals, name, func) != 0 {
        luaA::object_unref(lua, func);
    }
    0
}
/// Lua entry point: emits the global signal named by argument 1, passing
/// every remaining stack value as a signal argument.
pub unsafe fn awesome_emit_signal(lua: *mut lua_State) -> libc::c_int {
    use object::signal::{signal_object_emit, GLOBAL_SIGNALS};
    let raw_string = luaL_checklstring(lua, 1, NULL as _);
    let string = CStr::from_ptr(raw_string);
    let string = string.to_str().unwrap();
    // Everything after the signal name counts as an argument.
    let top = lua_gettop(lua) - 1;
    let mut global_signals = GLOBAL_SIGNALS.try_lock().unwrap();
    signal_object_emit(lua, &mut *global_signals, string, top);
    0
}
/// Property getter: pushes the drawable's cairo surface pointer as
/// lightuserdata, or nil when no surface is attached. Returns 1.
pub unsafe fn drawable_get_surface(lua: *mut lua_State, obj: *mut Object)
                                   -> libc::c_int {
    use ::callbacks::drawable::DrawableState;
    let drawable = &mut *(obj as *mut DrawableState);
    if let Some(surface_ref) = drawable.surface.as_ref() {
        // TODO FIXME This is probably not right
        // C calls some cairo_surface_reference thing,
        // but I couldn't find that anywhere.
        // NOTE(review): without cairo_surface_reference the Lua side may
        // outlive the surface — verify lifetime handling.
        lua_pushlightuserdata(lua, surface_ref.opaque);
    } else {
        lua_pushnil(lua);
    }
    1
}
}
use libc;
/// Removes the stack element at `idx`, shifting the elements above it
/// down — a reimplementation of Lua 5.3's `lua_remove` macro in terms of
/// `lua_rotate`.
pub unsafe fn lua_remove(lua: *mut lua_State, idx: libc::c_int) {
    lua_rotate(lua, idx, -1);
    lua_pop(lua, 1);
}
/// Moves the top stack element into position `idx`, shifting elements up
/// — a reimplementation of Lua 5.3's `lua_insert` macro in terms of
/// `lua_rotate`.
pub unsafe fn lua_insert(lua: *mut lua_State, idx: libc::c_int) {
    lua_rotate(lua, idx, 1);
}
/// Returns true when the value at `index` is nil or the index is not
/// valid (LUA_TNONE). Name kept for caller compatibility even though it
/// reads as "is none or nil".
pub unsafe fn lua_isnonornil(lua: *mut lua_State, index: libc::c_int) -> bool {
    matches!(lua_type(lua, index), t if t == LUA_TNIL as _ || t == LUA_TNONE as _)
}
// TODO move
/// Gets the key mask associated with the name
/// Gets the X key mask associated with a modifier name. Unknown names
/// map to `NO_SYMBOL` (the zero mask).
fn xutil_key_mask_fromstr(keyname: &str) -> u16 {
    use xcb::xproto::*;
    use xcb::base::NO_SYMBOL;
    (match keyname {
        "Shift" => MOD_MASK_SHIFT,
        "Lock" => MOD_MASK_LOCK,
        "Ctrl" | "Control" => MOD_MASK_CONTROL,
        "Mod1" => MOD_MASK_1,
        "Mod2" => MOD_MASK_2,
        "Mod3" => MOD_MASK_3,
        "Mod4" => MOD_MASK_4,
        "Mod5" => MOD_MASK_5,
        "Any" => MOD_MASK_ANY,
        _ => NO_SYMBOL,
    }) as u16
}
fn xutil_key_mask_tostr(mask: u32) -> &'static str{
use xcb::xproto::*;
match mask {
MOD_MASK_SHIFT => "Shift",
MOD_MASK_LOCK => "Lock",
MOD_MASK_CONTROL => "Control",
MOD_MASK_1 => "Mod1",
MOD_MASK_2 => "Mod2",
MOD_MASK_3 => "Mod3",
MOD_MASK_4 => "Mod4",
MOD_MASK_5 => "Mod5",
MOD_MASK_ANY => "Any",
_ => "Unknown"
}
}
|
// Copyright 2020 The VectorDB Authors.
//
// Code is licensed under Apache License, Version 2.0.
use crate::errors::{Error, SQLError};
use crate::{expressions::*, planners::*};
pub fn planner_to_expression(planner: Planner) -> Result<Expression, Error> {
match planner {
Planner::Constant(v) => Ok(Expression::from(ConstantExpression::new(v.val))),
Planner::Variable(v) => Ok(Expression::from(VariableExpression::new(v.val))),
Planner::BinaryExpression(v) => {
let left = planner_to_expression(v.left)?;
let right = planner_to_expression(v.right)?;
factory::expression_factory(v.op.as_str(), vec![left, right])
}
_ => Err(Error::from(SQLError::NotImplemented(format!(
"{:?}",
planner,
)))),
}
}
// Unit tests for planner lowering.
// BUG FIX: gate the module with #[cfg(test)]; previously it was compiled
// into every build, dragging test-only imports into release builds.
#[cfg(test)]
mod tests {
    #[test]
    fn test_planner_to_expression() {
        use crate::datums::Datum;
        use crate::expressions::IExpression;
        use crate::planners::*;
        // (5 + 3) + (6 + 4) == 18
        let c11 = Planner::from(ConstantPlanner::new(Datum::Int32(5)));
        let c12 = Planner::from(ConstantPlanner::new(Datum::Int32(3)));
        let b11 = Planner::from(BinaryExpressionPlanner::new("+".to_string(), c11, c12));
        let c21 = Planner::from(ConstantPlanner::new(Datum::Int32(6)));
        let c22 = Planner::from(ConstantPlanner::new(Datum::Int32(4)));
        let b21 = Planner::from(BinaryExpressionPlanner::new("+".to_string(), c21, c22));
        let b31 = Planner::from(BinaryExpressionPlanner::new("+".to_string(), b11, b21));
        let expr = super::planner_to_expression(b31).unwrap();
        let actual = expr.eval().unwrap();
        let expect = Datum::Int32(18);
        assert_eq!(expect, actual);
    }
}
|
// pp-exact
// NOTE(review): legacy pre-1.0 Rust syntax (`int`, bare `str`,
// unqualified enum variants, `assert` expressions) kept verbatim as a
// `pp-exact` pretty-printer fixture — do not reformat or modernize.
enum color { red = 1, green, blue, imaginary = -1, }
fn main() {
test_color(red, 1, "red");
test_color(green, 2, "green");
test_color(blue, 3, "blue");
test_color(imaginary, -1, "imaginary");
}
fn test_color(color: color, val: int, name: str) {
assert (color as int == val);
assert (color as float == val as float);
}
|
// NOTE(review): this register block appears to be svd2rust-generated
// (same shape as the other peripheral files); manual edits here would be
// lost on regeneration.
#[doc = "Register `HWCFGR0` reader"]
pub type R = crate::R<HWCFGR0_SPEC>;
#[doc = "Field `NUM_CHAN_24` reader - NUM_CHAN_24"]
pub type NUM_CHAN_24_R = crate::FieldReader;
#[doc = "Field `EXTRA_AWDS` reader - Extra analog watchdog"]
pub type EXTRA_AWDS_R = crate::FieldReader;
#[doc = "Field `OVS` reader - Oversampling"]
pub type OVS_R = crate::FieldReader;
impl R {
    #[doc = "Bits 0:3 - NUM_CHAN_24"]
    #[inline(always)]
    pub fn num_chan_24(&self) -> NUM_CHAN_24_R {
        // Low nibble of the hardware-configuration word.
        NUM_CHAN_24_R::new((self.bits & 0x0f) as u8)
    }
    #[doc = "Bits 4:7 - Extra analog watchdog"]
    #[inline(always)]
    pub fn extra_awds(&self) -> EXTRA_AWDS_R {
        EXTRA_AWDS_R::new(((self.bits >> 4) & 0x0f) as u8)
    }
    #[doc = "Bits 8:11 - Oversampling"]
    #[inline(always)]
    pub fn ovs(&self) -> OVS_R {
        OVS_R::new(((self.bits >> 8) & 0x0f) as u8)
    }
}
#[doc = "Hardware Configuration Register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`hwcfgr0::R`](R). See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct HWCFGR0_SPEC;
impl crate::RegisterSpec for HWCFGR0_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`hwcfgr0::R`](R) reader structure"]
impl crate::Readable for HWCFGR0_SPEC {}
#[doc = "`reset()` method sets HWCFGR0 to value 0x0110"]
impl crate::Resettable for HWCFGR0_SPEC {
    const RESET_VALUE: Self::Ux = 0x0110;
}
|
use std::sync::MutexGuard;
use nia_interpreter_core::Interpreter;
use nia_interpreter_core::NiaInterpreterCommand;
use nia_interpreter_core::NiaInterpreterCommandResult;
use nia_interpreter_core::{EventLoopHandle, NiaDefineActionCommandResult};
use crate::error::{NiaServerError, NiaServerResult};
use crate::protocol::{NiaAction, NiaNamedAction};
use crate::protocol::{NiaActionEnum, NiaDefineActionRequest};
use crate::protocol::{NiaConvertable, Serializable};
use nia_protocol_rust::DefineActionResponse;
/// Server-side wrapper around the interpreter's result of a
/// define-action command, convertible to the protobuf response.
#[derive(Debug, Clone)]
pub struct NiaDefineActionResponse {
    // Raw interpreter outcome (Success / Error / Failure).
    command_result: NiaDefineActionCommandResult,
}
/// Builds the interpreter command that defines the given named action.
fn make_define_action_command(action: NiaNamedAction) -> NiaInterpreterCommand {
    NiaInterpreterCommand::make_define_action_command(action.to_interpreter_repr())
}
impl NiaDefineActionResponse {
    /// Sends a define-action command to the interpreter event loop and
    /// synchronously waits for its result. Transport failures in either
    /// direction, or an unexpected result variant, become interpreter
    /// errors.
    fn try_from(
        nia_define_action_request: NiaDefineActionRequest,
        event_loop_handle: MutexGuard<EventLoopHandle>,
    ) -> Result<NiaDefineActionResponse, NiaServerError> {
        let action = nia_define_action_request.take_action();
        let interpreter_command = make_define_action_command(action);
        event_loop_handle
            .send_command(interpreter_command)
            .map_err(|_| {
                NiaServerError::interpreter_error(
                    "Error sending command to the interpreter.",
                )
            })?;
        // Blocks until the interpreter reports back.
        let execution_result =
            event_loop_handle.receive_result().map_err(|_| {
                NiaServerError::interpreter_error(
                    "Error reading command from the interpreter.",
                )
            })?;
        let response = match execution_result {
            NiaInterpreterCommandResult::DefineAction(command_result) => {
                NiaDefineActionResponse { command_result }
            }
            _ => {
                // NOTE(review): relies on an Into impl turning the error
                // into `Err(...)` for this Result type — confirm it exists.
                return NiaServerError::interpreter_error(
                    "Unexpected command result.",
                )
                .into()
            }
        };
        Ok(response)
    }
    /// Infallible wrapper around `try_from`: any server-side error is
    /// folded into a `Failure` command result so a response can always
    /// be sent back to the client.
    pub fn from(
        nia_define_action_request: NiaDefineActionRequest,
        event_loop_handle: MutexGuard<EventLoopHandle>,
    ) -> NiaDefineActionResponse {
        println!("{:?}", nia_define_action_request);
        let try_result = NiaDefineActionResponse::try_from(
            nia_define_action_request,
            event_loop_handle,
        );
        match try_result {
            Ok(result) => result,
            Err(error) => {
                let message =
                    format!("Execution failure: {}", error.get_message());
                let command_result =
                    NiaDefineActionCommandResult::Failure(message);
                NiaDefineActionResponse { command_result }
            }
        }
    }
}
impl
    Serializable<
        NiaDefineActionResponse,
        nia_protocol_rust::DefineActionResponse,
    > for NiaDefineActionResponse
{
    /// Maps the interpreter result onto the protobuf response's
    /// success/error/failure oneof.
    fn to_pb(&self) -> DefineActionResponse {
        let result = &self.command_result;
        let mut define_action_response =
            nia_protocol_rust::DefineActionResponse::new();
        match result {
            NiaDefineActionCommandResult::Success() => {
                let mut success_result =
                    nia_protocol_rust::DefineActionResponse_SuccessResult::new(
                    );
                success_result.set_message(protobuf::Chars::from(
                    String::from("Success."),
                ));
                define_action_response.set_success_result(success_result);
            }
            NiaDefineActionCommandResult::Error(error_message) => {
                let mut error_result =
                    nia_protocol_rust::DefineActionResponse_ErrorResult::new();
                error_result
                    .set_message(protobuf::Chars::from(error_message.clone()));
                define_action_response.set_error_result(error_result);
            }
            NiaDefineActionCommandResult::Failure(failure_message) => {
                let mut failure_result =
                    nia_protocol_rust::DefineActionResponse_FailureResult::new(
                    );
                failure_result.set_message(protobuf::Chars::from(
                    failure_message.clone(),
                ));
                define_action_response.set_failure_result(failure_result);
            }
        }
        define_action_response
    }
    /// Deserialization is not supported for this response type —
    /// presumably it is only ever produced server-side, never consumed.
    fn from_pb(
        object_pb: DefineActionResponse,
    ) -> NiaServerResult<NiaDefineActionResponse> {
        unreachable!()
    }
}
|
#![allow(dead_code)]
//!
//! Easy interface for changes in music time.
//!
use super::{
music_time::MusicTime, music_time_counter::MusicTimeCounter, time_signature::TimeSignature,
};
use std::time::{Duration, SystemTime};
/// Panic message used when `SystemTime::duration_since` reports that the
/// clock went backwards during a pulse.
const STRING_PANIC_TIME_FLOW: &str = "Hello John Titor, you reversed time!";
/// This trait is used by `MusicTimerEngine` for callbacks in changes of music time.
/// Invoke it to make the most of the performance engine.
/// This trait is used by `MusicTimerEngine` for callbacks in changes of music time.
/// Invoke it to make the most of the performance engine.
///
/// Within a single `MusicTimerEngine::pulse`, `on_beat_interval` fires
/// first, followed by `on_beat` and then `on_bar` when those values
/// have changed since the previous trigger.
pub trait MusicTimerState {
    /// Called when the beat interval changes.
    ///
    /// # Arguments
    /// - `current_time` - The current time at which this callback has been triggered.
    fn on_beat_interval(&mut self, current_time: &MusicTime);
    /// Called when the beat changes.
    ///
    /// # Arguments
    /// - `current_time` - The current time at which this callback has been triggered.
    fn on_beat(&mut self, current_time: &MusicTime);
    /// Called when the bar changes
    ///
    /// # Arguments
    /// - `current_time` - The current time at which this callback has been triggered.
    fn on_bar(&mut self, current_time: &MusicTime);
}
/// The engine uses all of this crate's utilities to allow to use of a music
/// performance state system that triggers callbacks. Its aims are to allow
/// for an easy interface for changes in music time.
/// The engine uses all of this crate's utilities to allow to use of a music
/// performance state system that triggers callbacks. Its aims are to allow
/// for an easy interface for changes in music time.
pub struct MusicTimerEngine {
    // Total wall-clock time elapsed since `start_time` as of the last pulse.
    total_time: Duration,
    // `total_time` of the previous pulse; used to compute the delta.
    previous_time: Duration,
    // Wall-clock instant the engine was created.
    start_time: SystemTime,
    // Time accumulated toward the next beat-interval trigger
    // (initialized to the target so the first pulse triggers immediately).
    event_trigger_time: Duration,
    // Counter tracking the current musical position.
    music_counter: MusicTimeCounter,
    // Duration of one beat interval — the trigger threshold.
    event_trigger_target: Duration,
    // Music time at the previous trigger; used to detect beat/bar changes.
    previous_music_time: MusicTime,
}
impl MusicTimerEngine {
    /// Create a new `MusicTimerEngine` with a `TimeSignature` and bpm.
    ///
    /// # Arguments
    /// * `time_signature` - The time signature for the performance.
    /// * `bpm` - The beats per minute used for the performance.
    ///
    /// # Example
    /// ```
    /// use music_timer::{music_timer_engine::MusicTimerEngine, time_signature::TimeSignature};
    /// let mut performer = MusicTimerEngine::new(TimeSignature::new(3, 4), 155.0);
    /// ```
    pub fn new(time_signature: TimeSignature, bpm: f32) -> Self {
        let music_counter = MusicTimeCounter::new(time_signature);
        let event_trigger_target = music_counter.beat_interval_target_frames(bpm);
        MusicTimerEngine {
            total_time: Duration::default(),
            previous_time: Duration::default(),
            start_time: SystemTime::now(),
            // Start "full" so the first pulse triggers immediately.
            event_trigger_time: event_trigger_target,
            music_counter,
            event_trigger_target,
            previous_music_time: MusicTime::new(0, 0, 0),
        }
    }
    /// Pulse the engine. The time since the last pulse is used to evaluate if there is
    /// a change in music time. It is suggested to call this from a loop.
    ///
    /// # Arguments
    /// * `state` - The _trait_ `MusicTimerState` used for changes in music time callbacks.
    ///
    /// # Example
    /// ```
    /// use music_timer::{music_timer_engine::{MusicTimerEngine, MusicTimerState}, music_time::MusicTime};
    /// struct PerformanceState;
    /// impl MusicTimerState for PerformanceState {
    ///     fn on_beat_interval(&mut self, current_time: &MusicTime) {
    ///         // Do something on the beat interval
    ///     }
    ///     fn on_beat(&mut self, current_time: &MusicTime) {
    ///         // Do something on the beat
    ///     }
    ///     fn on_bar(&mut self, current_time: &MusicTime) {
    ///         // Do something on the bar
    ///     }
    /// }
    /// let mut performer_state = PerformanceState{};
    /// let mut performer = music_timer::create_performance_engine(3, 4, 155.0);
    /// performer.pulse(&mut performer_state);
    /// ```
    pub fn pulse<TimerState: MusicTimerState>(&mut self, state: &mut TimerState) {
        // Progress total time
        self.previous_time = self.total_time;
        // Time should never reverse else you're in trouble
        self.total_time = SystemTime::now()
            .duration_since(self.start_time)
            .expect(STRING_PANIC_TIME_FLOW);
        // Advance by delta
        let time_delta = self.total_time - self.previous_time;
        self.event_trigger_time += time_delta;
        // Check for an advance in the beat interval
        let is_beat_interval_advanced = self.event_trigger_time >= self.event_trigger_target;
        if is_beat_interval_advanced {
            let current_time = self.music_counter.current_time();
            // BUG FIX (encoding): the three callback invocations below
            // were corrupted to `¤t_time` ("&curren" eaten as an HTML
            // entity); restored to `&current_time`.
            // On beat interval change
            state.on_beat_interval(&current_time);
            // On beat change
            let is_beat_changed =
                self.previous_music_time.get_beat() != self.music_counter.current_time().get_beat();
            if is_beat_changed {
                state.on_beat(&current_time);
            }
            // On bar change
            let is_bar_changed =
                self.previous_music_time.get_bar() != self.music_counter.current_time().get_bar();
            if is_bar_changed {
                state.on_bar(&current_time);
            }
            // Advance and store time
            self.previous_music_time = self.music_counter.current_time().clone();
            self.music_counter.advance_beat_interval();
            // Reset and calibrate drift - https://www.youtube.com/watch?v=Gm7lcZiLOus&t=30s
            // Carrying the overshoot forward keeps long-run timing accurate.
            let initial_d = self.event_trigger_time - self.event_trigger_target;
            self.event_trigger_time = initial_d;
        }
    }
    /// Gets the duration of time between beat intervals. Handy for sleeping threads.
    ///
    /// # Example
    /// ```
    /// let mut performer = music_timer::create_performance_engine(3, 4, 155.0);
    ///
    /// // We can set the delay to be half the trigger target. This will give
    /// // us a reasonable cycle speed with enough buffer to keep an accurate time.
    /// // This of course is not needed if the application is managing thread sleeping.
    /// // The shorter the sleep duration of the thread, the more accurate the
    /// // time triggering will be. In most cases setting the sleep to 60fps is recommended for
    /// // < 180bpm @ 4/4.
    /// let sleep_duration = performer.get_beat_interval_duration() / 2;
    /// println!("SLEEP_DURATION: {:?}", sleep_duration);
    /// std::thread::sleep(sleep_duration);
    /// ```
    ///
    pub fn get_beat_interval_duration(&self) -> Duration {
        self.event_trigger_target
    }
    /// Gets the current music time of the performance.
    pub fn get_current_time(&self) -> &MusicTime {
        self.music_counter.current_time()
    }
    /// Sets the current music time.
    ///
    /// # Arguments
    /// * `time` - The new music time to set.
    pub fn set_music_timer(&mut self, time: MusicTime) -> &mut Self {
        self.music_counter.set_current_time(time);
        self
    }
}
|
/// C-callable function that prints `label` forever; it never actually
/// returns despite the declared `i32` return type.
#[no_mangle]
extern "C" fn loop_forever(label: i32) -> i32 {
    loop {
        println!("Label {}", label);
    }
}
/// C-callable function that prints a fixed line to stdout.
#[no_mangle]
extern "C" fn print_stuff() {
    println!("stuff");
}
|
use wasm_bindgen::JsCast;
/// An off-DOM `<canvas>` element paired with its 2D rendering context.
/// (Name typo "Texsture" kept for caller compatibility.)
pub struct TexstureLayer {
    // The detached canvas element (never inserted into the document here).
    element: web_sys::HtmlCanvasElement,
    // 2D context obtained from `element`; refreshed on resize.
    context: web_sys::CanvasRenderingContext2d,
}
#[allow(dead_code)]
impl TexstureLayer {
    /// Creates a detached `<canvas>` of the given pixel size and grabs
    /// its 2D rendering context. Panics if the DOM or context is
    /// unavailable.
    pub fn new(size: &[u32; 2]) -> Self {
        let document = web_sys::window().unwrap().document().unwrap();
        let element = document
            .create_element("canvas")
            .unwrap()
            .dyn_into::<web_sys::HtmlCanvasElement>()
            .unwrap();
        element.set_width(size[0]);
        element.set_height(size[1]);
        let context = Self::get_context2d_from_canvas(&element);
        Self { element, context }
    }
    /// Fetches the "2d" context of `canvas`, panicking when it cannot be
    /// obtained or has an unexpected type.
    fn get_context2d_from_canvas(
        canvas: &web_sys::HtmlCanvasElement,
    ) -> web_sys::CanvasRenderingContext2d {
        let raw_context = canvas.get_context("2d").unwrap().unwrap();
        raw_context
            .dyn_into::<web_sys::CanvasRenderingContext2d>()
            .unwrap()
    }
    /// Re-acquires the 2D context from the stored canvas element.
    pub fn reset_context(&mut self) {
        self.context = Self::get_context2d_from_canvas(&self.element);
    }
    /// Resizes the canvas and refreshes the cached context.
    pub fn set_size(&mut self, size: &[u32; 2]) {
        let [width, height] = *size;
        self.element.set_width(width);
        self.element.set_height(height);
        self.reset_context();
    }
    /// Borrows the underlying canvas element.
    pub fn element(&self) -> &web_sys::HtmlCanvasElement {
        &self.element
    }
    /// Borrows the current 2D rendering context.
    pub fn context(&self) -> &web_sys::CanvasRenderingContext2d {
        &self.context
    }
}
|
pub mod saving_parameters;
pub use saving_parameters::SavingParameters;
|
//! Create ping and pong services that can be used for testing
//! the different operations available in terms of intercom, and
//! for monitoring how the start and shutdown processes work.
//!
use async_trait::async_trait;
use organix::{
service, IntercomMsg, Organix, Service, ServiceIdentifier, ServiceState, WatchdogBuilder,
};
use std::time::Duration;
use tokio::time::delay_for;
/// Service that waits for `PingMsg`s and answers each one with a `PongMsg`.
struct Ping {
    state: ServiceState<Self>,
}
/// Service that kicks off the exchange and answers each `PongMsg` with a
/// `PingMsg`.
struct Pong {
    state: ServiceState<Self>,
}
/// Intercom message consumed by the `Ping` service.
#[derive(Debug, IntercomMsg)]
struct PingMsg;
/// Intercom message consumed by the `Pong` service.
#[derive(Debug, IntercomMsg)]
struct PongMsg;
#[async_trait]
impl Service for Ping {
    const SERVICE_IDENTIFIER: ServiceIdentifier = "ping";
    type IntercomMsg = PingMsg;
    fn prepare(state: ServiceState<Self>) -> Self {
        Self { state }
    }
    /// Echo loop: every received ping is answered with a pong after a 50 ms
    /// pause; exits when the intercom closes or a send fails.
    async fn start(mut self) {
        let mut pong = self.state.intercom_with::<Pong>();
        while let Some(msg) = self.state.intercom_mut().recv().await {
            dbg!(msg);
            // NOTE(review): delay_for is tokio 0.2 API (renamed `sleep` in
            // tokio 1.0) — keep in sync with the tokio version in use.
            delay_for(Duration::from_millis(50)).await;
            if let Err(err) = pong.send(PongMsg).await {
                dbg!(err);
                break;
            }
        }
    }
}
#[async_trait]
impl Service for Pong {
    const SERVICE_IDENTIFIER: ServiceIdentifier = "pong";
    type IntercomMsg = PongMsg;
    fn prepare(state: ServiceState<Self>) -> Self {
        Self { state }
    }
    /// Sends the very first ping to bootstrap the exchange, then answers each
    /// received pong with another ping after a 50 ms pause.
    async fn start(mut self) {
        let mut ping = self.state.intercom_with::<Ping>();
        ping.send(PingMsg).await.unwrap();
        while let Some(msg) = self.state.intercom_mut().recv().await {
            dbg!(msg);
            delay_for(Duration::from_millis(50)).await;
            if let Err(err) = ping.send(PingMsg).await {
                dbg!(err);
                break;
            }
        }
    }
}
/// Watchdog service set wiring the two test services together.
#[derive(Organix)]
#[runtime(shared)]
struct PingPongServices {
    // presumably enables the timer driver needed by delay_for — TODO confirm
    // against the organix `runtime` attribute docs
    #[runtime(time)]
    ping: service::ServiceManager<Ping>,
    pong: service::ServiceManager<Pong>,
}
/// test that the execution of the watchdog will be stopped shortly
/// after receiving the shutdown command from the controller
#[test]
fn ping_pong() {
    use tracing_subscriber::{fmt, EnvFilter};
    // Route tracing output according to RUST_LOG-style env filtering.
    let subscriber = fmt::Subscriber::builder()
        .with_env_filter(EnvFilter::from_default_env())
        .finish();
    tracing::subscriber::set_global_default(subscriber).expect("setting tracing default failed");
    let watchdog = WatchdogBuilder::<PingPongServices>::new().build();
    let mut controller = watchdog.control();
    // Drive the scenario: start both services, let them exchange messages
    // for ~400 ms, then request a clean shutdown.
    watchdog.spawn(async move {
        controller.start::<Ping>().await.unwrap();
        controller.start::<Pong>().await.unwrap();
        delay_for(Duration::from_millis(400)).await;
        controller.shutdown().await;
    });
    // Block until every service has terminated.
    watchdog.wait_finished();
}
|
use super::*;
use smallvec::SmallVec;
// Number of invalidities stored inline before SmallVec spills to the heap.
const SMALLVEC_ARRAY_LEN: usize = 8;
type SmallVecArray<V> = [V; SMALLVEC_ARRAY_LEN];
/// A collection of invalidities resulting from a validation
///
/// Collects invalidities that are detected while performing
/// a validation.
#[derive(Clone, Debug)]
#[cfg_attr(test, derive(Eq, PartialEq))]
pub struct Context<V>
where
    V: Invalidity,
{
    // Inline storage for up to SMALLVEC_ARRAY_LEN invalidities, heap beyond.
    invalidities: SmallVec<SmallVecArray<V>>,
}
impl<V> Context<V>
where
    V: Invalidity,
{
    /// Create a new valid and empty context
    #[inline]
    pub fn new() -> Self {
        Self {
            invalidities: SmallVec::new(),
        }
    }
    /// Check if the context is still valid, i.e. no invalidity has been
    /// recorded yet
    #[inline]
    pub fn is_valid(&self) -> bool {
        self.invalidities.is_empty()
    }
    /// Record a new invalidity within this context
    #[inline]
    pub fn invalidate(mut self, invalidity: impl Into<V>) -> Self {
        self.invalidities.push(invalidity.into());
        self
    }
    /// Conditionally record a new invalidity within this context
    #[inline]
    pub fn invalidate_if(self, is_violated: bool, invalidity: impl Into<V>) -> Self {
        if is_violated {
            self.invalidate(invalidity)
        } else {
            self
        }
    }
    // TODO: Make public?
    fn merge(mut self, other: Self) -> Self {
        // `extend` reserves once from the iterator's size hint, replacing
        // the previous manual reserve + push loop.
        self.invalidities.extend(other.invalidities);
        self
    }
    /// Merge the results of another validation
    ///
    /// Needed for collecting results from custom validation functions.
    #[inline]
    pub fn merge_result(self, res: Result<V>) -> Self {
        if let Err(other) = res {
            self.merge(other)
        } else {
            self
        }
    }
    /// Merge the mapped results of another validation
    ///
    /// Needed for collecting results from custom validation functions.
    pub fn map_and_merge_result<F, U>(mut self, res: Result<U>, map: F) -> Self
    where
        F: Fn(U) -> V,
        U: Invalidity,
    {
        if let Err(other) = res {
            // Map each foreign invalidity into this context's type while
            // merging; `extend` handles the capacity reservation.
            self.invalidities
                .extend(other.invalidities.into_iter().map(map));
        }
        self
    }
    /// Validate the target and merge the result into this context
    #[inline]
    pub fn validate(self, target: &impl Validate<Invalidity = V>) -> Self {
        self.merge_result(target.validate())
    }
    /// Validate the target, map the result, and merge it into this context
    #[inline]
    pub fn validate_and_map<F, U>(self, target: &impl Validate<Invalidity = U>, map: F) -> Self
    where
        F: Fn(U) -> V,
        U: Invalidity,
    {
        self.map_and_merge_result(target.validate(), map)
    }
    /// Finish the current validation of this context with a result
    ///
    /// Returns `Ok(())` if no invalidity was recorded, otherwise `Err` with
    /// the collected invalidities.
    #[inline]
    pub fn into_result(self) -> Result<V> {
        if self.invalidities.is_empty() {
            Ok(())
        } else {
            Err(self)
        }
    }
}
impl<V> Default for Context<V>
where
    V: Invalidity,
{
    /// Same as [`Context::new`]: a valid, empty context.
    fn default() -> Self {
        Self::new()
    }
}
/// Convert a context into the result of its validation.
// Implementing `From` instead of `Into` follows the std convention and
// clippy's `from_over_into` lint; the blanket `impl Into` keeps all existing
// `.into()` call sites working.
impl<V> From<Context<V>> for Result<V>
where
    V: Invalidity,
{
    fn from(context: Context<V>) -> Self {
        context.into_result()
    }
}
/// Transform the validation context into an iterator
/// that yields all the collected invalidities.
impl<V> IntoIterator for Context<V>
where
    V: Invalidity,
{
    type Item = V;
    // TODO: Replace with an opaque, existential type eventually (if ever possible):
    // type IntoIter = impl Iterator<V>;
    type IntoIter = smallvec::IntoIter<SmallVecArray<V>>;
    fn into_iter(self) -> Self::IntoIter {
        self.invalidities.into_iter()
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // A fresh context is valid and converts to Ok.
    #[test]
    fn valid_context() {
        let context = Context::<()>::new();
        assert!(context.is_valid());
        assert!(context.invalidities.is_empty());
        assert!(context.into_result().is_ok());
    }
    #[test]
    fn default_context() {
        assert_eq!(Context::<()>::new(), Context::<()>::default());
    }
    #[test]
    fn invalidate() {
        let mut context = Context::<()>::new();
        assert!(context.is_valid());
        // Push one past the inline capacity to also cover the heap spill.
        for _ in 0..=SMALLVEC_ARRAY_LEN {
            let invalidities_before = context.invalidities.len();
            context = context.invalidate(());
            assert!(!context.is_valid());
            let invalidities_after = context.invalidities.len();
            assert_eq!(invalidities_after, invalidities_before + 1);
        }
        assert_eq!(SMALLVEC_ARRAY_LEN + 1, context.invalidities.len());
        assert!(context.into_result().is_err());
    }
}
|
#![allow(dead_code)]
/// mstatus.MPP (machine previous privilege) field mask, bits 11..=12 (0x1800).
pub const MSTATUS_MPP_MASK: u64 = 6144;
/// mstatus.MPP value for machine mode (0b11 << 11).
pub const MSTATUS_MPP_M: u64 = 6144;
/// mstatus.MPP value for supervisor mode (0b01 << 11).
pub const MSTATUS_MPP_S: u64 = 2048;
/// mstatus.MPP value for user mode (0b00 << 11).
pub const MSTATUS_MPP_U: u64 = 0;
/// mstatus.MIE: machine-mode interrupt enable (bit 3).
pub const MSTATUS_MIE: u64 = 8;
/// sstatus.SIE: supervisor-mode interrupt enable (bit 1).
pub const SSTATUS_SIE: u64 = 2;
// NOTE(review): llvm_asm! is deprecated and removed in recent Rust releases;
// migrating to core::arch::asm! would be required on newer toolchains.
/// Read the sstatus CSR (supervisor status).
#[inline]
pub fn r_sstatus() -> u64 {
    let mut x: u64;
    unsafe {
        llvm_asm!("csrr $0, sstatus" : "=r" (x));
    }
    x
}
/// Write the sstatus CSR.
#[inline]
pub fn w_sstatus(x: u64) {
    unsafe {
        llvm_asm!("csrw sstatus, $0" : : "r" (x));
    }
}
/// Read the mstatus CSR (machine status).
#[inline]
pub fn r_mstatus() -> u64 {
    let mut x: u64;
    unsafe {
        llvm_asm!("csrr $0, mstatus" : "=r" (x));
    }
    x
}
/// Write the mstatus CSR.
#[inline]
pub fn w_mstatus(x: u64) {
    unsafe {
        llvm_asm!("csrw mstatus, $0" : : "r" (x));
    }
}
/// Write the satp CSR (supervisor address translation and protection,
/// i.e. the page-table root).
#[inline]
pub fn w_satp(x: u64) {
    unsafe {
        llvm_asm!("csrw satp, $0" : : "r" (x));
    }
}
/// Read the medeleg CSR (machine exception delegation).
#[inline]
pub fn r_medeleg() -> u64 {
    let mut x: u64;
    unsafe {
        llvm_asm!("csrr $0, medeleg" : "=r" (x));
    }
    x
}
/// Write the medeleg CSR.
#[inline]
pub fn w_medeleg(x: u64) {
    unsafe {
        llvm_asm!("csrw medeleg, $0" : : "r" (x));
    }
}
/// Read the mepc CSR (machine exception program counter).
#[inline]
pub fn r_mepc() -> u64 {
    let mut x: u64;
    unsafe {
        llvm_asm!("csrr $0, mepc" : "=r" (x));
    }
    x
}
/// Write the mepc CSR.
#[inline]
pub fn w_mepc(x: u64) {
    unsafe {
        llvm_asm!("csrw mepc, $0" : : "r" (x));
    }
}
/// Read the mideleg CSR (machine interrupt delegation).
#[inline]
pub fn r_mideleg() -> u64 {
    let mut x: u64;
    unsafe {
        llvm_asm!("csrr $0, mideleg" : "=r" (x));
    }
    x
}
/// Write the mideleg CSR.
#[inline]
pub fn w_mideleg(x: u64) {
    unsafe {
        llvm_asm!("csrw mideleg, $0" : : "r" (x));
    }
}
/// sie.SEIE: supervisor external interrupt enable (bit 9).
pub const SIE_SEIE: u64 = 512;
/// sie.STIE: supervisor timer interrupt enable (bit 5).
pub const SIE_STIE: u64 = 32;
/// sie.SSIE: supervisor software interrupt enable (bit 1).
pub const SIE_SSIE: u64 = 2;
/// Read the sie CSR (supervisor interrupt enable).
#[inline]
pub fn r_sie() -> u64 {
    let mut x: u64;
    unsafe {
        llvm_asm!("csrr $0, sie" : "=r" (x));
    }
    x
}
/// Write the sie CSR.
#[inline]
pub fn w_sie(x: u64) {
    unsafe {
        llvm_asm!("csrw sie, $0" : : "r" (x));
    }
}
/// mip.STIP: supervisor timer interrupt pending (bit 5).
pub const MIP_STIP: u64 = 32;
/// Read the mip CSR (machine interrupt pending).
#[inline]
pub fn r_mip() -> u64 {
    let mut x: u64;
    unsafe {
        llvm_asm!("csrr $0, mip" : "=r" (x));
    }
    x
}
/// Write the mip CSR.
#[inline]
pub fn w_mip(x: u64) {
    unsafe {
        llvm_asm!("csrw mip, $0" : : "r" (x));
    }
}
/// mie.MEIE: machine external interrupt enable (bit 11).
pub const MIE_MEIE: u64 = 2048;
/// mie.MTIE: machine timer interrupt enable (bit 7).
pub const MIE_MTIE: u64 = 128;
/// mie.MSIE: machine software interrupt enable (bit 3).
pub const MIE_MSIE: u64 = 8;
/// Read the mie CSR (machine interrupt enable).
#[inline]
pub fn r_mie() -> u64 {
    let mut x: u64;
    unsafe {
        llvm_asm!("csrr $0, mie" : "=r" (x));
    }
    x
}
/// Write the mie CSR.
#[inline]
pub fn w_mie(x: u64) {
    unsafe {
        llvm_asm!("csrw mie, $0" : : "r" (x));
    }
}
/// Write the mtvec CSR (machine trap-handler base address).
#[inline]
pub fn w_mtvec(x: u64) {
    unsafe {
        llvm_asm!("csrw mtvec, $0" : : "r" (x));
    }
}
/// Write the stvec CSR (supervisor trap-handler base address).
#[inline]
pub fn w_stvec(x: u64) {
    unsafe {
        llvm_asm!("csrw stvec, $0" : : "r" (x));
    }
}
/// Write the mscratch CSR (machine scratch register).
#[inline]
pub fn w_mscratch(x: u64) {
    unsafe {
        llvm_asm!("csrw mscratch, $0" : : "r" (x));
    }
}
/// Read the scause CSR (supervisor trap cause).
#[inline]
pub fn r_scause() -> u64 {
    let mut x: u64;
    unsafe {
        llvm_asm!("csrr $0, scause" : "=r" (x));
    }
    x
}
/// Read the stval CSR (supervisor trap value, e.g. faulting address).
#[inline]
pub fn r_stval() -> u64 {
    let mut x: u64;
    unsafe {
        llvm_asm!("csrr $0, stval" : "=r" (x));
    }
    x
}
/// Read the mcause CSR (machine trap cause).
#[inline]
pub fn r_mcause() -> u64 {
    let mut x: u64;
    unsafe {
        llvm_asm!("csrr $0, mcause" : "=r" (x));
    }
    x
}
/// Flush the entire TLB (`sfence.vma zero, zero`).
#[inline]
pub fn sfence_vma() {
    unsafe {
        llvm_asm!("sfence.vma zero, zero");
    }
}
|
// ===============================================================================
// Authors: AFRL/RQQA
// Organization: Air Force Research Laboratory, Aerospace Systems Directorate, Power and Control Division
//
// Copyright (c) 2017 Government of the United States of America, as represented by
// the Secretary of the Air Force. No copyright is claimed in the United States under
// Title 17, U.S. Code. All Other Rights Reserved.
// ===============================================================================
// This file was auto-created by LmcpGen. Modifications will be overwritten.
use avtas::lmcp::{Error, ErrorType, Lmcp, LmcpSubscription, SrcLoc, Struct, StructInfo};
use std::fmt::Debug;
/// LMCP `EntityPerception` message (afrl.cmasi.perceive series); auto-generated.
#[derive(Clone, Debug, Default)]
#[repr(C)]
pub struct EntityPerception {
    pub perceived_entity_id: u32,
    pub perceiver_id: u32,
    pub perceiver_payloads: Vec<u32>,
    pub velocity: Vec<f32>,
    pub velocity_error: Vec<f32>,
    pub velocity_valid: bool,
    pub attitude: Vec<f32>,
    pub attitude_error: Vec<f32>,
    pub attitude_valid: bool,
    pub location: Box<::afrl::cmasi::location3d::Location3DT>,
    pub location_error: Vec<f32>,
    pub time_last_seen: i64,
}
impl PartialEq for EntityPerception {
    /// Field-by-field equality over every serialized field.
    // The generated version compared `&self.x == &other.x` behind a redundant
    // `true &&` seed; direct value comparisons are equivalent and idiomatic
    // (clippy: op_ref). Note this block is LmcpGen output — regeneration will
    // overwrite it unless the generator template is fixed too.
    fn eq(&self, other: &EntityPerception) -> bool {
        self.perceived_entity_id == other.perceived_entity_id
            && self.perceiver_id == other.perceiver_id
            && self.perceiver_payloads == other.perceiver_payloads
            && self.velocity == other.velocity
            && self.velocity_error == other.velocity_error
            && self.velocity_valid == other.velocity_valid
            && self.attitude == other.attitude
            && self.attitude_error == other.attitude_error
            && self.attitude_valid == other.attitude_valid
            && self.location == other.location
            && self.location_error == other.location_error
            && self.time_last_seen == other.time_last_seen
    }
}
impl LmcpSubscription for EntityPerception {
    /// Fully-qualified LMCP subscription topic for this message type.
    fn subscription() -> &'static str { "afrl.cmasi.perceive.EntityPerception" }
}
impl Struct for EntityPerception {
    /// LMCP wire header identifying this struct (series id, version, type id).
    fn struct_info() -> StructInfo {
        StructInfo {
            exist: 1,
            series: 5784119745305990725u64,
            version: 1,
            struct_ty: 1,
        }
    }
}
impl Lmcp for EntityPerception {
    // Serialize: struct header first, then every field in declaration order.
    // Each scoped block advances `pos` past the bytes just written.
    fn ser(&self, buf: &mut[u8]) -> Result<usize, Error> {
        let mut pos = 0;
        {
            let x = Self::struct_info().ser(buf)?;
            pos += x;
        }
        {
            let r = get!(buf.get_mut(pos ..));
            let writeb: usize = self.perceived_entity_id.ser(r)?;
            pos += writeb;
        }
        {
            let r = get!(buf.get_mut(pos ..));
            let writeb: usize = self.perceiver_id.ser(r)?;
            pos += writeb;
        }
        {
            let r = get!(buf.get_mut(pos ..));
            let writeb: usize = self.perceiver_payloads.ser(r)?;
            pos += writeb;
        }
        {
            let r = get!(buf.get_mut(pos ..));
            let writeb: usize = self.velocity.ser(r)?;
            pos += writeb;
        }
        {
            let r = get!(buf.get_mut(pos ..));
            let writeb: usize = self.velocity_error.ser(r)?;
            pos += writeb;
        }
        {
            let r = get!(buf.get_mut(pos ..));
            let writeb: usize = self.velocity_valid.ser(r)?;
            pos += writeb;
        }
        {
            let r = get!(buf.get_mut(pos ..));
            let writeb: usize = self.attitude.ser(r)?;
            pos += writeb;
        }
        {
            let r = get!(buf.get_mut(pos ..));
            let writeb: usize = self.attitude_error.ser(r)?;
            pos += writeb;
        }
        {
            let r = get!(buf.get_mut(pos ..));
            let writeb: usize = self.attitude_valid.ser(r)?;
            pos += writeb;
        }
        {
            let r = get!(buf.get_mut(pos ..));
            let writeb: usize = self.location.ser(r)?;
            pos += writeb;
        }
        {
            let r = get!(buf.get_mut(pos ..));
            let writeb: usize = self.location_error.ser(r)?;
            pos += writeb;
        }
        {
            let r = get!(buf.get_mut(pos ..));
            let writeb: usize = self.time_last_seen.ser(r)?;
            pos += writeb;
        }
        Ok(pos)
    }
    // Deserialize: read and verify the struct header, then each field in the
    // same order `ser` wrote them; returns the value and bytes consumed.
    fn deser(buf: &[u8]) -> Result<(EntityPerception, usize), Error> {
        let mut pos = 0;
        let (si, u) = StructInfo::deser(buf)?;
        pos += u;
        if si == EntityPerception::struct_info() {
            let mut out: EntityPerception = Default::default();
            {
                let r = get!(buf.get(pos ..));
                let (x, readb): (u32, usize) = Lmcp::deser(r)?;
                out.perceived_entity_id = x;
                pos += readb;
            }
            {
                let r = get!(buf.get(pos ..));
                let (x, readb): (u32, usize) = Lmcp::deser(r)?;
                out.perceiver_id = x;
                pos += readb;
            }
            {
                let r = get!(buf.get(pos ..));
                let (x, readb): (Vec<u32>, usize) = Lmcp::deser(r)?;
                out.perceiver_payloads = x;
                pos += readb;
            }
            {
                let r = get!(buf.get(pos ..));
                let (x, readb): (Vec<f32>, usize) = Lmcp::deser(r)?;
                out.velocity = x;
                pos += readb;
            }
            {
                let r = get!(buf.get(pos ..));
                let (x, readb): (Vec<f32>, usize) = Lmcp::deser(r)?;
                out.velocity_error = x;
                pos += readb;
            }
            {
                let r = get!(buf.get(pos ..));
                let (x, readb): (bool, usize) = Lmcp::deser(r)?;
                out.velocity_valid = x;
                pos += readb;
            }
            {
                let r = get!(buf.get(pos ..));
                let (x, readb): (Vec<f32>, usize) = Lmcp::deser(r)?;
                out.attitude = x;
                pos += readb;
            }
            {
                let r = get!(buf.get(pos ..));
                let (x, readb): (Vec<f32>, usize) = Lmcp::deser(r)?;
                out.attitude_error = x;
                pos += readb;
            }
            {
                let r = get!(buf.get(pos ..));
                let (x, readb): (bool, usize) = Lmcp::deser(r)?;
                out.attitude_valid = x;
                pos += readb;
            }
            {
                let r = get!(buf.get(pos ..));
                let (x, readb): (Box<::afrl::cmasi::location3d::Location3DT>, usize) = Lmcp::deser(r)?;
                out.location = x;
                pos += readb;
            }
            {
                let r = get!(buf.get(pos ..));
                let (x, readb): (Vec<f32>, usize) = Lmcp::deser(r)?;
                out.location_error = x;
                pos += readb;
            }
            {
                let r = get!(buf.get(pos ..));
                let (x, readb): (i64, usize) = Lmcp::deser(r)?;
                out.time_last_seen = x;
                pos += readb;
            }
            Ok((out, pos))
        } else {
            Err(error!(ErrorType::InvalidStructInfo))
        }
    }
    // Encoded size in bytes: 15 is the generated fixed overhead for this
    // struct (header etc. — generated constant, TODO confirm breakdown),
    // plus each field's own encoded size.
    fn size(&self) -> usize {
        let mut size = 15;
        size += self.perceived_entity_id.size();
        size += self.perceiver_id.size();
        size += self.perceiver_payloads.size();
        size += self.velocity.size();
        size += self.velocity_error.size();
        size += self.velocity_valid.size();
        size += self.attitude.size();
        size += self.attitude_error.size();
        size += self.attitude_valid.size();
        size += self.location.size();
        size += self.location_error.size();
        size += self.time_last_seen.size();
        size
    }
}
/// Object-safe accessor trait for `EntityPerception`, allowing it to be used
/// as a trait object alongside other LMCP message types. Auto-generated.
pub trait EntityPerceptionT: Debug + Send {
    // Downcast hooks; only the concrete EntityPerception returns Some.
    fn as_afrl_cmasi_perceive_entity_perception(&self) -> Option<&EntityPerception> { None }
    fn as_mut_afrl_cmasi_perceive_entity_perception(&mut self) -> Option<&mut EntityPerception> { None }
    fn perceived_entity_id(&self) -> u32;
    fn perceived_entity_id_mut(&mut self) -> &mut u32;
    fn perceiver_id(&self) -> u32;
    fn perceiver_id_mut(&mut self) -> &mut u32;
    fn perceiver_payloads(&self) -> &Vec<u32>;
    fn perceiver_payloads_mut(&mut self) -> &mut Vec<u32>;
    fn velocity(&self) -> &Vec<f32>;
    fn velocity_mut(&mut self) -> &mut Vec<f32>;
    fn velocity_error(&self) -> &Vec<f32>;
    fn velocity_error_mut(&mut self) -> &mut Vec<f32>;
    fn velocity_valid(&self) -> bool;
    fn velocity_valid_mut(&mut self) -> &mut bool;
    fn attitude(&self) -> &Vec<f32>;
    fn attitude_mut(&mut self) -> &mut Vec<f32>;
    fn attitude_error(&self) -> &Vec<f32>;
    fn attitude_error_mut(&mut self) -> &mut Vec<f32>;
    fn attitude_valid(&self) -> bool;
    fn attitude_valid_mut(&mut self) -> &mut bool;
    fn location(&self) -> &Box<::afrl::cmasi::location3d::Location3DT>;
    fn location_mut(&mut self) -> &mut Box<::afrl::cmasi::location3d::Location3DT>;
    fn location_error(&self) -> &Vec<f32>;
    fn location_error_mut(&mut self) -> &mut Vec<f32>;
    fn time_last_seen(&self) -> i64;
    fn time_last_seen_mut(&mut self) -> &mut i64;
}
impl Clone for Box<EntityPerceptionT> {
    // Clone via downcast to the concrete type; the sole implementor returns
    // Some from the downcast hook, so the else branch is unreachable.
    fn clone(&self) -> Box<EntityPerceptionT> {
        if let Some(x) = EntityPerceptionT::as_afrl_cmasi_perceive_entity_perception(self.as_ref()) {
            Box::new(x.clone())
        } else {
            unreachable!()
        }
    }
}
// The default trait object wraps a default-initialized concrete value.
impl Default for Box<EntityPerceptionT> {
    fn default() -> Box<EntityPerceptionT> { Box::new(EntityPerception::default()) }
}
impl PartialEq for Box<EntityPerceptionT> {
    // Equal only when both trait objects downcast to the concrete type and
    // those values compare equal; otherwise false.
    fn eq(&self, other: &Box<EntityPerceptionT>) -> bool {
        if let (Some(x), Some(y)) =
            (EntityPerceptionT::as_afrl_cmasi_perceive_entity_perception(self.as_ref()),
             EntityPerceptionT::as_afrl_cmasi_perceive_entity_perception(other.as_ref())) {
            x == y
        } else {
            false
        }
    }
}
// Serialization for the boxed trait object: forward everything to the
// concrete EntityPerception obtained via downcast.
impl Lmcp for Box<EntityPerceptionT> {
    fn ser(&self, buf: &mut[u8]) -> Result<usize, Error> {
        if let Some(x) = EntityPerceptionT::as_afrl_cmasi_perceive_entity_perception(self.as_ref()) {
            x.ser(buf)
        } else {
            unreachable!()
        }
    }
    fn deser(buf: &[u8]) -> Result<(Box<EntityPerceptionT>, usize), Error> {
        // Peek at the header to reject buffers for other struct types.
        let (si, _) = StructInfo::deser(buf)?;
        if si == EntityPerception::struct_info() {
            let (x, readb) = EntityPerception::deser(buf)?;
            Ok((Box::new(x), readb))
        } else {
            Err(error!(ErrorType::InvalidStructInfo))
        }
    }
    fn size(&self) -> usize {
        if let Some(x) = EntityPerceptionT::as_afrl_cmasi_perceive_entity_perception(self.as_ref()) {
            x.size()
        } else {
            unreachable!()
        }
    }
}
// Generated accessor plumbing: every method forwards to the field of the
// same name; the downcast hooks return Some(self).
impl EntityPerceptionT for EntityPerception {
    fn as_afrl_cmasi_perceive_entity_perception(&self) -> Option<&EntityPerception> { Some(self) }
    fn as_mut_afrl_cmasi_perceive_entity_perception(&mut self) -> Option<&mut EntityPerception> { Some(self) }
    fn perceived_entity_id(&self) -> u32 { self.perceived_entity_id }
    fn perceived_entity_id_mut(&mut self) -> &mut u32 { &mut self.perceived_entity_id }
    fn perceiver_id(&self) -> u32 { self.perceiver_id }
    fn perceiver_id_mut(&mut self) -> &mut u32 { &mut self.perceiver_id }
    fn perceiver_payloads(&self) -> &Vec<u32> { &self.perceiver_payloads }
    fn perceiver_payloads_mut(&mut self) -> &mut Vec<u32> { &mut self.perceiver_payloads }
    fn velocity(&self) -> &Vec<f32> { &self.velocity }
    fn velocity_mut(&mut self) -> &mut Vec<f32> { &mut self.velocity }
    fn velocity_error(&self) -> &Vec<f32> { &self.velocity_error }
    fn velocity_error_mut(&mut self) -> &mut Vec<f32> { &mut self.velocity_error }
    fn velocity_valid(&self) -> bool { self.velocity_valid }
    fn velocity_valid_mut(&mut self) -> &mut bool { &mut self.velocity_valid }
    fn attitude(&self) -> &Vec<f32> { &self.attitude }
    fn attitude_mut(&mut self) -> &mut Vec<f32> { &mut self.attitude }
    fn attitude_error(&self) -> &Vec<f32> { &self.attitude_error }
    fn attitude_error_mut(&mut self) -> &mut Vec<f32> { &mut self.attitude_error }
    fn attitude_valid(&self) -> bool { self.attitude_valid }
    fn attitude_valid_mut(&mut self) -> &mut bool { &mut self.attitude_valid }
    fn location(&self) -> &Box<::afrl::cmasi::location3d::Location3DT> { &self.location }
    fn location_mut(&mut self) -> &mut Box<::afrl::cmasi::location3d::Location3DT> { &mut self.location }
    fn location_error(&self) -> &Vec<f32> { &self.location_error }
    fn location_error_mut(&mut self) -> &mut Vec<f32> { &mut self.location_error }
    fn time_last_seen(&self) -> i64 { self.time_last_seen }
    fn time_last_seen_mut(&mut self) -> &mut i64 { &mut self.time_last_seen }
}
#[cfg(test)]
pub mod tests {
    use super::*;
    use quickcheck::*;
    impl Arbitrary for EntityPerception {
        fn arbitrary<G: Gen>(_g: &mut G) -> EntityPerception {
            EntityPerception {
                perceived_entity_id: Arbitrary::arbitrary(_g),
                perceiver_id: Arbitrary::arbitrary(_g),
                perceiver_payloads: Arbitrary::arbitrary(_g),
                velocity: Arbitrary::arbitrary(_g),
                velocity_error: Arbitrary::arbitrary(_g),
                velocity_valid: Arbitrary::arbitrary(_g),
                attitude: Arbitrary::arbitrary(_g),
                attitude_error: Arbitrary::arbitrary(_g),
                attitude_valid: Arbitrary::arbitrary(_g),
                location: Box::new(::afrl::cmasi::location3d::Location3D::arbitrary(_g)),
                location_error: Arbitrary::arbitrary(_g),
                time_last_seen: Arbitrary::arbitrary(_g),
            }
        }
    }
    quickcheck! {
        // Serializing into a buffer of exactly `size()` bytes must consume
        // the whole buffer. Vectors longer than the u16 length prefix used
        // on the wire are discarded rather than failed.
        fn serializes(x: EntityPerception) -> Result<TestResult, Error> {
            use std::u16;
            if x.perceiver_payloads.len() > (u16::MAX as usize) { return Ok(TestResult::discard()); }
            if x.velocity.len() > (u16::MAX as usize) { return Ok(TestResult::discard()); }
            if x.velocity_error.len() > (u16::MAX as usize) { return Ok(TestResult::discard()); }
            if x.attitude.len() > (u16::MAX as usize) { return Ok(TestResult::discard()); }
            if x.attitude_error.len() > (u16::MAX as usize) { return Ok(TestResult::discard()); }
            if x.location_error.len() > (u16::MAX as usize) { return Ok(TestResult::discard()); }
            let mut buf: Vec<u8> = vec![0; x.size()];
            let sx = x.ser(&mut buf)?;
            Ok(TestResult::from_bool(sx == x.size()))
        }
        // ser followed by deser must reproduce the value and byte count.
        fn roundtrips(x: EntityPerception) -> Result<TestResult, Error> {
            use std::u16;
            if x.perceiver_payloads.len() > (u16::MAX as usize) { return Ok(TestResult::discard()); }
            if x.velocity.len() > (u16::MAX as usize) { return Ok(TestResult::discard()); }
            if x.velocity_error.len() > (u16::MAX as usize) { return Ok(TestResult::discard()); }
            if x.attitude.len() > (u16::MAX as usize) { return Ok(TestResult::discard()); }
            if x.attitude_error.len() > (u16::MAX as usize) { return Ok(TestResult::discard()); }
            if x.location_error.len() > (u16::MAX as usize) { return Ok(TestResult::discard()); }
            let mut buf: Vec<u8> = vec![0; x.size()];
            let sx = x.ser(&mut buf)?;
            let (y, sy) = EntityPerception::deser(&buf)?;
            Ok(TestResult::from_bool(sx == sy && x == y))
        }
    }
}
|
use grid::Grid;
pub mod part1;
pub mod part2;
/// Run both parts of the puzzle.
pub fn run() {
    part1::run();
    part2::run();
}
/// The puzzle input bundled into the binary at compile time.
pub fn default_input() -> &'static str{
    include_str!("input")
}
pub fn parse_input(input : &str) -> Grid<char> {
let init :Vec<_> = input.lines().next().unwrap().chars().collect();
let len = &init.len();
let mut grid = Grid::from_vec(init, *len);
for l in input.lines().skip(1) {
grid.push_row(l.chars().collect())
}
grid
} |
#[cfg(test)]
mod tests {
    use kingslayer::Cli;
    /// Equipping then dropping a weapon moves it between loose inventory,
    /// the "Main hand" slot, and the room.
    #[test]
    fn player_equip() {
        let cli = Cli::from_file("worlds/test_world.ron");
        cli.ask("n");
        cli.ask("take iron sword");
        assert!(cli.ask("i").contains("iron sword") && !cli.ask("i").contains("Main hand"));
        assert!(cli.ask("draw iron sword").contains("Equipped."));
        assert!(
            cli.ask("i").contains("Main hand: iron sword")
                && !cli.ask("i").contains(" iron sword")
        );
        assert!(cli.ask("drop sword").contains("Dropped."));
        assert!(
            !cli.ask("i").contains("Main hand")
                && !cli.ask("i").contains("iron sword")
                && cli.ask("l").contains("iron sword")
        );
    }
    /// Donning armor moves it from loose inventory into the "Armor" slot.
    #[test]
    fn player_don() {
        let cli = Cli::from_file("worlds/test_world.ron");
        cli.ask("n");
        cli.ask("take leather armor");
        assert!(cli.ask("i").contains("leather armor") && !cli.ask("i").contains("Armor"));
        assert!(cli.ask("don leather armor").contains("Donned."));
        assert!(
            cli.ask("i").contains("Armor: leather armor")
                && !cli.ask("i").contains(" leather armor")
        );
    }
}
|
#[macro_use(lazy_static)]
extern crate lazy_static;
extern crate regex;
use std::collections::hash_map::HashMap;
use std::env;
use std::fs;
use std::process;
use regex::Regex;
// A minute within the midnight hour of the guard log.
type Minute = u8;
// Guard id as parsed from "Guard #<id> begins shift".
type GuardIdentifier = u32;
// All sleep periods recorded for one guard.
type GuardSleepingPeriods = Vec<GuardSleepPeriod>;
// Sleep periods keyed by guard.
type GuardSleepingSummary = HashMap<GuardIdentifier, GuardSleepingPeriods>;
// Literal event texts appearing in the log.
static LOG_LINE_FALLS_ASLEEP: &'static str = "falls asleep";
static LOG_LINE_WAKES_UP: &'static str = "wakes up";
/// One sleep interval within the midnight hour, `[start_minute, end_minute)`.
#[derive(Debug)]
struct GuardSleepPeriod {
    start_minute: Minute,
    end_minute: Minute,
}
impl GuardSleepPeriod {
    /// Length of the period in minutes.
    // NOTE(review): underflows (panics in debug builds) if end < start —
    // assumes periods never wrap past the hour; confirm against the input.
    fn duration(&self) -> u8 {
        self.end_minute - self.start_minute
    }
}
/// Aggregated sleeping statistics for one guard.
#[derive(Debug)]
struct GuardBehaviour {
    guard_identifier: GuardIdentifier,
    minutes_asleep: u32,
    most_frequent_minute_asleep: Minute,
}
impl GuardBehaviour {
    /// Puzzle answer contribution: guard id times its sleepiest minute.
    fn solution(&self) -> u32 {
        self.guard_identifier * (self.most_frequent_minute_asleep as u32)
    }
}
fn parse_to_guard_sleeps(file_contents: String) -> GuardSleepingSummary {
// Sorting input lines has not been implemented
lazy_static! {
static ref GUARD_LOG_LINE_REGEX: Regex =
Regex::new(r"^\[(?P<year>\d{4})\-(?P<month>\d{2})\-(?P<day>\d{2}) (?P<hour>\d{2}):(?P<minute>\d{2})\] (?P<log_line>.*)$")
.unwrap();
static ref GUARD_BEGINS_REGEX: Regex = Regex::new(
r"Guard #(?P<guard_identifier>\d+) begins shift"
).unwrap();
}
let mut guard_sleeps: HashMap<GuardIdentifier, GuardSleepingPeriods> = HashMap::new();
let mut current_guard_id: GuardIdentifier = 0;
let mut current_guard_asleep_minute: u8 = 0;
for line in file_contents.lines() {
match GUARD_LOG_LINE_REGEX.captures(line) {
Some(caps) => {
let log_line = caps.name("log_line").unwrap().as_str();
match GUARD_BEGINS_REGEX.captures(log_line) {
Some(begins_caps) => {
current_guard_id = begins_caps
.name("guard_identifier")
.unwrap()
.as_str()
.parse::<GuardIdentifier>()
.unwrap();
}
_ => (),
}
let log_line_minute = caps.name("minute").unwrap().as_str().parse::<u8>().unwrap();
if log_line == LOG_LINE_FALLS_ASLEEP {
current_guard_asleep_minute = log_line_minute;
} else if log_line == LOG_LINE_WAKES_UP {
let e: &mut Vec<GuardSleepPeriod> = guard_sleeps
.entry(current_guard_id)
.or_insert_with(|| Vec::new());
let period = GuardSleepPeriod {
start_minute: current_guard_asleep_minute,
end_minute: log_line_minute,
};
e.push(period);
}
}
_ => (),
};
}
guard_sleeps
}
/// Total minutes asleep across all of a guard's sleep periods.
fn calc_minutes_asleep(sleeps: &GuardSleepingPeriods) -> u32 {
    sleeps.iter().map(|s| s.duration() as u32).sum()
}
/// The minute of the hour during which the guard was most often asleep.
/// Ties are resolved by hash-map iteration order (unspecified).
fn calc_most_frequent_minute(sleeps: &GuardSleepingPeriods) -> Minute {
    // Tally how many periods cover each minute of the hour.
    let mut tally: HashMap<Minute, u32> = HashMap::new();
    for period in sleeps.iter() {
        for minute in period.start_minute..period.end_minute {
            *tally.entry(minute).or_insert(0) += 1;
        }
    }
    // Scan for the minute with the highest tally.
    let mut best_minute: Minute = 0;
    let mut best_count: u32 = u32::min_value();
    for (&minute, &count) in tally.iter() {
        if count > best_count {
            best_count = count;
            best_minute = minute;
        }
    }
    best_minute
}
/// Condense every guard's sleep periods into a behaviour summary.
fn convert_sleeps_to_behaviours(guard_sleeps: GuardSleepingSummary) -> Vec<GuardBehaviour> {
    guard_sleeps
        .iter()
        .map(|(guard_identifier, sleeps)| GuardBehaviour {
            guard_identifier: *guard_identifier,
            minutes_asleep: calc_minutes_asleep(sleeps),
            most_frequent_minute_asleep: calc_most_frequent_minute(sleeps),
        })
        .collect()
}
/// The puzzle answer: solution value of the guard who slept the most
/// total minutes, or 0 when there are no guards at all.
fn calculate_solution(guard_behaviours: Vec<GuardBehaviour>) -> u32 {
    match guard_behaviours.iter().max_by_key(|b| b.minutes_asleep) {
        Some(behaviour) => behaviour.solution(),
        None => 0,
    }
}
/// Entry point: read the guard log from the file given as the single CLI
/// argument and print the computed solution.
fn main() {
    let args: Vec<String> = env::args().collect();
    // Exactly one argument (the input file path) is required.
    if args.len() != 2 {
        println!("One argument is expected: filepath");
        process::exit(1);
    }
    let filepath: &str = &args[1];
    let file_contents: String =
        fs::read_to_string(filepath).expect("Something went wrong when reading the file");
    let guard_sleeps = parse_to_guard_sleeps(file_contents);
    let guard_behaviours = convert_sleeps_to_behaviours(guard_sleeps);
    let solution: u32 = calculate_solution(guard_behaviours);
    println!("Solution: {}", solution);
}
|
//! Driver for the FT6206 Touch Panel.
//!
//! I2C Interface
//!
//! <http://www.tvielectronics.com/ocart/download/controller/FT6206.pdf>
//!
//! The syscall interface is described in [lsm303dlhc.md](https://github.com/tock/tock/tree/master/doc/syscalls/70006_lsm303dlhc.md) (NOTE: this link was inherited from the lsm303dlhc driver — update it for the FT6206)
//!
//! Usage
//! -----
//!
//! ```rust
//! let mux_i2c = components::i2c::I2CMuxComponent::new(&stm32f4xx::i2c::I2C1)
//! .finalize(components::i2c_mux_component_helper!());
//!
//! let ft6206 = components::ft6206::Ft6206Component::new(
//! stm32f412g::gpio::PinId::PG05.get_pin().as_ref().unwrap(),
//! )
//! .finalize(components::ft6206_i2c_component_helper!(mux_i2c));
//! ```
//!
//! Author: Alexandru Radovici <msg4alex@gmail.com>
#![allow(non_camel_case_types)]
use core::cell::Cell;
use enum_primitive::cast::FromPrimitive;
use enum_primitive::enum_from_primitive;
use kernel::common::cells::TakeCell;
use kernel::hil::gpio;
use kernel::hil::i2c::{self, Error};
use kernel::{AppId, Driver, ReturnCode};
use crate::driver;
/// Syscall driver number.
pub const DRIVER_NUM: usize = driver::NUM::Ft6206 as usize;
// Buffer to use for I2C messages: 1 command byte plus up to 16 bytes of data.
pub static mut BUFFER: [u8; 17] = [0; 17];
// Driver state machine: idle, or waiting for a touch-data read to complete.
enum State {
    Idle,
    ReadingTouches,
}
// FT6206 register addresses used by this driver.
enum_from_primitive! {
    enum Registers {
        REG_NUMTOUCHES = 0x2,
        REG_CHIPID = 0xA3,
    }
}
/// FT6206 capacitive touch panel driver: an I2C device plus the GPIO pin
/// the panel signals touch events on.
pub struct Ft6206<'a> {
    i2c: &'a dyn i2c::I2CDevice,
    interrupt_pin: &'a dyn gpio::InterruptPin<'a>,
    // callback: OptionalCell<Callback>,
    state: Cell<State>,
    buffer: TakeCell<'static, [u8]>,
}
impl<'a> Ft6206<'a> {
    /// Build a new driver instance and arm the touch interrupt.
    ///
    /// # Arguments
    /// * `i2c` - I2C device the FT6206 is attached to.
    /// * `interrupt_pin` - GPIO pin the panel signals touches on (falling edge).
    /// * `buffer` - static buffer for I2C transactions; the `fired` handler
    ///   reads up to 16 data bytes, so the 17-byte `BUFFER` above is expected.
    pub fn new(
        i2c: &'a dyn i2c::I2CDevice,
        interrupt_pin: &'a dyn gpio::InterruptPin<'a>,
        buffer: &'static mut [u8],
    ) -> Ft6206<'a> {
        // Arm the interrupt up front so touch events are not missed.
        interrupt_pin.enable_interrupts(gpio::InterruptEdge::FallingEdge);
        // Field-init shorthand replaces the redundant `i2c: i2c` forms
        // (clippy: redundant_field_names).
        Ft6206 {
            i2c,
            interrupt_pin,
            // callback: OptionalCell::empty(),
            state: Cell::new(State::Idle),
            buffer: TakeCell::new(buffer),
        }
    }
    /// Start an asynchronous chip-ID read to probe for the controller;
    /// the result arrives in `command_complete`.
    pub fn is_present(&self) {
        self.state.set(State::Idle);
        self.buffer.take().map(|buf| {
            // turn on i2c to send commands
            buf[0] = Registers::REG_CHIPID as u8;
            self.i2c.write_read(buf, 1, 1);
        });
    }
}
impl i2c::I2CClient for Ft6206<'_> {
    /// I2C transaction finished: reclaim the buffer and re-arm the touch
    /// interrupt.
    // NOTE(review): the read data and `_error` are currently ignored —
    // touch decoding / client callbacks are not implemented yet.
    fn command_complete(&self, buffer: &'static mut [u8], _error: Error) {
        self.state.set(State::Idle);
        self.buffer.replace(buffer);
        self.interrupt_pin
            .enable_interrupts(gpio::InterruptEdge::FallingEdge);
    }
}
impl gpio::Client for Ft6206<'_> {
    /// Touch interrupt fired: mask further interrupts and start an async
    /// read of 16 bytes beginning at register 0.
    // If the buffer is already in flight, the event is silently dropped.
    fn fired(&self) {
        self.buffer.take().map(|buffer| {
            self.interrupt_pin.disable_interrupts();
            self.state.set(State::ReadingTouches);
            buffer[0] = 0;
            self.i2c.write_read(buffer, 1, 16);
        });
    }
}
impl Driver for Ft6206<'_> {
    /// Userspace command interface:
    /// * `0` - driver presence check, always succeeds.
    /// * `1` - probe the chip ID over I2C.
    /// * anything else - `ENOSUPPORT`.
    fn command(&self, command_num: usize, _: usize, _: usize, _: AppId) -> ReturnCode {
        match command_num {
            // is driver present
            0 => ReturnCode::SUCCESS,
            // on
            1 => {
                self.is_present();
                ReturnCode::SUCCESS
            }
            // default
            _ => ReturnCode::ENOSUPPORT,
        }
    }
}
|
/* This is part of mktcb - which is under the MIT License ********************/
use std::io::Write;
use std::path::PathBuf;
use crate::error::Result;
use crate::error;
use crate::decompress;
use crate::util;
use indicatif::{ProgressBar, ProgressStyle};
use snafu::{ResultExt, ensure};
use log::*;
use curl::easy::Easy;
/// Return whether a resource is available at `url` (i.e. HTTP 200).
///
/// # Errors
/// Fails only when the URL cannot be set or the request itself cannot be
/// performed; unexpected response codes are not errors (see below).
pub fn check(handle: &mut Easy, url: &url::Url) -> Result<bool> {
    debug!("Checking if patch is available at {:#?}", url);
    handle.url(url.as_str())
        .context(error::URLError{url: url.clone()})?;
    handle.perform()
        .context(error::RequestError{url: url.clone()})?;
    let code = handle.response_code()
        .context(error::RequestError{url: url.clone()})?;
    // We have joined the server and performed a request. If we get
    // a hit (200), the file is available. If we get 404, we know for
    // sure the file is not there, move along.
    // For the other cases, it may be trickier: is the file actually
    // there, but did we run into a network error? To simplify, and
    // because I lack expertise here (what about redirections?), we
    // will consider anything but a clean 200 as "no update available".
    Ok(code == 200)
}
/// Downloads `url` into the file at `path`, rendering a terminal
/// progress bar while the transfer runs.
///
/// Fails if the destination file cannot be created, if the transfer
/// itself errors out, or if the final HTTP status is not a success
/// (200, or 226 per RFC 3229 delta encoding).
pub fn to_file(handle: &mut Easy, url: &url::Url, path: &std::path::PathBuf) -> Result<()> {
    handle.url(url.as_str()).context(error::URLError{url: url.clone()})?;
    // Create (or truncate) the destination before starting the transfer.
    let mut file = std::fs::File::create(&path).context(
        error::CreateFileError{ path: path.clone() }
    )?;
    // Length 0 for now; the real total is set by the progress callback
    // once curl learns it from the server.
    let pb = ProgressBar::new(0);
    pb.set_style(ProgressStyle::default_bar()
        .template("{spinner:.green} [{elapsed_precise}] [{bar:40.cyan/blue}] {bytes}/{total_bytes} ({eta})")
        .progress_chars("#>-"));
    handle.progress(true).context(error::CURLSetupError{})?;
    {
        // `transfer` mutably borrows `handle`; keep it in its own scope so
        // the borrow ends before we query the response code below.
        let mut transfer = handle.transfer();
        transfer.progress_function(|total, dl, _, _| {
            pb.set_length(total as u64);
            pb.set_position(dl as u64);
            true
        }).context(error::CURLSetupError{})?;
        transfer.write_function(|data| {
            // TODO - I have no idea how to handle the error here. The closure
            // expects us to return a WriteError,
            // https://docs.rs/curl/0.5.0/curl/easy/enum.WriteError.html
            // but it only has one field: pause, which is definitely not what I
            // expect to return.
            // So we just hope for the best...
            file.write_all(data).unwrap();
            Ok(data.len())
        }).context(error::CURLSetupError{})?;
        // And start the download!!! info!("Downloading file from {}", url);
        transfer.perform().context(error::RequestError{url: url.clone()})?;
    }
    // Now that we have performed the transfer (or failed it!!) query the
    // return code to raise a proper error.
    let code = handle.response_code()
        .context(error::RequestError{url: url.clone()})?;
    let is_ok = match code {
        200 => true,
        226 => true, // See https://tools.ietf.org/html/rfc3229
        _ => false,
    };
    ensure!(is_ok, error::DownloadError{
        url: url.clone(),
        code: code,
    });
    Ok(())
}
/// Downloads a compressed tar archive from URL and store it in in_dir.
/// The archive will be unpacked and also placed in in_dir, and the
/// resulting output directory must patch expected_dir.
pub fn to_unpacked_dir(
http_handle: &mut curl::easy::Easy,
url: &url::Url,
in_dir: &PathBuf,
expected_dir: &PathBuf) -> Result<()>
{
// The output dir shall not already exist
assert!(! expected_dir.is_dir());
// First, create the directory in which the download will be placed
std::fs::create_dir_all(in_dir).context(
error::CreateDirError{ path: in_dir.clone() })?;
// Compose the full path to the archive to be downloaded
let mut tar_path = in_dir.clone();
tar_path.push(util::url_last(url)?);
// Download the archive and unpack it, effectively returning the unpacked
// directory
to_file(http_handle, url, &tar_path)?;
let out_dir = decompress::untar(&tar_path)?;
// Make sure it was extracted at the expected place
ensure!(&out_dir == expected_dir, error::UnexpectedUntar{
arch: tar_path.clone(),
dir: expected_dir.clone(),
});
assert!(expected_dir.is_dir());
Ok(())
}
|
extern crate openssl;
#[macro_use]
extern crate serde;
pub mod config;
pub mod res;
pub mod routes;
pub mod server;
pub use config::Config;
pub use res::*;
use artell_infra::pg::Postgres;
// Service entry point: read configuration from the environment, connect
// to Postgres, and serve until the server future completes.
#[tokio::main]
async fn main() {
    pretty_env_logger::init();
    // Probe the system for SSL certificate locations and export them via
    // env vars (useful in minimal containers with non-default paths).
    openssl_probe::init_ssl_cert_env_vars();
    let db_url = get_env_var_or_panic("DATABASE_URL");
    let pg = Postgres::new(db_url);
    let port = get_env_var_u16_or_panic("PORT");
    server::bind(Config::new(pg, "artell".to_string()), ([0, 0, 0, 0], port)).await
}
/// Reads the environment variable `key`, panicking with a message that
/// names the offending variable (a bare `unwrap` would hide which one
/// was missing or malformed).
fn get_env_var_or_panic(key: &'static str) -> String {
    std::env::var(key)
        .unwrap_or_else(|e| panic!("environment variable {}: {}", key, e))
}
/// Reads the environment variable `key` and parses it as a `u16`,
/// panicking with a message that names the variable on failure.
fn get_env_var_u16_or_panic(key: &'static str) -> u16 {
    let s = get_env_var_or_panic(key);
    // `str::parse` is the idiomatic base-10 parse (clippy: from_str_radix_10).
    s.parse()
        .unwrap_or_else(|e| panic!("environment variable {} is not a valid u16: {}", key, e))
}
|
extern crate aoc_2022;
extern crate aoc_runner;
extern crate aoc_runner_derive;
use aoc_runner_derive::aoc_main;
aoc_main! { lib = aoc_2022 }
|
// This file is part of Substrate.
// Copyright (C) 2020 Parity Technologies (UK) Ltd.
// SPDX-License-Identifier: Apache-2.0
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use codec::{Decode, Encode, EncodeLike};
use frame_support::{
assert_noop, assert_ok,
dispatch::{DispatchError, DispatchResult},
storage::{with_transaction, TransactionOutcome::*},
transactional, StorageMap, StorageValue,
};
use sp_io::TestExternalities;
use sp_std::result;
/// Minimal configuration trait for the mock pallet under test.
pub trait Trait {
    /// Dispatch origin type; the mock `Runtime` below uses a bare `u32`.
    type Origin;
    /// Block number type with the codec bounds FRAME storage requires.
    type BlockNumber: Encode + Decode + EncodeLike + Default + Clone;
}
// Mock pallet with two `#[transactional]` dispatchables: one that
// succeeds (its storage write commits) and one that returns an error
// (its storage write is reverted by the transactional wrapper).
frame_support::decl_module! {
    pub struct Module<T: Trait> for enum Call where origin: T::Origin, system=self {
        #[weight = 0]
        #[transactional]
        fn value_commits(_origin, v: u32) {
            // Succeeds, so the write sticks.
            Value::set(v);
        }
        #[weight = 0]
        #[transactional]
        fn value_rollbacks(_origin, v: u32) -> DispatchResult {
            // Fails, so the write must be rolled back.
            Value::set(v);
            Err(DispatchError::Other("nah"))
        }
    }
}
// Storage items exercised by the tests: a plain `u32` value and a
// string-keyed map.
frame_support::decl_storage! {
    trait Store for Module<T: Trait> as StorageTransactions {
        pub Value: u32;
        pub Map: map hasher(twox_64_concat) String => u32;
    }
}
/// Concrete mock runtime wiring `u32` for both associated types.
struct Runtime;
impl Trait for Runtime {
    type Origin = u32;
    type BlockNumber = u32;
}
// A committed transaction's writes must be visible after it closes.
#[test]
fn storage_transaction_basic_commit() {
    TestExternalities::default().execute_with(|| {
        // Storage starts empty.
        assert_eq!(Value::get(), 0);
        assert!(!Map::contains_key("val0"));
        with_transaction(|| {
            Value::set(99);
            Map::insert("val0", 99);
            // Writes are visible inside the transaction...
            assert_eq!(Value::get(), 99);
            assert_eq!(Map::get("val0"), 99);
            Commit(())
        });
        // ...and, after Commit, outside it too.
        assert_eq!(Value::get(), 99);
        assert_eq!(Map::get("val0"), 99);
    });
}
// A rolled-back transaction's writes must be discarded when it closes.
#[test]
fn storage_transaction_basic_rollback() {
    TestExternalities::default().execute_with(|| {
        assert_eq!(Value::get(), 0);
        assert_eq!(Map::get("val0"), 0);
        with_transaction(|| {
            Value::set(99);
            Map::insert("val0", 99);
            // Writes are visible inside the transaction...
            assert_eq!(Value::get(), 99);
            assert_eq!(Map::get("val0"), 99);
            Rollback(())
        });
        // ...but reverted once it returns Rollback.
        assert_eq!(Value::get(), 0);
        assert_eq!(Map::get("val0"), 0);
    });
}
// An inner rollback must only undo the inner writes; the enclosing
// transaction can still commit its own.
#[test]
fn storage_transaction_rollback_then_commit() {
    TestExternalities::default().execute_with(|| {
        Value::set(1);
        Map::insert("val1", 1);
        with_transaction(|| {
            Value::set(2);
            Map::insert("val1", 2);
            Map::insert("val2", 2);
            with_transaction(|| {
                Value::set(3);
                Map::insert("val1", 3);
                Map::insert("val2", 3);
                Map::insert("val3", 3);
                assert_eq!(Value::get(), 3);
                assert_eq!(Map::get("val1"), 3);
                assert_eq!(Map::get("val2"), 3);
                assert_eq!(Map::get("val3"), 3);
                Rollback(())
            });
            // Inner rollback restored the outer transaction's view.
            assert_eq!(Value::get(), 2);
            assert_eq!(Map::get("val1"), 2);
            assert_eq!(Map::get("val2"), 2);
            assert_eq!(Map::get("val3"), 0);
            Commit(())
        });
        // Outer commit kept the outer writes only.
        assert_eq!(Value::get(), 2);
        assert_eq!(Map::get("val1"), 2);
        assert_eq!(Map::get("val2"), 2);
        assert_eq!(Map::get("val3"), 0);
    });
}
// An outer rollback must also undo writes that an inner transaction
// had already committed (commits only reach the parent layer).
#[test]
fn storage_transaction_commit_then_rollback() {
    TestExternalities::default().execute_with(|| {
        Value::set(1);
        Map::insert("val1", 1);
        with_transaction(|| {
            Value::set(2);
            Map::insert("val1", 2);
            Map::insert("val2", 2);
            with_transaction(|| {
                Value::set(3);
                Map::insert("val1", 3);
                Map::insert("val2", 3);
                Map::insert("val3", 3);
                assert_eq!(Value::get(), 3);
                assert_eq!(Map::get("val1"), 3);
                assert_eq!(Map::get("val2"), 3);
                assert_eq!(Map::get("val3"), 3);
                Commit(())
            });
            // Inner commit merged into the (still open) outer transaction.
            assert_eq!(Value::get(), 3);
            assert_eq!(Map::get("val1"), 3);
            assert_eq!(Map::get("val2"), 3);
            assert_eq!(Map::get("val3"), 3);
            Rollback(())
        });
        // Outer rollback reverted everything, inner commit included.
        assert_eq!(Value::get(), 1);
        assert_eq!(Map::get("val1"), 1);
        assert_eq!(Map::get("val2"), 0);
        assert_eq!(Map::get("val3"), 0);
    });
}
// #[transactional] on free functions: Ok commits the writes, Err
// reverts them.
#[test]
fn transactional_annotation() {
    #[transactional]
    fn value_commits(v: u32) -> result::Result<u32, &'static str> {
        Value::set(v);
        Ok(v)
    }
    #[transactional]
    fn value_rollbacks(v: u32) -> result::Result<u32, &'static str> {
        Value::set(v);
        Err("nah")
    }
    TestExternalities::default().execute_with(|| {
        assert_ok!(value_commits(2), 2);
        assert_eq!(Value::get(), 2);
        // assert_noop! additionally checks storage was left untouched.
        assert_noop!(value_rollbacks(3), "nah");
    });
}
// Same commit/rollback semantics when #[transactional] is applied to
// dispatchables declared via decl_module!.
#[test]
fn transactional_annotation_in_decl_module() {
    TestExternalities::default().execute_with(|| {
        let origin = 0;
        assert_ok!(<Module<Runtime>>::value_commits(origin, 2));
        assert_eq!(Value::get(), 2);
        assert_noop!(<Module<Runtime>>::value_rollbacks(origin, 3), "nah");
    });
}
|
/// A named, dynamically-typed value.
#[derive(Clone, Debug)]
pub struct UserData {
    /// Key under which the value is stored and looked up.
    pub name: String,
    /// The payload; see [`UserDataValue`] for the supported types.
    pub value: UserDataValue,
}
/// The payload types a [`UserData`] entry can carry.
#[derive(Clone, Debug)]
pub enum UserDataValue {
    Int (i32),
    Float (f32),
    String (String),
}
|
use std::fmt::Debug;
// #[syntex_modifier] turns into #[derive(Clone)]
#[syntex_modifier]
struct S<'a>(&'a str);
fn main() {
    // syntex_macro!() turns into "hello world"
    let s = S(syntex_macro!());
    // Compile-time check that S implements Debug. NOTE(review): no Debug
    // derive is visible on S here — presumably supplied by the syntex
    // plugin machinery; confirm.
    assert_debug::<S>();
    println!("{}", s.0);
}
/// Compile-time assertion that `T` implements `Debug`; does nothing at runtime.
fn assert_debug<T: Debug>() {}
|
// svd2rust-generated writer plumbing for FMC_CSQICR (the NAND command
// sequencer interrupt *clear* register): one single-bit writer per flag.
#[doc = "Register `FMC_CSQICR` writer"]
pub type W = crate::W<FMC_CSQICR_SPEC>;
#[doc = "Field `CTCF` writer - CTCF"]
pub type CTCF_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `CSCF` writer - CSCF"]
pub type CSCF_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `CSEF` writer - CSEF"]
pub type CSEF_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `CSUEF` writer - CSUEF"]
pub type CSUEF_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `CCMDTCF` writer - CCMDTCF"]
pub type CCMDTCF_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
// Field accessors: each returns a proxy that writes its single bit at
// the field's offset (the const generic parameter).
impl W {
    #[doc = "Bit 0 - CTCF"]
    #[inline(always)]
    #[must_use]
    pub fn ctcf(&mut self) -> CTCF_W<FMC_CSQICR_SPEC, 0> {
        CTCF_W::new(self)
    }
    #[doc = "Bit 1 - CSCF"]
    #[inline(always)]
    #[must_use]
    pub fn cscf(&mut self) -> CSCF_W<FMC_CSQICR_SPEC, 1> {
        CSCF_W::new(self)
    }
    #[doc = "Bit 2 - CSEF"]
    #[inline(always)]
    #[must_use]
    pub fn csef(&mut self) -> CSEF_W<FMC_CSQICR_SPEC, 2> {
        CSEF_W::new(self)
    }
    #[doc = "Bit 3 - CSUEF"]
    #[inline(always)]
    #[must_use]
    pub fn csuef(&mut self) -> CSUEF_W<FMC_CSQICR_SPEC, 3> {
        CSUEF_W::new(self)
    }
    #[doc = "Bit 4 - CCMDTCF"]
    #[inline(always)]
    #[must_use]
    pub fn ccmdtcf(&mut self) -> CCMDTCF_W<FMC_CSQICR_SPEC, 4> {
        CCMDTCF_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        // Unsafe: bypasses the typed per-field writers, so the caller must
        // guarantee the raw value is valid for this register.
        self.bits = bits;
        self
    }
}
#[doc = "FMC NAND Command Sequencer Interrupt Clear Register\n\nYou can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`fmc_csqicr::W`](W). See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct FMC_CSQICR_SPEC;
// 32-bit, write-only (no Readable impl is generated), resets to 0.
impl crate::RegisterSpec for FMC_CSQICR_SPEC {
    type Ux = u32;
}
#[doc = "`write(|w| ..)` method takes [`fmc_csqicr::W`](W) writer structure"]
impl crate::Writable for FMC_CSQICR_SPEC {
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets FMC_CSQICR to value 0"]
impl crate::Resettable for FMC_CSQICR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
use std::{
fmt::{Display, Formatter, Result as FmtResult},
str::FromStr,
};
/// Represents a period of time relative to now.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum Period {
    /// The period of time that began at the start of the first tracked event.
    All,
    /// The period of time that began at midnight at the start of the current day.
    Today,
    /// The period of time 24 hours in length that ended at midnight at the start of the current
    /// day.
    Yesterday,
    /// The period of time that began at midnight at the start of the last Monday that occurred
    /// (including the current day).
    Week,
    /// The period of time 7 days (168 hours) in length that ended at midnight at the start of the
    /// last Monday that occurred.
    LastWeek,
    /// The period of time that began at midnight at the start of the last day that occurred whose
    /// number was 1 (including the current day).
    Month,
    /// The period of time between the midnights at the beginning of the last two occurrences of
    /// days whose numbers were 1 (including the current day).
    LastMonth,
}
impl FromStr for Period {
    type Err = String;
    /// Parses a (case-sensitive) user-supplied token, accepting both long
    /// forms and short aliases.
    fn from_str(raw: &str) -> Result<Self, Self::Err> {
        let parsed = match raw {
            "all" | "a" => Period::All,
            "today" | "t" => Period::Today,
            "yesterday" | "y" => Period::Yesterday,
            "week" | "this week" | "w" | "tw" => Period::Week,
            "last week" | "lastweek" | "lw" => Period::LastWeek,
            "month" | "this month" | "m" | "tm" => Period::Month,
            "last month" | "lastmonth" | "lm" => Period::LastMonth,
            _ => return Err("Time period not recognised.".into()),
        };
        Ok(parsed)
    }
}
impl Display for Period {
    /// Renders the human-readable label for the period.
    fn fmt(&self, f: &mut Formatter) -> FmtResult {
        let label = match self {
            Period::All => "All-Time",
            Period::Today => "Today",
            Period::Yesterday => "Yesterday",
            Period::Week => "This Week",
            Period::LastWeek => "Last Week",
            Period::Month => "This Month",
            Period::LastMonth => "Last Month",
        };
        write!(f, "{}", label)
    }
}
|
use std::{
collections::{btree_map, BTreeMap},
iter::FromIterator,
fmt,
};
use log::Level::{Debug, Trace};
use crate::{ContextHandle, Contextual, Multiplicity, DotId, FiringSet, AcesError, AcesErrorKind};
/// Current token distribution over dots.
#[derive(Clone, Debug)]
pub struct State {
    // Handle used to resolve dot names and capacities for diagnostics.
    context: ContextHandle,
    // Token count per dot; entries with non-positive counts are removed
    // rather than stored (see `set_unchecked`).
    tokens:  BTreeMap<DotId, Multiplicity>,
}
impl State {
    /// Builds a state from `(dot name, multiplicity)` triggers.
    ///
    /// Any multiplicity exceeding its dot's declared capacity is clamped
    /// down to that capacity (with a warning) instead of failing.
    pub fn from_triggers_saturated<S, I>(ctx: &ContextHandle, triggers: I) -> Self
    where
        S: AsRef<str>,
        I: IntoIterator<Item = (S, Multiplicity)>,
    {
        let context = ctx.clone();
        let mut ctx = ctx.lock().unwrap();
        let tokens = BTreeMap::from_iter(triggers.into_iter().map(|(name, mul)| {
            let dot_id = ctx.share_dot_name(name.as_ref());
            let cap = ctx.get_capacity(dot_id);
            if mul > cap {
                warn!(
                    "Clamping trigger's {:?} to Capacity({}) of dot \"{}\"",
                    mul,
                    cap,
                    name.as_ref()
                );
                (dot_id, cap)
            } else {
                (dot_id, mul)
            }
        }));
        State { context, tokens }
    }
    /// Builds a state from `(dot name, multiplicity)` triggers, failing
    /// with `CapacityOverflow` if any trigger exceeds its dot's capacity.
    pub fn from_triggers_checked<S, I>(ctx: &ContextHandle, triggers: I) -> Result<Self, AcesError>
    where
        S: AsRef<str>,
        I: IntoIterator<Item = (S, Multiplicity)>,
    {
        let mut error = None;
        // `take_while` stops at the first over-capacity trigger; the
        // offender is recorded in `error` and excluded from `tokens`.
        let tokens = BTreeMap::from_iter(
            triggers
                .into_iter()
                .map(|(name, mul)| {
                    let dot_id = ctx.lock().unwrap().share_dot_name(name.as_ref());
                    (dot_id, mul)
                })
                .take_while(|&(dot_id, mul)| {
                    let cap = ctx.lock().unwrap().get_capacity(dot_id);
                    if mul > cap {
                        error = Some((dot_id, cap, mul));
                        false
                    } else {
                        true
                    }
                }),
        );
        match error {
            Some((n, c, m)) => Err(AcesErrorKind::CapacityOverflow(n, c, m).with_context(ctx)),
            None => Ok(State { context: ctx.clone(), tokens }),
        }
    }
    /// Removes all tokens from the state.
    pub fn clear(&mut self) {
        self.tokens.clear()
    }
    /// Returns the token count at `dot_id` (zero for absent entries).
    pub fn get(&self, dot_id: DotId) -> Multiplicity {
        self.tokens.get(&dot_id).copied().unwrap_or_else(Multiplicity::zero)
    }
    /// Sets the token count of `dot_id` without any capacity check.
    ///
    /// A non-positive count removes the entry instead of storing it, so
    /// the map only ever holds positive counts.
    pub fn set_unchecked(&mut self, dot_id: DotId, num_tokens: Multiplicity) {
        match self.tokens.entry(dot_id) {
            btree_map::Entry::Vacant(entry) => {
                if num_tokens.is_positive() {
                    entry.insert(num_tokens);
                }
            }
            btree_map::Entry::Occupied(mut entry) => {
                if num_tokens.is_positive() {
                    *entry.get_mut() = num_tokens;
                } else {
                    entry.remove();
                }
            }
        }
    }
    /// Subtracts `num_tokens` from `dot_id`, failing with
    /// `StateUnderflow` when there are not enough tokens and with
    /// `LeakedInhibitor` when a non-finite (ω) amount would be drained
    /// from a finite count.
    ///
    /// See also [`FiringComponent::fire()`].
    ///
    /// [`FiringComponent::fire()`]: crate::FiringComponent::fire()
    pub(crate) fn decrease(
        &mut self,
        dot_id: DotId,
        num_tokens: Multiplicity,
    ) -> Result<(), AcesError> {
        if num_tokens.is_positive() {
            if let btree_map::Entry::Occupied(mut entry) = self.tokens.entry(dot_id) {
                let tokens_before = *entry.get_mut();
                if tokens_before.is_positive() {
                    if num_tokens.is_finite() {
                        if let Some(tokens_after) = tokens_before.checked_sub(num_tokens) {
                            *entry.get_mut() = tokens_after;
                        } else {
                            return Err(AcesErrorKind::StateUnderflow(
                                dot_id,
                                tokens_before,
                                num_tokens,
                            )
                            .with_context(&self.context))
                        }
                    } else {
                        return Err(AcesErrorKind::LeakedInhibitor(dot_id, tokens_before)
                            .with_context(&self.context))
                    }
                } else if num_tokens.is_finite() {
                    return Err(AcesErrorKind::StateUnderflow(dot_id, tokens_before, num_tokens)
                        .with_context(&self.context))
                }
            } else if num_tokens.is_finite() {
                // No entry at all behaves like a zero count.
                return Err(AcesErrorKind::StateUnderflow(dot_id, Multiplicity::zero(), num_tokens)
                    .with_context(&self.context))
            }
        }
        Ok(())
    }
    /// Adds `num_tokens` to `dot_id`, failing with `StateOverflow` when
    /// the finite addition would overflow.
    ///
    /// Note: this routine doesn't check for capacity overflow.
    ///
    /// See also [`FiringComponent::fire()`].
    ///
    /// [`FiringComponent::fire()`]: crate::FiringComponent::fire()
    pub(crate) fn increase(
        &mut self,
        dot_id: DotId,
        num_tokens: Multiplicity,
    ) -> Result<(), AcesError> {
        if num_tokens.is_positive() {
            match self.tokens.entry(dot_id) {
                btree_map::Entry::Vacant(entry) => {
                    entry.insert(num_tokens);
                }
                btree_map::Entry::Occupied(mut entry) => {
                    let tokens_before = *entry.get_mut();
                    if tokens_before.is_zero() {
                        *entry.get_mut() = num_tokens;
                    } else if tokens_before.is_finite() {
                        if num_tokens.is_omega() {
                            // Adding ω saturates the count at ω.
                            *entry.get_mut() = num_tokens;
                        } else if let Some(tokens_after) = tokens_before.checked_add(num_tokens) {
                            if tokens_after.is_finite() {
                                *entry.get_mut() = tokens_after;
                            } else {
                                return Err(AcesErrorKind::StateOverflow(
                                    dot_id,
                                    tokens_before,
                                    num_tokens,
                                )
                                .with_context(&self.context))
                            }
                        } else {
                            return Err(AcesErrorKind::StateOverflow(
                                dot_id,
                                tokens_before,
                                num_tokens,
                            )
                            .with_context(&self.context))
                        }
                    }
                    // An already-ω count absorbs any further addition.
                }
            }
        }
        Ok(())
    }
    /// Like [`transition()`](Self::transition), but logs the state before
    /// the step and, at trace level, the enabled firing components.
    pub(crate) fn transition_debug<R: rand::RngCore>(
        &mut self,
        ctx: &ContextHandle,
        num_steps: usize,
        fset: &FiringSet,
        rng: &mut R,
    ) -> Result<Option<usize>, AcesError> {
        if log_enabled!(Debug) {
            if num_steps == 0 {
                debug!("Go from {}", self);
            } else if num_steps < 10 {
                // NOTE(review): this branch and the one below emit the same
                // format — presumably different padding was intended; confirm.
                debug!("Step {} {}", num_steps, self);
            } else {
                debug!("Step {} {}", num_steps, self);
            }
        }
        let enabled_fcs = fset.get_enabled(self);
        if let Some(fc_id) = enabled_fcs.get_random(rng) {
            if log_enabled!(Trace) {
                let mut at_start = true;
                for fc in enabled_fcs.iter(fset) {
                    if at_start {
                        trace!("Enabled {}", fc.with(ctx));
                        at_start = false;
                    } else {
                        trace!("        {}", fc.with(ctx));
                    }
                }
            }
            fset.as_slice()[fc_id].fire(self)?;
            Ok(Some(fc_id))
        } else {
            Ok(None)
        }
    }
    /// Activates and fires a single firing component.
    ///
    /// The firing component is randomly chosen from the enabled
    /// subset of the given [`FiringSet`].
    ///
    /// Returns the position of the activated firing component in the
    /// [`FiringSet`].
    pub fn transition<R: rand::RngCore>(
        &mut self,
        fset: &FiringSet,
        rng: &mut R,
    ) -> Result<Option<usize>, AcesError> {
        let enabled_fcs = fset.get_enabled(self);
        enabled_fcs.fire_single(self, fset, rng)
    }
    /// Activates a random independent set of firing components and
    /// fires them all in a single step.
    ///
    /// The firing components are chosen from the enabled subset of
    /// the given [`FiringSet`]. Note that two firing components are
    /// independent (not adjacent) iff they have disjoint carriers.
    ///
    /// Returns the vector of positions of activated firing components
    /// in the [`FiringSet`].
    pub fn parallel_transition<R: rand::RngCore>(
        &mut self,
        fset: &FiringSet,
        rng: &mut R,
    ) -> Result<Vec<usize>, AcesError> {
        let mut enabled_fcs = fset.get_enabled(self);
        enabled_fcs.fire_parallel(self, fset, rng)
    }
    /// Activates the set of enabled firing components and fires them
    /// all in a single step.
    ///
    /// Returns the vector of positions of activated firing components
    /// in the [`FiringSet`].
    pub fn maximal_transition(&mut self, fset: &FiringSet) -> Result<Vec<usize>, AcesError> {
        let enabled_fcs = fset.get_enabled(self);
        enabled_fcs.fire_maximal(self, fset)?;
        Ok(enabled_fcs.into())
    }
}
impl fmt::Display for State {
    /// Renders the state as `{ dot: tokens, ... }`, listing only dots
    /// holding a positive number of tokens.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{{")?;
        let mut separator = "";
        for (dot_id, &num_tokens) in self.tokens.iter() {
            if num_tokens.is_positive() {
                write!(f, "{} {}: {}", separator, dot_id.with(&self.context), num_tokens)?;
                separator = ",";
            }
        }
        write!(f, " }}")
    }
}
/// A set of target token counts; reaching any one of them satisfies the goal.
#[derive(Debug)]
pub struct Goal {
    // Target multiplicity per dot (see `is_reached`).
    targets: BTreeMap<DotId, Multiplicity>,
}
impl Goal {
    /// Builds a goal from `(dot name, multiplicity)` targets, failing
    /// with `CapacityOverflow` if any target exceeds its dot's capacity.
    pub fn from_targets_checked<S, I>(ctx: &ContextHandle, targets: I) -> Result<Self, AcesError>
    where
        S: AsRef<str>,
        I: IntoIterator<Item = (S, Multiplicity)>,
    {
        let mut error = None;
        // Mirrors `State::from_triggers_checked`: `take_while` stops at
        // the first over-capacity target and records it in `error`.
        let targets = BTreeMap::from_iter(
            targets
                .into_iter()
                .map(|(name, mul)| {
                    let dot_id = ctx.lock().unwrap().share_dot_name(name.as_ref());
                    (dot_id, mul)
                })
                .take_while(|&(dot_id, mul)| {
                    let cap = ctx.lock().unwrap().get_capacity(dot_id);
                    if mul > cap {
                        error = Some((dot_id, cap, mul));
                        false
                    } else {
                        true
                    }
                }),
        );
        match error {
            Some((n, c, m)) => Err(AcesErrorKind::CapacityOverflow(n, c, m).with_context(ctx)),
            None => Ok(Goal { targets }),
        }
    }
    /// Returns the first target dot satisfied by `state`, or `None`.
    ///
    /// An ω target is satisfied only by an ω token count; a finite
    /// target is satisfied by any token count at or above it.
    pub fn is_reached(&self, state: &State) -> Option<DotId> {
        for (&dot_id, &target_tokens) in self.targets.iter() {
            let tokens = state.get(dot_id);
            if target_tokens.is_omega() {
                if tokens.is_omega() {
                    return Some(dot_id)
                }
            } else if tokens >= target_tokens {
                return Some(dot_id)
            }
        }
        None
    }
}
/// Step semantics used when advancing a `State` (see the `transition`,
/// `parallel_transition` and `maximal_transition` methods).
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
pub enum Semantics {
    Sequential,
    Parallel,
    Maximal,
}
impl Default for Semantics {
    /// Sequential (one firing component per step) is the default.
    fn default() -> Self {
        Self::Sequential
    }
}
|
use std::cmp::min;
use std::sync::mpsc::Receiver;
use crate::channel_state::{ChannelState, Voice};
use crate::channel_state::channel_state::{EnvelopeState, Note, PortaToNoteState, TremoloState, VibratoState, WaveControl, Panning, clamp, VibratoEnvelopeState};
use crate::instrument::{LoopType, Instrument};
use crate::module_reader::{SongData, is_note_valid, Patterns};
use crate::tables::{PANNING_TAB, AudioTables};
use crate::triple_buffer::{TripleBufferWriter, Init};
use std::collections::HashMap;
use std::num::Wrapping;
use std::borrow::Borrow;
/// Tempo state: the BPM value plus the tick durations derived from it.
struct BPM {
    /// Beats per minute currently in effect (valid range 1..=999).
    pub bpm: u32,
    /// Duration of one tick in milliseconds (2500 / bpm).
    tick_duration_in_ms: f32,
    /// Duration of one tick in whole output frames at the current rate.
    tick_duration_in_frames: usize,
}
impl BPM {
    /// Creates the tempo state and immediately derives the tick
    /// durations for the given sample `rate`.
    fn new(bpm: u32, rate: f32) -> BPM {
        let mut tempo = BPM {
            bpm: 0,
            tick_duration_in_ms: 0.0,
            tick_duration_in_frames: 0,
        };
        tempo.update(bpm, rate);
        tempo
    }
    /// Recomputes the tick durations; out-of-range BPM values (0 or
    /// above 999) are ignored and the previous settings stay in effect.
    fn update(&mut self, bpm: u32, rate: f32) {
        if !(1..=999).contains(&bpm) {
            return;
        }
        self.bpm = bpm;
        self.tick_duration_in_ms = 2500.0 / self.bpm as f32;
        self.tick_duration_in_frames = (self.tick_duration_in_ms / 1000.0 * rate) as usize;
    }
}
/// Pending pattern-break / position-jump request collected while
/// processing a row's effects.
struct PatternChange {
    pattern_break: bool,
    pattern_jump: bool,
    /// Row at which playback continues after the break/jump.
    row: u8,
    /// Target pattern-order position for a jump.
    pattern: u8,
}
impl PatternChange {
    pub fn new() -> Self {
        Self {
            pattern_break: false,
            pattern_jump: false,
            row: 0,
            pattern: 0,
        }
    }
    /// Clears any pending change.
    fn reset(&mut self) {
        *self = Self::new();
    }
    /// Requests a pattern break to row `param`; only honored on a row's
    /// first tick, and rows past 63 reset to 0.
    fn set_break(&mut self, first_tick: bool, param: u8) {
        if !first_tick {
            return;
        }
        self.pattern_break = true;
        self.row = if param > 63 { 0 } else { param };
    }
    /// Requests a jump to pattern-order position `param`, restarting at
    /// row 0; only honored on a row's first tick.
    fn set_jump(&mut self, first_tick: bool, param: u8) {
        if !first_tick {
            return;
        }
        self.pattern_jump = true;
        self.pattern = param;
        self.row = 0;
    }
}
/// Fills `arr` with clones of `value`.
///
/// Kept as a thin wrapper for backward compatibility: `slice::fill`
/// (stable since Rust 1.50, and already used elsewhere in this file)
/// replaces the hand-rolled clone loop.
pub fn fill<T>(arr: &mut [T], value: T)
where
    T: Clone,
{
    arr.fill(value)
}
/// Global (song-wide) volume in the 0..=64 range, plus the effect
/// memory for the global-volume-slide effect.
struct GlobalVolume {
    volume: u32,
    /// Last non-zero slide parameter (upper nibble = up, lower = down).
    last_volume_slide: u8,
}
impl GlobalVolume {
    pub fn new() -> Self {
        GlobalVolume { volume: 64, last_volume_slide: 0 }
    }
    /// Global volume slide: on the first tick a non-zero `param` is only
    /// memorized; on subsequent ticks the memorized parameter is applied
    /// (upper nibble slides up, lower nibble slides down).
    fn volume_slide(&mut self, first_tick: bool, param: u8) {
        if first_tick {
            if param != 0 {
                self.last_volume_slide = param;
            }
        } else {
            let up = self.last_volume_slide >> 4;
            let down = self.last_volume_slide & 0xf;
            if up != 0 {
                self.handle_volume_slide(first_tick, up as i8);
            } else if down != 0 {
                self.handle_volume_slide(first_tick, - (down as i8));
            }
        }
    }
    // Applies the slide on non-first ticks only.
    fn handle_volume_slide(&mut self, first_tick: bool, volume: i8) {
        if !first_tick { self.volume_slide_inner(volume); }
    }
    // fn fine_volume_slide(&mut self, first_tick: bool, volume: i8) {
    //     if first_tick { self.volume_slide_inner(volume); }
    // }
    /// Applies a signed delta, clamping the result to 0..=64.
    fn volume_slide_inner(&mut self, volume: i8) {
        // BUGFIX: the previous code compared the *delta* (`volume`) against
        // 64 for the upper clamp — since a slide nibble is at most 15, the
        // clamp never fired and sliding up could push the global volume
        // past 64. Clamp the new value instead.
        let new_volume = (self.volume as i32 + volume as i32).max(0).min(64);
        self.volume = new_volume as u32;
    }
    /// Sets the global volume directly, saturating at 0x40 (64); only
    /// honored on a row's first tick.
    fn set_volume(&mut self, first_tick: bool, volume: u8) {
        if first_tick {
            self.volume = if volume <= 0x40 { volume } else { 0x40 } as u32;
        }
    }
}
/// Commands the UI/controller thread sends to the playback engine
/// (consumed in `handle_commands` via an mpsc `Receiver`).
pub enum PlaybackCmd {
    IncBPM,
    DecBPM,
    IncSpeed,
    DecSpeed,
    // Move to the next / previous song position.
    Next,
    Prev,
    LoopPattern,
    Restart,
    Quit,
    // Switch the frequency table used for pitch computation.
    AmigaTable,
    LinearTable,
    PauseToggle,
    FilterToggle,
    DisplayToggle,
    // Mute/unmute the given channel index.
    ChannelToggle(u8),
    // Set or arithmetically adjust a named user-data entry.
    SetUserData(String, UserData),
    ModifyUserDataAddUSize(String, usize),
    ModifyUserDataSubUSize(String, usize),
    ModifyUserDataAddISize(String, isize),
    ModifyUserDataSubISize(String, isize),
    SpeedUp,
    SpeedDown,
    SpeedReset,
    SetPosition(u32),
}
/// Per-channel snapshot published to the display thread (see
/// `Song::queue_display`, which fills one of these per channel).
#[derive(Clone)]
pub struct ChannelStatus {
    pub volume: f32,
    pub envelope_volume: f32,
    pub global_volume: f32,
    pub fadeout_volume: f32,
    /// Whether the channel is currently producing sound.
    pub on: bool,
    /// Whether the channel was forcibly muted.
    pub force_off: bool,
    pub frequency: f32,
    pub instrument: usize,
    pub sample: usize,
    pub sample_position: f32,
    /// Human-readable note name for display.
    pub note: String,
    pub period: u16,
    pub final_panning: u8,
}
/// Whole-song snapshot published through the triple buffer for the
/// display thread each tick.
#[derive(Clone)]
pub struct PlayData {
    pub name: String,
    pub tick_duration_in_frames: usize,
    pub tick_duration_in_ms: f32,
    pub tick: u32,
    pub song_position: usize,
    pub song_length: u16,
    pub row: usize,
    pub pattern_len: usize,
    pub bpm: u32,
    pub speed: u32,
    pub channel_status: Vec<ChannelStatus>,
    pub filter: bool,
    pub user_data: HashMap<String, UserData>,
}
impl Init for PlayData {
    /// Creates an empty snapshot for the triple buffer.
    fn new() -> Self {
        Self {
            name: String::new(),
            tick_duration_in_frames: 0,
            tick_duration_in_ms: 0.0,
            tick: 0,
            song_position: 0,
            // NOTE(review): lengths start at 1 rather than 0 — presumably
            // so consumers can divide/index safely before real data
            // arrives; confirm.
            song_length: 1,
            row: 0,
            pattern_len: 1,
            bpm: 0,
            speed: 0,
            channel_status: Vec::new(),
            filter: false,
            user_data: Default::default(),
        }
    }
}
// State machine for filling output buffers across calls to
// `get_next_tick`: start a tick, stream its frames, then advance.
enum BufferState {
    Start,
    FillBuffer,
    NextTick,
}
// Progress of the mixer within the current tick and output buffer.
struct TickState {
    state: BufferState,
    // Write position (in frames) within the caller-supplied buffer.
    current_buf_position: usize,
    // Number of frames of the current tick already rendered.
    current_tick_position: usize,
}
/// Result of one audio-callback fill: keep playing, or playback is done.
pub enum CallbackState {
    Ok,
    Complete
}
/// Abstraction over the audio output buffer layout, letting the mixer
/// render into either interleaved or planar stereo storage.
pub trait BufferAdapter {
    /// Adds `value` into channel `channel` at frame `pos`.
    fn mix_sample(&mut self, channel: usize, value: f32, pos: usize);
    /// Zeroes the whole buffer.
    fn clear(&mut self);
    /// Total number of samples the buffer holds.
    fn len(&mut self) -> usize;
    /// Number of stereo frames the buffer holds.
    fn num_frames(&mut self) -> usize;
    /// Hook for a final whole-buffer transformation after mixing.
    fn post_process(&mut self);
}

/// Stereo buffer with interleaved samples: `[L0, R0, L1, R1, …]`.
pub struct InterleavedBufferAdaptar<'a> {
    pub buf: &'a mut [f32],
}

impl BufferAdapter for InterleavedBufferAdaptar<'_> {
    fn mix_sample(&mut self, channel: usize, value: f32, pos: usize) {
        self.buf[pos * 2 + channel] += value;
    }
    fn clear(&mut self) {
        self.buf.fill(0.0);
    }
    fn len(&mut self) -> usize {
        self.buf.len()
    }
    fn num_frames(&mut self) -> usize {
        // Two interleaved samples per stereo frame.
        self.len() / 2
    }
    fn post_process(&mut self) {
        // Interleaved output is consumed as-is.
    }
}

/// Stereo buffer with one separate sample slice per channel.
pub struct PlanarBufferAdaptar<'a> {
    pub buf: [&'a mut [f32]; 2],
}

impl BufferAdapter for PlanarBufferAdaptar<'_> {
    fn mix_sample(&mut self, channel: usize, value: f32, pos: usize) {
        self.buf[channel][pos] += value;
    }
    fn clear(&mut self) {
        for plane in self.buf.iter_mut() {
            plane.fill(0.0);
        }
    }
    fn len(&mut self) -> usize {
        // Usable length is bounded by the shorter of the two planes.
        std::cmp::min(self.buf[0].len(), self.buf[1].len())
    }
    fn num_frames(&mut self) -> usize {
        // One sample per plane per frame.
        self.len()
    }
    fn post_process(&mut self) {
        // Rescale both planes via (x - 0.5) * 2. NOTE(review): assumes the
        // mix for this backend is biased around 0.5 — confirm against the
        // mixer's output range.
        Self::normalize_array(self.buf[0]);
        Self::normalize_array(self.buf[1]);
    }
}

impl<'a> PlanarBufferAdaptar<'a> {
    /// Maps each sample `x` to `(x - 0.5) * 2.0` in place.
    fn normalize_array(buf: &mut [f32]) {
        for sample in buf.iter_mut() {
            *sample = (*sample - 0.5f32) * 2.0f32;
        }
    }
}
/// Dynamically-typed values the embedding application can attach to
/// playback state (see `PlaybackCmd::SetUserData` and
/// `PlayData::user_data`).
#[derive(Clone, Debug)]
pub enum UserData {
    String(String),
    ISize(isize),
    USize(usize)
}
// const BUFFER_SIZE: usize = 4096;
/// The playback engine: module data plus all mutable playback state,
/// rendered tick by tick into a caller-supplied buffer.
pub struct Song {
    name: String,
    // Position within the pattern order table.
    song_position: usize,
    row: usize,
    tick: u32,
    // Current output sample rate; `original_rate` keeps the unmodified
    // value so speed changes can be relative to it.
    rate: f32,
    original_rate: f32,
    // Ticks per row.
    speed: u32,
    global_volume: GlobalVolume,
    song_data: SongData,
    // One mixing state per module channel.
    channels: Vec<ChannelState>,
    pattern_change: PatternChange,
    bpm: BPM,
    loop_pattern: bool,
    pause: bool,
    filter: bool,
    // Whether to publish display snapshots each tick.
    display: bool,
    // Linear or Amiga frequency tables, chosen from the module header.
    frequency_tables: Box<AudioTables>,
    triple_buffer_writer: TripleBufferWriter<PlayData>,
    tick_state: TickState,
    user_data: HashMap<String, UserData>,
}
impl Song {
// fn get_buffer(&mut self) -> Vec<f32> {
// let mut result: Vec<f32> = vec![];
// result.reserve_exact(BUFFER_SIZE);
// while result.len() < BUFFER_SIZE {
// if !self.internal_buffer.is_empty() {
// let copy_size = std::cmp::min(BUFFER_SIZE - result.len(), self.internal_buffer.len());
// result.extend(self.internal_buffer.drain(0..copy_size));
// }
// if !self.internal_buffer.is_empty() {
// return result;
// }
// self.get_next_tick();
// }
//
// return result;
// }
pub fn new(song_data: &SongData, triple_buffer_writer: TripleBufferWriter<PlayData>, sample_rate: f32) -> Self {
let use_amiga = if song_data.use_amiga {AudioTables::calc_tables_amiga()} else {AudioTables::calc_tables_linear()};
Self {
name: song_data.name.clone(),
song_position: 0,
row: 0,
tick: 0,
rate: sample_rate,
original_rate: sample_rate,
speed: song_data.tempo as u32,
bpm: BPM::new(song_data.bpm as u32, sample_rate as f32),
global_volume: GlobalVolume::new(),
song_data: song_data.clone(),
channels: vec![ChannelState {
// instrument: &song_data.instruments[0],
// sample: &song_data.instruments[0].samples[0],
voice: Voice::new(),
note: Note::new(),
frequency: 0.0,
// du: 0.0,
// volume: Volume::new(),
// sample_position: 0.0,
// loop_started: false,
// ping: true,
volume_envelope_state: EnvelopeState::new(),
panning_envelope_state: EnvelopeState::new(),
// sustained: false,
vibrato_envelope_state: VibratoEnvelopeState::new(),
vibrato_state: VibratoState::new(),
tremolo_state: TremoloState::new(),
frequency_shift: 0.0,
period_shift: 0,
on: false,
last_porta_up: 0,
last_porta_down: 0,
last_fine_porta_up: 0,
last_fine_porta_down: 0,
last_volume_slide: 0,
last_fine_volume_slide_up: 0,
last_fine_volume_slide_down: 0,
porta_to_note: PortaToNoteState::new(),
last_sample_offset: 0,
last_panning_speed: 0,
panning: Panning::new(),
force_off: false,
glissando: false,
// last_sample: 0,
// last_sample_pos: 0.0,
vibrato_control: 0,
tremolo_control: 0,
tremor: 0,
tremor_count: 0,
multi_retrig_count: 0,
multi_retrig_volume: 0,
last_played_note: 0
}; song_data.channel_count as usize],
loop_pattern: false,
pattern_change: PatternChange::new(),
pause: false,
filter: true,
display: true,
frequency_tables: use_amiga,
triple_buffer_writer,
tick_state: TickState {
state: BufferState::Start,
current_buf_position: 0,
current_tick_position: 0
},
user_data: HashMap::new()
}
}
// fn get_linear_frequency(note: i16, fine_tune: i32, period_offset: i32) -> f32 {
// let period = 10.0 * 12.0 * 16.0 * 4.0 - (note * 16 * 4) as f32 - (fine_tune as f32) / 2.0 + period_offset as f32;
// let two = 2.0f32;
// let frequency = 8363.0 * two.powf((6.0 * 12.0 * 16.0 * 4.0 - period) / (12.0 * 16.0 * 4.0));
// frequency as f32
// }
fn queue_display(&mut self) {
let play_data = self.triple_buffer_writer.write();
play_data.name = self.name.clone();
play_data.tick_duration_in_frames = self.bpm.tick_duration_in_frames;
play_data.tick_duration_in_ms = self.bpm.tick_duration_in_ms;
play_data.tick = self.tick;
play_data.song_position = self.song_position;
play_data.song_length = self.song_data.song_length;
play_data.row = self.row;
play_data.pattern_len = self.song_data.patterns[self.song_data.pattern_order[self.song_position] as usize].rows.len() - 1;
play_data.bpm = self.bpm.bpm;
play_data.speed = self.speed;
play_data.channel_status.clear();
play_data.filter = self.filter;
play_data.user_data = self.user_data.clone();
for channel in &self.channels {
play_data.channel_status.push(ChannelStatus {
volume: channel.voice.volume.volume as f32,
envelope_volume: channel.voice.volume.envelope_vol as f32,
global_volume: channel.voice.volume.global_vol as f32,
fadeout_volume: channel.voice.volume.fadeout_vol as f32,
on: channel.on,
force_off: channel.force_off,
frequency: channel.frequency + channel.frequency_shift,
instrument: channel.voice.instrument,
sample: channel.voice.sample,
sample_position: channel.voice.sample_position,
note: channel.note.to_string(),
period: channel.note.period,
final_panning: channel.panning.final_panning,
});
}
// Song::display(&play_data, 0);
}
// pub fn get_next_tick(&mut self, buf: &mut [f32], rx: &mut Receiver<PlaybackCmd>) -> CallbackState {
// buf.fill(0.0);
// self.bpm.update(self.bpm.bpm, self.rate);
// // let mut buf = &mut unsafe { *buffer.load(Ordering::Acquire) };
// loop { // loop1
// match self.tick_state.state {
// BufferState::Start => {
// if !self.handle_commands(rx) { return CallbackState::Complete; }
//
// if self.pause {
// //let temp_buf = &mut unsafe { *buffer.load(Ordering::Acquire) };
//
//
// self.tick_state.current_buf_position = 0;
// return CallbackState::Ok;
// }
//
// self.process_tick();
//
// if self.display {
// self.queue_display();
// }
//
// self.tick_state.current_tick_position = 0usize;
// self.tick_state.state = BufferState::FillBuffer
// }
// BufferState::FillBuffer => {
// while self.tick_state.current_tick_position < self.bpm.tick_duration_in_frames {
// let ticks_to_generate = min(self.bpm.tick_duration_in_frames - self.tick_state.current_tick_position,
// AUDIO_BUF_FRAMES - self.tick_state.current_buf_position);
//
// // if let Err(_e) = crossterm::execute!(stdout(), MoveTo(0,1)) {}
// self.output_channels(self.tick_state.current_buf_position, buf, ticks_to_generate);
// self.tick_state.current_tick_position += ticks_to_generate;
// self.tick_state.current_buf_position += ticks_to_generate;
// // println!("tick: {}, buf: {}, row: {}", self.tick, current_buf_position, self.row);
// if self.tick_state.current_buf_position == AUDIO_BUF_FRAMES {
// self.tick_state.current_buf_position = 0;
// return CallbackState::Ok;
// } else {
// // We finished current with the current tick, but buffer is still not full...
// }
// }
// self.tick_state.state = BufferState::NextTick
// }
// BufferState::NextTick => {
// if !self.next_tick() { return CallbackState::Complete; }
// self.tick_state.state = BufferState::Start
// }
// }
// }
// }
    /// Sets the output sample rate. Also stored as the baseline rate that
    /// `PlaybackCmd::SpeedReset` restores after SpeedUp/SpeedDown changes.
    pub fn set_sample_rate(&mut self, sample_rate: f32) {
        self.rate = sample_rate;
        self.original_rate = sample_rate;
    }
    /// Returns a cloned copy of the song's instrument list.
    pub fn get_instruments(&self) -> Vec<Instrument>{
        self.song_data.instruments.clone()
    }
    /// Returns a cloned copy of the song's pattern data.
    pub fn get_patterns(&self) -> Vec<Patterns> {
        self.song_data.patterns.clone()
    }
    /// Returns a cloned copy of the pattern order table (song sequence).
    pub fn get_order(&self) -> Vec<u8> {
        self.song_data.pattern_order.clone()
    }
    /// Fills `buf` with the next chunk of rendered audio.
    ///
    /// Runs a three-state machine kept in `self.tick_state`:
    /// * `Start` — drains pending commands, processes one tick of row/effect
    ///   data (or early-outs when paused);
    /// * `FillBuffer` — renders frames until either the tick's frame budget or
    ///   the output buffer is exhausted;
    /// * `NextTick` — advances tick/row/position counters.
    ///
    /// Returns `CallbackState::Ok` when `buf` has been filled, or
    /// `CallbackState::Complete` when playback finished or a Quit command
    /// arrived.
    pub fn get_next_tick(&mut self, buf: &mut impl BufferAdapter, rx: &mut Receiver<PlaybackCmd>) -> CallbackState {
        buf.clear();
        self.bpm.update(self.bpm.bpm, self.rate);
        loop { // loop1
            match self.tick_state.state {
                BufferState::Start => {
                    if !self.handle_commands(rx) { return CallbackState::Complete; }
                    if self.pause {
                        // Paused: hand back a silent (cleared) buffer.
                        self.tick_state.current_buf_position = 0;
                        return CallbackState::Ok;
                    }
                    self.process_tick();
                    if self.display {
                        self.queue_display();
                    }
                    self.tick_state.current_tick_position = 0usize;
                    self.tick_state.state = BufferState::FillBuffer
                }
                BufferState::FillBuffer => {
                    while self.tick_state.current_tick_position < self.bpm.tick_duration_in_frames {
                        // Render as many frames as fit in both the remaining tick
                        // and the remaining buffer space.
                        let ticks_to_generate = min(self.bpm.tick_duration_in_frames - self.tick_state.current_tick_position,
                                                    buf.num_frames() - self.tick_state.current_buf_position);
                        self.output_channels(self.tick_state.current_buf_position, buf, ticks_to_generate);
                        self.tick_state.current_tick_position += ticks_to_generate;
                        self.tick_state.current_buf_position += ticks_to_generate;
                        if self.tick_state.current_buf_position == buf.num_frames() {
                            self.tick_state.current_buf_position = 0;
                            return CallbackState::Ok;
                        } else {
                            // We finished the current tick, but the buffer is still not full...
                        }
                    }
                    self.tick_state.state = BufferState::NextTick
                }
                BufferState::NextTick => {
                    if !self.next_tick() { return CallbackState::Complete; }
                    self.tick_state.state = BufferState::Start
                }
            }
        }
    }
fn handle_commands(&mut self, rx: & Receiver<PlaybackCmd>) -> bool {
loop {
if let Ok(cmd) = rx.try_recv() {
match cmd {
PlaybackCmd::Quit => {
return false;
}
PlaybackCmd::Next => {
if self.song_position < (self.song_data.song_length - 1) as usize {
self.song_position += 1;
self.row = 0;
self.tick = 0;
}
}
PlaybackCmd::Prev => {
if self.song_position > 0 as usize {
self.song_position -= 1;
self.row = 0;
self.tick = 0;
}
}
PlaybackCmd::Restart => {
self.row = 0;
self.tick = 0;
}
PlaybackCmd::IncBPM => {self.bpm.update(self.bpm.bpm + 1, self.rate);}
PlaybackCmd::DecBPM => {self.bpm.update(self.bpm.bpm - 1, self.rate);}
PlaybackCmd::IncSpeed => {self.speed += 1;}
PlaybackCmd::DecSpeed => {self.speed -= 1;}
PlaybackCmd::LoopPattern => {self.loop_pattern = !self.loop_pattern;}
PlaybackCmd::PauseToggle => {self.pause = !self.pause;}
PlaybackCmd::FilterToggle => {self.filter = !self.filter;}
PlaybackCmd::DisplayToggle => {self.display = !self.display;}
PlaybackCmd::ChannelToggle(channel) => {self.channels[channel as usize].force_off = !self.channels[channel as usize].force_off;}
PlaybackCmd::AmigaTable => {self.frequency_tables = AudioTables::calc_tables_amiga();}
PlaybackCmd::LinearTable => {self.frequency_tables = AudioTables::calc_tables_linear();}
PlaybackCmd::SetUserData(key, value) => {self.user_data.insert(key, value);}
PlaybackCmd::ModifyUserDataAddUSize(key, value) => {
let entry = self.user_data.entry(key).or_insert(UserData::USize(0));
if let UserData::USize(x) = entry {
*x = (Wrapping(*x) + Wrapping(value)).0;
}
}
PlaybackCmd::ModifyUserDataSubUSize(key, value) => {
let entry = self.user_data.entry(key).or_insert(UserData::USize(0));
if let UserData::USize(x) = entry {
*x = (Wrapping(*x) - Wrapping(value)).0;
}
}
PlaybackCmd::ModifyUserDataAddISize(key, value) => {
let entry = self.user_data.entry(key).or_insert(UserData::ISize(0));
if let UserData::ISize(x) = entry {
let res = (Wrapping(*x) + Wrapping(value)).0;
*entry = UserData::ISize(res);
}
}
PlaybackCmd::ModifyUserDataSubISize(key, value) => {
let entry = self.user_data.entry(key).or_insert(UserData::ISize(0));
if let UserData::ISize(x) = entry {
let res = (Wrapping(*x) - Wrapping(value)).0;
*entry = UserData::ISize(res);
}
}
PlaybackCmd::SpeedUp => {
self.rate /= 1.1;
}
PlaybackCmd::SpeedDown => {
self.rate *= 1.1;
}
PlaybackCmd::SpeedReset => {
self.rate = self.original_rate;
}
PlaybackCmd::SetPosition(order) => {
self.pattern_change.pattern = order as u8;
self.pattern_change.pattern_jump = true;
self.pattern_change.row = 0;
self.next_tick();
}
}
}
else
{
break;
}
}
return true;
}
    /// Advances the tick counter; when the per-row tick budget (`speed`) is
    /// exhausted, moves to the next row (or to the target of a pending pattern
    /// break/jump). Returns `false` once the end of the song is reached.
    fn next_tick(&mut self) -> bool {
        if self.song_position >= self.song_data.song_length as usize {
            return false;
        }
        self.tick += 1;
        if self.tick >= self.speed {
            // Row finished: honour any scheduled pattern break/jump first.
            if self.pattern_change.pattern_break || self.pattern_change.pattern_jump {
                if !self.pattern_change.pattern_jump {
                    self.next_pattern();
                } else {
                    self.song_position = self.pattern_change.pattern as usize;
                    if self.song_position >= self.song_data.song_length as usize {
                        return false;
                    }
                }
                self.row = self.pattern_change.row as usize;
            } else {
                // Normal flow: next row, wrapping into the next pattern at the end.
                self.row = self.row + 1;
                if self.row >= self.song_data.patterns[self.song_data.pattern_order[self.song_position as usize] as usize].rows.len() {
                    self.row = 0;
                    self.next_pattern();
                }
            }
            // if self.song_position >= self.song_data.song_length as usize { self.song_position = self.song_data.restart_position as usize; }
            if self.song_position >= self.song_data.song_length as usize { return false; }
            self.tick = 0;
            self.pattern_change.reset();
        }
        true
    }
fn next_pattern(&mut self) {
if !self.loop_pattern {
self.song_position = self.song_position + 1;
}
}
    /// Processes one tick of the current row for every channel: instrument and
    /// note triggering, fadeout, volume-column commands, effect-column commands
    /// (including the 0xE sub-effects), and volume/panning envelope updates.
    /// Mirrors FT2 (FastTracker II) tick semantics, including some of its
    /// documented quirks.
    fn process_tick(&mut self) {
        let instruments = &self.song_data.instruments;
        if self.song_position as usize >= self.song_data.pattern_order.len() {
            // Invariant violated by the caller; crash loudly with context.
            panic!("{} {}", self.song_position, self.song_data.song_length);
        }
        let patterns = &self.song_data.patterns[self.song_data.pattern_order[self.song_position] as usize];
        let row = &patterns.rows[self.row];
        let first_tick = self.tick == 0;
        // Collects effect codes with no handler, for diagnostics.
        let mut missing = String::new();
        for (i, pattern) in row.channels.iter().enumerate() {
            let mut channel = &mut self.channels[i];
            // Note-delay (EDx) shifts the "first tick" of this row to tick x.
            let note_delay_first_tick = if pattern.is_note_delay() { self.tick == pattern.get_y() as u32 } else {first_tick};
            if !channel.voice.sustained {
                // Key released: apply fadeout, clamping at zero.
                if channel.voice.volume.fadeout_vol - channel.voice.volume.fadeout_speed * 2 < 0 {
                    channel.voice.volume.fadeout_vol = 0;
                } else {
                    channel.voice.volume.fadeout_vol -= channel.voice.volume.fadeout_speed * 2;
                }
            }
            if first_tick && pattern.is_porta_to_note() && pattern.instrument != 0 {
                // Instrument next to a tone-porta retrigs volume/envelopes only.
                let sample = self.song_data.get_sample(&channel);
                channel.voice.volume.retrig(sample.volume as i32);
                channel.reset_envelopes(instruments);
            }
            let note = if !is_note_valid(pattern.note) && pattern.is_note_delay() && !first_tick {channel.last_played_note} else {pattern.note};
            // if note_delay_first_tick && note == 97 && !pattern.is_porta_to_note() { // note off
            //     channel.key_off(pattern.is_note_delay());
            // }
            if !pattern.is_porta_to_note() &&
                ((pattern.is_note_delay() && self.tick == pattern.get_y() as u32) ||
                    (!pattern.is_note_delay() && first_tick)) { // new row, set instruments
                let mut reset_envelope = false;
                if pattern.instrument != 0 {
                    // Out-of-range instrument numbers fall back to instrument 0.
                    let instrument = if pattern.instrument < instruments.len() as u8 {pattern.instrument as usize} else {0};
                    channel.voice.instrument = instrument;
                    if is_note_valid(note) {
                        channel.voice.sample = instruments[instrument].sample_indexes[(note - 1) as usize] as usize;
                        channel.last_played_note = note;
                    }
                    // channel.volume.retrig(channel.sample.volume as i32);
                    reset_envelope = true;
                    channel.panning.panning = self.song_data.get_sample(channel).panning;
                }
                if note == 97 { // note off
                    if !channel.key_off(instruments, pattern.is_note_delay()) {
                        // continue;
                    }
                }
                channel.frequency_shift = 0.0;
                channel.period_shift = 0;
                // let mut reset_envelope = false;
                if reset_envelope {
                    channel.voice.volume.retrig(self.song_data.get_sample(channel).volume as i32);
                    channel.reset_envelopes(instruments);
                }
                if pattern.is_note_delay() {
                    channel.reset_envelopes(instruments);
                }
                channel.trigger_note(instruments, note, self.rate, self.frequency_tables.borrow());
            }
            // handle vibrato
            if !first_tick && pattern.has_vibrato() { // vibrate
                channel.frequency_shift = channel.vibrato_state.get_frequency_shift(WaveControl::from(channel.vibrato_control)) as f32;
                channel.update_frequency(self.rate, false, self.frequency_tables.borrow());
            }
            // handle tremolo (not really need to do it here, but oh, well)
            if !first_tick && pattern.has_tremolo() { // tremolate
                channel.voice.volume.volume_shift = channel.tremolo_state.get_volume_shift(WaveControl::from(channel.tremolo_control));
            }
            // Volume column (0x10..=0xff encode commands; 0 means "no command").
            match pattern.volume {
                0x10..=0x50 => { channel.set_volume(note_delay_first_tick, pattern.volume - 0x10); } // set volume
                0x60..=0x6f => { channel.volume_slide(note_delay_first_tick, -(pattern.get_volume_param() as i8)); } // Volume slide down
                0x70..=0x7f => { channel.volume_slide(note_delay_first_tick, pattern.get_volume_param() as i8); } // Volume slide up
                0x80..=0x8f => { channel.fine_volume_slide(note_delay_first_tick, -(pattern.get_volume_param() as i8)); } // Fine volume slide down
                0x90..=0x9f => { channel.fine_volume_slide(note_delay_first_tick, pattern.get_volume_param() as i8); } // Fine volume slide up
                0xa0..=0xaf => { channel.vibrato_state.set_speed((pattern.get_volume_param() * 4) as i8); } // Set vibrato speed (*4 is probably because S3M did this in order to support finer vibrato)
                0xb0..=0xbf => { channel.vibrato(first_tick, 0,pattern.get_volume_param()) } // Vibrato
                0xc0..=0xcf => { channel.panning.set_panning((pattern.get_volume_param() as i32) * 16);}// Set panning
                0xd0..=0xdf => { // Panning slide left
                    let pan = channel.panning.panning as i16 - pattern.get_volume_param() as i16;
                    if pattern.get_volume_param() == 0 || pan < 0 {
                        channel.panning.set_panning(0); // FT2 bug: param 0 = pan gets set to 0
                    } else {
                        channel.panning.set_panning(pan as i32);
                    }
                }
                0xe0..=0xef => { // Panning slide right
                    let pan = channel.panning.panning as i16 + pattern.get_volume_param() as i16;
                    if pan > 255 {
                        channel.panning.set_panning(255);
                    } else {
                        channel.panning.set_panning(pan as i32);
                    }
                }
                0xf0..=0xff => {channel.porta_to_note(instruments, first_tick, pattern.volume & 0xf, pattern.note, self.rate, self.frequency_tables.borrow()); }// Tone porta
                _ => {}
            }
            // handle effects
            match pattern.effect {
                0x0 => { // Arpeggio
                    if pattern.effect_param != 0 {
                        channel.arpeggio(self.tick, pattern.get_x(), pattern.get_y());
                        channel.update_frequency(self.rate, true, self.frequency_tables.borrow());
                    }
                }
                0x1 => { channel.porta_up(first_tick, pattern.effect_param, self.rate, self.frequency_tables.borrow()); } // Porta up
                0x2 => { channel.porta_down(first_tick, pattern.effect_param, self.rate, self.frequency_tables.borrow()); } // Porta down
                0x3 => { channel.porta_to_note(instruments, first_tick, pattern.effect_param, pattern.note, self.rate, self.frequency_tables.borrow()); } // Porta to note
                0x4 => { channel.vibrato(first_tick, pattern.get_x() * 4, pattern.get_y()); } // vibrato
                0x5 => { // porta to note + volume slide
                    channel.porta_to_note(instruments, first_tick, 0, 0, self.rate, self.frequency_tables.borrow());
                    channel.volume_slide_main(first_tick, pattern.effect_param);
                }
                0x6 => { // vibrato + volume slide
                    channel.vibrato(first_tick, 0, 0);
                    channel.volume_slide_main(first_tick, pattern.effect_param);
                }
                0x7 => {
                    // Tremolo; x*4 matches the vibrato speed scaling above.
                    channel.tremolo(first_tick, pattern.get_x() * 4, pattern.get_y());
                }
                0x8 => { // panning
                    channel.panning.set_panning(pattern.effect_param as i32);
                }
                0x9 => { // sample offset
                    if first_tick && pattern.instrument != 0 {
                        if pattern.effect_param != 0 {
                            channel.last_sample_offset = pattern.effect_param as u32 * 256;
                        }
                        channel.voice.sample_position = channel.last_sample_offset as f32;
                        // Offset past the end of the sample kills the note.
                        if channel.last_sample_offset > self.song_data.get_sample(channel).length {
                            channel.key_off(instruments, false);
                        }
                    }
                }
                0xA => {
                    channel.volume_slide_main(note_delay_first_tick, pattern.effect_param);
                }
                0xB => { // Pattern Jump
                    self.pattern_change.set_jump(first_tick, pattern.effect_param);
                }
                0xC => { channel.set_volume(note_delay_first_tick, pattern.effect_param); } // set volume
                0xD => { // Pattern Break
                    self.pattern_change.set_break(first_tick, pattern.get_x() * 10 + pattern.get_y());
                }
                0xE => {} // handled separately
                0xF => { // set speed
                    // Params 0x01-0x1f set ticks/row; 0x20+ set BPM (FT2 convention).
                    if first_tick && pattern.effect_param > 0 {
                        if pattern.effect_param <= 0x1f {
                            self.speed = pattern.effect_param as u32;
                        } else {
                            self.bpm.update(pattern.effect_param as u32, self.rate);
                        }
                    }
                }
                0x10 => { // set global volume
                    self.global_volume.set_volume(note_delay_first_tick, pattern.effect_param);
                }
                0x11 => { // global volume slide
                    self.global_volume.volume_slide(note_delay_first_tick, pattern.effect_param);
                }
                0x14 => { // key off
                    if self.tick == pattern.effect_param as u32 {
                        channel.key_off(instruments, pattern.is_note_delay());
                    }
                }
                0x15 => { // set envelope position
                    let instrument = self.song_data.get_instrument(channel);
                    if instrument.volume_envelope.on { channel.volume_envelope_state.set_position(&instrument.volume_envelope, pattern.effect_param);}
                    // FT2 bug - only set panning position if volume sustain is set
                    if instrument.volume_envelope.sustain { channel.panning_envelope_state.set_position(&instrument.panning_envelope, pattern.effect_param);}
                }
                0x19 => {
                    channel.panning_slide(first_tick, pattern.effect_param);
                }
                0x1b => {
                    channel.multi_retrig(instruments, first_tick, self.tick, pattern.effect_param, note, self.rate, self.frequency_tables.borrow());
                }
                0x1d => {
                    channel.tremor(self.tick, pattern.effect_param);
                }
                _ => {missing.push_str(format!("channel: {}, eff: {:x},", i, pattern.effect).as_ref());}
            }
            // 0xE sub-effects, dispatched on the high nibble of the parameter.
            if pattern.effect == 0xe {
                match pattern.get_x() {
                    0x1 => { channel.fine_porta_up(first_tick, pattern.get_y(), self.rate, self.frequency_tables.borrow()); } // Porta up
                    0x2 => { channel.fine_porta_down(first_tick, pattern.get_y(), self.rate, self.frequency_tables.borrow()); } // Porta down
                    0x3 => { channel.glissando = pattern.get_y() == 1; }
                    0x4 => { channel.vibrato_control = pattern.get_y();}
                    0x7 => { channel.tremolo_control = pattern.get_y();}
                    0x8 => { channel.panning.set_panning((pattern.get_y() * 17) as i32);}
                    0x9 => { channel.retrig_note(instruments, first_tick, self.tick, pattern.get_y(), pattern.note, self.rate, self.frequency_tables.borrow());}
                    0xa => { channel.fine_volume_slide_up(note_delay_first_tick, pattern.get_y());} // volume slide up
                    0xb => { channel.fine_volume_slide_down(note_delay_first_tick, pattern.get_y());} // volume slide up
                    0xc => { channel.set_volume(self.tick == pattern.get_y() as u32, 0); }
                    0xd => {} // handled elsewhere
                    _ => {missing.push_str(format!("channel_state: {}, eff: 0xe{:x},", i, pattern.get_x()).as_ref());}
                }
            }
            // Advance volume/panning envelopes and compute the final output volume.
            let instrument = self.song_data.get_instrument(channel);
            let envelope_volume = channel.volume_envelope_state.handle(&instrument.volume_envelope, channel.voice.sustained, 64, false);
            let mut envelope_panning = channel.panning_envelope_state.handle(&instrument.panning_envelope, channel.voice.sustained, 32, true);
            // let scale = 0.9;
            envelope_panning = clamp(envelope_panning, 0, 64 * 256);
            channel.panning.update_envelope_panning(envelope_panning);
            // FinalVol = (FadeOutVol/65536)*(EnvelopeVol/64)*(GlobalVol/64)*(Vol/64)*Scale;
            // channel_state.update_frequency(self.rate);
            let global_volume = self.global_volume.volume as f32 / 64.0 ;
            channel.voice.volume.envelope_vol = envelope_volume as i32;
            channel.voice.volume.global_vol = self.global_volume.volume as i32;
            channel.voice.volume.output_volume = (channel.voice.volume.fadeout_vol as f32 / 65536.0) * (envelope_volume as f32 / 16384.0) * (channel.voice.volume.get_volume() as f32 / 64.0) * global_volume;
        }
        // row
    }
// fn channel_borrow_mut<'b>(&'b mut self, i: usize) -> &'b mut ChannelState<'a> {
// let channels = &mut (self.channels);
// let (_, r) = channels.split_at_mut(i);
// r[0].borrow_mut()
// }
// fn porta_inner(frequncy_shift: i8, channel_state: &mut ChannelData) {
// channel_state.frequency_shift += frequency_shift;
// }
fn lerp(pos: f32, p1: f32, p2: f32) -> f32 {
let t = pos.fract();
return (1.0 - t) * p1 + t * p2;
}
// fn output_channels(&mut self, current_buf_position: usize, buf: &mut [f32; AUDIO_BUF_SIZE], ticks_to_generate: usize) {
// // let mut idx: u32 = 0;
//
// // let onecc = 1.0f32;// / cc as f32;
// // FT2 quirk: global volume is used at channel volume calculation time, not at mixing time
// //let global_volume = self.volume as f32 / 64.0 ;
// // println!("position: {:3}, row: {:3}", self.song_position, self.row);
//
//
// for channel in &mut self.channels {
//
// // idx = idx + 1;
// // if idx != 1 {continue;}
// if !channel.on || channel.force_off {
// continue;
// }
//
// // print!("channel_state: {}, instrument: {}, frequency: {}, volume: {}\n", idx, channel_state.instrument.name, channel_state.frequency, channel_state.volume);
//
// let sample = self.song_data.get_sample(channel);
//
// let vol_right = PANNING_TAB[ channel.panning.final_panning as usize] as f32 / 65536.0;
// let vol_left = PANNING_TAB[256 - channel.panning.final_panning as usize] as f32 / 65536.0;
// for i in 0..ticks_to_generate as usize {
//
// if channel.voice.sample_position as u32 >= sample.length { // we could have this after set sample position
// channel.on = false;
// break;
// }
//
// let sample_data = sample.data[channel.voice.sample_position as usize];
// let out_sample: f32 = if self.filter {
// Self::lerp(channel.voice.sample_position, sample_data, sample.data[channel.voice.sample_position as usize + 1])
// } else {
// sample_data
// };
// // channel.last_sample = sample_data;
// // channel.last_sample_pos = channel.sample_position;
//
// buf[(current_buf_position + i) * 2 + 0] += vol_left * out_sample / 4.0 * channel.voice.volume.output_volume;// * global_volume;
// buf[(current_buf_position + i) * 2 + 1] += vol_right * out_sample / 4.0 * channel.voice.volume.output_volume;// * global_volume;
//
// // if (i & 63) == 0 {print!("{}\n", channel_state.sample_position);}
// if sample.loop_type == LoopType::PingPongLoop && !channel.voice.ping {
// channel.voice.sample_position -= channel.voice.du;
// } else {
// channel.voice.sample_position += channel.voice.du;
// }
//
// if channel.voice.sample_position as u32 >= sample.length ||
// (sample.loop_type != LoopType::NoLoop && channel.voice.sample_position >= sample.loop_end as f32) {
// channel.voice.loop_started = true;
// match sample.loop_type {
// LoopType::PingPongLoop => {
// channel.voice.sample_position = (sample.loop_end - 1) as f32 - (channel.voice.sample_position - sample.loop_end as f32);
// channel.voice.ping = false;
// // channel_state.sample_position = (channel_state.sample.loop_end - 1) as f32;
// // channel_state.du = -channel_state.du;
// }
// LoopType::NoLoop => {
// channel.on = false;
// channel.voice.volume.set_volume(0);
// break;
// }
// LoopType::ForwardLoop => {
// channel.voice.sample_position = (channel.voice.sample_position - sample.loop_end as f32) + sample.loop_start as f32;
// }
// }
// }
//
// if channel.voice.loop_started && channel.voice.sample_position < sample.loop_start as f32 {
// match sample.loop_type {
// LoopType::PingPongLoop => {
// channel.voice.ping = true;
// }
// _ => {}
// }
// channel.voice.sample_position = sample.loop_start as f32 + (sample.loop_start as f32 - channel.voice.sample_position) as f32;
// }
// }
// }
// }
    /// Mixes `ticks_to_generate` frames from every active channel into `buf`,
    /// starting at `current_buf_position`. Handles constant-power panning,
    /// optional linear interpolation, and forward/ping-pong sample loops.
    fn output_channels(&mut self, current_buf_position: usize, buf: &mut impl BufferAdapter, ticks_to_generate: usize) {
        // let mut idx: u32 = 0;
        // let onecc = 1.0f32;// / cc as f32;
        // FT2 quirk: global volume is used at channel volume calculation time, not at mixing time
        //let global_volume = self.volume as f32 / 64.0 ;
        // println!("position: {:3}, row: {:3}", self.song_position, self.row);
        for channel in &mut self.channels {
            // idx = idx + 1;
            // if idx != 1 {continue;}
            if !channel.on || channel.force_off {
                continue;
            }
            // print!("channel_state: {}, instrument: {}, frequency: {}, volume: {}\n", idx, channel_state.instrument.name, channel_state.frequency, channel_state.volume);
            let sample = self.song_data.get_sample(channel);
            // Panning lookup table gives per-side gains; indices mirror each other.
            let vol_right = PANNING_TAB[ channel.panning.final_panning as usize] as f32 / 65536.0;
            let vol_left = PANNING_TAB[256 - channel.panning.final_panning as usize] as f32 / 65536.0;
            for i in 0..ticks_to_generate as usize {
                if channel.voice.sample_position as u32 >= sample.length { // we could have this after set sample position
                    channel.on = false;
                    break;
                }
                let sample_data = sample.data[channel.voice.sample_position as usize];
                // Optional linear interpolation between adjacent sample points.
                let out_sample: f32 = if self.filter {
                    Self::lerp(channel.voice.sample_position, sample_data, sample.data[channel.voice.sample_position as usize + 1])
                } else {
                    sample_data
                };
                // channel.last_sample = sample_data;
                // channel.last_sample_pos = channel.sample_position;
                // /4.0 headroom so several channels can sum without clipping.
                let final_sample = out_sample / 4.0 * channel.voice.volume.output_volume;
                buf.mix_sample(0, final_sample * vol_left, current_buf_position + i);
                buf.mix_sample(1, final_sample * vol_right, current_buf_position + i);
                // buf[(current_buf_position + i) * 2 + 0] += vol_left * out_sample / 4.0 * channel.voice.volume.output_volume;// * global_volume;
                // buf[(current_buf_position + i) * 2 + 1] += vol_right * out_sample / 4.0 * channel.voice.volume.output_volume;// * global_volume;
                // if (i & 63) == 0 {print!("{}\n", channel_state.sample_position);}
                // Advance the play head; ping-pong loops run backwards on the return leg.
                if sample.loop_type == LoopType::PingPongLoop && !channel.voice.ping {
                    channel.voice.sample_position -= channel.voice.du;
                } else {
                    channel.voice.sample_position += channel.voice.du;
                }
                if channel.voice.sample_position as u32 >= sample.length ||
                    (sample.loop_type != LoopType::NoLoop && channel.voice.sample_position >= sample.loop_end as f32) {
                    channel.voice.loop_started = true;
                    match sample.loop_type {
                        LoopType::PingPongLoop => {
                            // Reflect the overshoot back inside the loop and reverse direction.
                            channel.voice.sample_position = (sample.loop_end - 1) as f32 - (channel.voice.sample_position - sample.loop_end as f32);
                            channel.voice.ping = false;
                            // channel_state.sample_position = (channel_state.sample.loop_end - 1) as f32;
                            // channel_state.du = -channel_state.du;
                        }
                        LoopType::NoLoop => {
                            channel.on = false;
                            channel.voice.volume.set_volume(0);
                            break;
                        }
                        LoopType::ForwardLoop => {
                            channel.voice.sample_position = (channel.voice.sample_position - sample.loop_end as f32) + sample.loop_start as f32;
                        }
                    }
                }
                // Ran off the front of the loop (ping-pong return leg): reflect forward again.
                if channel.voice.loop_started && channel.voice.sample_position < sample.loop_start as f32 {
                    match sample.loop_type {
                        LoopType::PingPongLoop => {
                            channel.voice.ping = true;
                        }
                        _ => {}
                    }
                    channel.voice.sample_position = sample.loop_start as f32 + (sample.loop_start as f32 - channel.voice.sample_position) as f32;
                }
            }
        }
    }
}
|
// Copyright 2014-2018 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![warn(clippy::all)]
#![allow(clippy::boxed_local, clippy::needless_pass_by_value)]
#![allow(clippy::blacklisted_name)]
// Lint-fixture macro: boxes an expression inside macro-generated code so the
// lint can be checked for (non-)firing in macro expansions.
macro_rules! boxit {
    ($init:expr, $x:ty) => {
        let _: Box<$x> = Box::new($init);
    };
}
// Exercises the macro above; the Box<Vec<u8>> here originates in macro expansion.
fn test_macro() {
    boxit!(Vec::new(), Vec<u8>);
}
// NOTE(review): Box<Vec<bool>> is deliberate lint bait for this clippy fixture —
// do not "fix" it to Vec<bool>.
pub fn test(foo: Box<Vec<bool>>) {
    println!("{:?}", foo.get(0))
}
// Boxed trait object parameter; kept as-is per the fixture's referenced issue.
pub fn test2(foo: Box<Fn(Vec<u32>)>) {
    // pass if #31 is fixed
    foo(vec![1, 2, 3])
}
// A local (never-moved) boxed vec: the lint must NOT fire here.
pub fn test_local_not_linted() {
    let _: Box<Vec<bool>>;
}
// Calls every fixture function so each has a live call site.
fn main() {
    test(Box::new(Vec::new()));
    test2(Box::new(|v| println!("{:?}", v)));
    test_macro();
    test_local_not_linted();
}
|
/// Returns the length of the longest run of consecutive integers in `nums`
/// (LeetCode 128). Duplicates are ignored; an empty input yields 0.
///
/// O(n) expected time: each number is only counted from the start of its run
/// (a value with no predecessor in the set), so every element is visited a
/// constant number of times. The original built a HashMap of run lengths and
/// then scanned it for the max; tracking the max directly removes that extra
/// allocation and pass.
fn longest_consecutive(nums: Vec<i32>) -> i32 {
    use std::collections::HashSet;
    let hs: HashSet<i32> = nums.into_iter().collect();
    let mut best = 0;
    for &n in &hs {
        // Only start counting at the beginning of a run.
        if hs.contains(&(n - 1)) {
            continue;
        }
        let mut len: i32 = 1;
        while hs.contains(&(n + len)) {
            len += 1;
        }
        best = best.max(len);
    }
    best
}
fn main() {
assert_eq!(longest_consecutive(vec![100, 4, 200, 1, 3, 2]), 4);
assert_eq!(longest_consecutive(vec![0, 3, 7, 2, 5, 8, 4, 6, 0, 1]), 9);
assert_eq!(
longest_consecutive(vec![9, 1, 4, 7, 3, -1, 0, 5, 8, -1, 6]),
7
);
}
|
use std::sync::{Arc, RwLock};
use std::time::{Duration, Instant};
use crate::frame::Frame;
/// A mutable source of frames: advance with [`next_frame`](Animation::next_frame),
/// rewind with [`reset`](Animation::reset). The provided combinators wrap an
/// animation in rate-limiting, time-limiting, chaining and repetition adapters.
pub trait Animation {
    /// Renders the animation's next frame into `frame`.
    fn next_frame(&mut self, frame: &mut Frame);
    /// Rewinds the animation to its initial state.
    fn reset(&mut self);
    /// Caps this animation at `fps` frames per second.
    fn with_fps(self, fps: f32) -> FixedFPSAnimation<Self>
    where
        Self: Sized,
    {
        FixedFPSAnimation {
            inner: self,
            interval: Duration::from_secs_f32(1.0 / fps),
            last_frame: Instant::now(),
        }
    }
    /// Ends this animation after `duration` of wall-clock time.
    fn with_duration(self, duration: Duration) -> TimeLimitedAnimation<Self>
    where
        Self: Sized,
    {
        TimeLimitedAnimation {
            inner: self,
            duration,
            started: Instant::now(),
        }
    }
    /// Plays this animation to completion, then plays `other`.
    fn chain<U: Animation>(self, other: U) -> ChainedAnimation<Self, U>
    where
        Self: Sized,
    {
        ChainedAnimation {
            a: self,
            b: other,
            current: false,
        }
    }
    /// Restarts this animation each time it ends, allowing up to `times` restarts.
    fn repeat(self, times: usize) -> RepeatedAnimation<Self>
    where
        Self: Sized,
    {
        RepeatedAnimation {
            inner: self,
            loops: times,
            count: 0,
        }
    }
}
/// An animation with a definite end point.
pub trait TerminatingAnimation: Animation {
    /// Returns `true` once the animation has finished.
    fn ended(&self) -> bool;
}
/// Like [`TerminatingAnimation`], but implementable by animations that may
/// never end (see the blanket impl below).
pub trait MaybeTerminatingAnimation: Animation {
    /// Returns `true` if the animation is known to have finished.
    fn maybe_ended(&self) -> bool;
}
// Blanket default: a plain Animation never reports termination. `default fn`
// is specialization (nightly feature), letting terminating wrappers override it.
impl<T: Animation> MaybeTerminatingAnimation for T {
    default fn maybe_ended(&self) -> bool {
        false
    }
}
// Specializes the blanket impl above: implements MaybeTerminatingAnimation for
// each listed type by forwarding to its TerminatingAnimation::ended. Accepts
// optional generic parameter lists with bounds, e.g. `Foo<T: Bar + Baz>`.
macro_rules! impl_maybe_term {
    ( $( $name:ident $(< $( $lt:tt $( : $clt:tt $(+ $dlt:tt )* )? ),+ >)? ),+ ) => {
        $(
            impl $(< $( $lt $( : $clt $(+ $dlt )* )? ),+ >)?
                MaybeTerminatingAnimation
                for $name
                $(< $( $lt ),+ >)?
            {
                fn maybe_ended(&self) -> bool {
                    self.ended()
                }
            }
        )+
    }
}
/// Rate-limiting wrapper: forwards frames to `inner` at most once per `interval`.
#[derive(Debug)]
#[cfg_attr(feature = "visual", derive(bevy_inspector_egui::Inspectable))]
pub struct FixedFPSAnimation<T> {
    inner: T,
    // Minimum time between forwarded frames (1 / fps).
    #[cfg_attr(feature = "visual", inspectable(ignore))]
    interval: Duration,
    // Timestamp of the last forwarded frame.
    #[cfg_attr(feature = "visual", inspectable(ignore))]
    last_frame: Instant,
}
impl<T: Animation> Animation for FixedFPSAnimation<T> {
    /// Forwards to the inner animation only once `interval` has elapsed since
    /// the previous forwarded frame; otherwise leaves `frame` untouched.
    fn next_frame(&mut self, frame: &mut Frame) {
        let now = Instant::now();
        let since_last = now.duration_since(self.last_frame);
        if since_last > self.interval {
            self.inner.next_frame(frame);
            self.last_frame = now;
        }
    }
    /// Resets the wrapped animation; the frame timer keeps its last timestamp.
    fn reset(&mut self) {
        self.inner.reset();
    }
}
impl<T: TerminatingAnimation> TerminatingAnimation for FixedFPSAnimation<T> {
    // Termination is decided entirely by the wrapped animation.
    fn ended(&self) -> bool {
        self.inner.ended()
    }
}
// FixedFPSAnimation "maybe ends" exactly when its inner terminating animation does.
impl_maybe_term!(FixedFPSAnimation<T: TerminatingAnimation>);
/// Wrapper that terminates its inner animation after a wall-clock `duration`.
#[derive(Debug)]
#[cfg_attr(feature = "visual", derive(bevy_inspector_egui::Inspectable))]
pub struct TimeLimitedAnimation<T> {
    inner: T,
    // Wall-clock budget after which `ended` reports true.
    #[cfg_attr(feature = "visual", inspectable(ignore))]
    duration: Duration,
    // When the current run began (reset by `reset`).
    #[cfg_attr(feature = "visual", inspectable(ignore))]
    started: Instant,
}
impl<T: Animation> Animation for TimeLimitedAnimation<T> {
    fn next_frame(&mut self, frame: &mut Frame) {
        // Frames pass through unchanged; the time limit only affects `ended`.
        self.inner.next_frame(frame);
    }
    fn reset(&mut self) {
        self.inner.reset();
        // Restart the wall-clock budget as well.
        self.started = Instant::now();
    }
}
impl<T: MaybeTerminatingAnimation> TerminatingAnimation for TimeLimitedAnimation<T> {
    /// Ends once the wall-clock budget is exhausted, or earlier if the inner
    /// animation reports its own termination.
    fn ended(&self) -> bool {
        // Time budget exhausted: terminate regardless of the inner state.
        // (A leftover dbg! that printed to stderr on every expired poll was removed.)
        if self.started.elapsed() > self.duration {
            return true;
        }
        // Otherwise defer to the inner animation (always false for the blanket impl).
        self.inner.maybe_ended()
    }
}
/// Plays `a` to completion, then plays `b`.
#[derive(Debug)]
#[cfg_attr(feature = "visual", derive(bevy_inspector_egui::Inspectable))]
pub struct ChainedAnimation<T, U> {
    a: T,
    b: U,
    // false while `a` is playing, true once playback has switched to `b`.
    current: bool,
}
impl<T: TerminatingAnimation, U: Animation> Animation for ChainedAnimation<T, U> {
    /// Plays `a` until it terminates, then switches permanently to `b`.
    fn next_frame(&mut self, frame: &mut Frame) {
        if !self.current {
            if self.a.ended() {
                // First animation finished: hand over to `b`, resetting it so it
                // starts from a clean state. (`current` is known false here, so
                // assigning `true` replaces the old `!self.current` toggle; a
                // leftover dbg! print was removed.)
                self.current = true;
                self.b.reset();
            } else {
                self.a.next_frame(frame)
            }
        }
        if self.current {
            self.b.next_frame(frame)
        }
    }
    /// Rewinds to the start of the chain; `b` is reset lazily on switch-over.
    fn reset(&mut self) {
        self.a.reset();
        self.current = false;
    }
}
// chained animations only terminate if the final animation terminates
impl<T: TerminatingAnimation, U: TerminatingAnimation> TerminatingAnimation
    for ChainedAnimation<T, U>
{
    fn ended(&self) -> bool {
        // Done only once playback has switched to `b` and `b` itself has ended.
        self.current && self.b.ended()
    }
}
impl_maybe_term!(ChainedAnimation<T: TerminatingAnimation, U: TerminatingAnimation>);
/// Replays `inner` each time it ends, allowing up to `loops` restarts.
#[derive(Debug)]
#[cfg_attr(feature = "visual", derive(bevy_inspector_egui::Inspectable))]
pub struct RepeatedAnimation<T> {
    inner: T,
    // Number of restarts allowed after the first run.
    loops: usize,
    // Restarts performed so far.
    count: usize,
}
impl<T: TerminatingAnimation> Animation for RepeatedAnimation<T> {
    /// Restarts the inner animation whenever it ends, until `loops` restarts
    /// have been used; then lets the final run's last state stand.
    fn next_frame(&mut self, frame: &mut Frame) {
        if self.inner.ended() && self.count < self.loops {
            // A leftover dbg! that logged every restart to stderr was removed.
            self.inner.reset();
            self.count += 1;
        }
        self.inner.next_frame(frame);
    }
    /// Rewinds the inner animation and the restart counter.
    fn reset(&mut self) {
        self.inner.reset();
        self.count = 0;
    }
}
impl<T: TerminatingAnimation> TerminatingAnimation for RepeatedAnimation<T> {
    fn ended(&self) -> bool {
        // Finished once every allowed restart is used and the final run ended.
        self.count >= self.loops && self.inner.ended()
    }
}
impl_maybe_term!(RepeatedAnimation<T: TerminatingAnimation>);
// Drives a lock-guarded animation through the same trait, taking the write
// lock for each call. Panics if the lock is poisoned (unwrap on the guard).
impl<T: Animation> Animation for RwLock<T> {
    fn next_frame(&mut self, frame: &mut Frame) {
        self.write().unwrap().next_frame(frame);
    }
    fn reset(&mut self) {
        self.write().unwrap().reset();
    }
}
impl<T: TerminatingAnimation> TerminatingAnimation for RwLock<T> {
    // Read lock suffices: `ended` only inspects state.
    fn ended(&self) -> bool {
        self.read().unwrap().ended()
    }
}
// Shared-ownership variant: lets clones of the same animation be driven from
// multiple owners. Panics if the lock is poisoned.
impl<T: Animation> Animation for Arc<RwLock<T>> {
    fn next_frame(&mut self, frame: &mut Frame) {
        self.write().unwrap().next_frame(frame);
    }
    fn reset(&mut self) {
        self.write().unwrap().reset();
    }
}
impl<T: TerminatingAnimation> TerminatingAnimation for Arc<RwLock<T>> {
    // Read lock suffices: `ended` only inspects state.
    fn ended(&self) -> bool {
        self.read().unwrap().ended()
    }
}
|
extern crate criterion;
use buffered_index_writer::*;
use criterion::{Criterion, *};
use directory::RamDirectory;
/// Deterministic pseudo-random value for benchmark input: for even `i`, the
/// square of `i` reduced mod 16 000; for odd `i`, a tiny residue (`i % 3`).
fn pseudo_rand(i: u32) -> u32 {
    if i % 2 != 0 {
        return i % 3;
    }
    // Square in u64 so large even inputs cannot overflow before the reduction.
    let square = u64::from(i) * u64::from(i);
    (square % 16_000) as u32
}
/// Criterion benchmark: measures BufferedIndexWriter insert throughput across
/// a range of element counts, plotted on a logarithmic scale.
fn criterion_benchmark(c: &mut Criterion) {
    let plot_config = PlotConfiguration::default().summary_scale(AxisScale::Logarithmic);
    // Element counts to benchmark, spanning four orders of magnitude.
    let parameters = vec![250, 2_500, 100_000, 150_000, 190_000, 250_000, 2_500_000];
    let benchmark = ParameterizedBenchmark::new(
        "buffered",
        |b, i| {
            b.iter(|| {
                // Fresh in-memory directory per iteration so runs don't interfere.
                let directory = Box::new(RamDirectory::create());
                let mut ind = BufferedIndexWriter::new_unstable_sorted(directory);
                for i in 0..*i {
                    ind.add(i, pseudo_rand(i)).unwrap();
                }
            })
        },
        parameters,
    )
    .plot_config(plot_config)
    // Each inserted entry is two u32 values = 8 bytes.
    .throughput(|s| Throughput::Bytes(s * 8 as u32));
    c.bench("insert throughput", benchmark);
}
criterion_group!(benches, criterion_benchmark);
criterion_main!(benches);
|
//! This is the documentation for the SE library.
//!
//! * Developed by Georg Bramm
//! * Type: encryption (structured)
//! * Setting: PRP, PRF
//! * Date: 12/2019
//!
#![allow(dead_code)]
#[macro_use]
extern crate serde_derive;
extern crate serde;
extern crate serde_json;
extern crate crypto;
extern crate bincode;
extern crate rand;
extern crate rust_hope;
extern crate mongodb;
#[macro_use]
extern crate rulinalg;
#[macro_use]
extern crate primitive_types;
extern crate fpe;
extern crate num_traits;
extern crate osm_xml as osm;
pub mod utils;
pub mod schemes;
|
pub mod bisect {
    /// Returns the leftmost index at which `x` can be inserted into the sorted
    /// slice `a` while keeping it sorted (semantics of Python's `bisect_left`).
    ///
    /// Takes `&[i32]` instead of `&Vec<i32>` — callers passing `&vec` still
    /// work via deref coercion, and plain slices/arrays now work too.
    pub fn bisect_left(a: &[i32], x: i32) -> usize {
        let mut lower = 0;
        let mut higher = a.len();
        while lower < higher {
            // Midpoint written as lower + (higher - lower) / 2 to avoid the
            // (theoretical) overflow of (lower + higher) / 2.
            let mid = lower + (higher - lower) / 2;
            if a[mid] < x {
                lower = mid + 1;
            } else {
                higher = mid;
            }
        }
        lower
    }

    /// Inserts `x` into the sorted vector `a`, keeping it sorted. If equal
    /// elements exist, `x` lands to their left.
    pub fn insort(a: &mut Vec<i32>, x: i32) {
        let i = bisect_left(a, x);
        a.insert(i, x);
    }
}
#[cfg(test)]
mod tests {
    // Mirrors Python's bisect semantics: bisect_left returns the leftmost
    // valid insertion point (before any equal elements); insort keeps the
    // vector sorted.
    #[test]
    fn test_bisect_left_empty() {
        let a = vec![];
        let i = super::bisect::bisect_left(&a, 3);
        assert_eq!(0, i);
    }
    #[test]
    fn test_bisect_left_3() {
        let a = vec![1,2,4,8,9];
        let i = super::bisect::bisect_left(&a, 3);
        assert_eq!(2, i);
    }
    #[test]
    fn test_bisect_left_3_with_dup() {
        let a = vec![1,2,3,4,8,9];
        let i = super::bisect::bisect_left(&a, 3);
        assert_eq!(2, i);
    }
    #[test]
    fn test_bisect_left_3_with_dup2() {
        // With duplicates, the insertion point is before the first equal element.
        let a = vec![1,2,3,3,4,8,9];
        let i = super::bisect::bisect_left(&a, 3);
        assert_eq!(2, i);
    }
    #[test]
    fn test_bisect_left_front() {
        let a = vec![1,2,3,4,8,9];
        let i = super::bisect::bisect_left(&a, 0);
        assert_eq!(0, i);
    }
    #[test]
    fn test_bisect_left_back() {
        let a = vec![1,2,3,4,8,9];
        let i = super::bisect::bisect_left(&a, 10);
        assert_eq!(6, i);
    }
    #[test]
    fn test_insort_5() {
        let mut a = vec![1,2,3,4,8,9];
        super::bisect::insort(&mut a, 5);
        assert_eq!(5, a[4]);
    }
    #[test]
    fn test_insort_0() {
        let mut a = vec![1,2,3,4,8,9];
        super::bisect::insort(&mut a, 0);
        assert_eq!(0, a[0]);
    }
    #[test]
    fn test_insort_10() {
        let mut a = vec![1,2,3,4,8,9];
        super::bisect::insort(&mut a, 10);
        assert_eq!(10, a[6]);
    }
}
|
/// Abstraction over a fixed-width machine word, giving access to both the
/// signed and unsigned reinterpretation of the same underlying bits.
pub trait DataType: Sized + Copy {
    /// Signed integer type with the same bit width as the word.
    type Signed: num::traits::Signed + Sized + Copy;
    /// Unsigned integer type with the same bit width as the word.
    type Unsigned: num::traits::Unsigned + Sized + Copy;
    /// Size of the word in bytes (derived from the implementing type).
    fn byte_len(self) -> usize {
        std::mem::size_of::<Self>()
    }
    /// Returns the word's bits reinterpreted as a signed integer.
    fn signed(self) -> Self::Signed;
    /// Returns the word's bits as an unsigned integer.
    fn unsigned(self) -> Self::Unsigned;
    /// Stores `i`, reinterpreting its bits as the unsigned representation.
    fn store_signed(&mut self, i: Self::Signed);
    /// Stores `i` directly as the word's value.
    fn store_unsigned(&mut self, i: Self::Unsigned);
}
/// Generates a newtype word struct plus its `DataType` impl for each
/// `(Name, unsigned, signed)` triple.
macro_rules! data_type_impl {
    ($(($name:ident, $unsigned:ty, $signed:ty)),*) => ($(
        /// Fixed-width word, stored as its unsigned representation.
        #[derive(Copy, Clone)]
        pub struct $name($unsigned);
        impl DataType for $name {
            type Signed = $signed;
            type Unsigned = $unsigned;
            fn signed(self) -> Self::Signed {
                // An `as` cast between same-width integer types is defined
                // to be a bit reinterpretation, so the previous `unsafe`
                // transmute was unnecessary.
                self.0 as $signed
            }
            fn unsigned(self) -> Self::Unsigned {
                self.0
            }
            fn store_signed(&mut self, i: Self::Signed) {
                // Same-width cast back to the unsigned storage type.
                self.0 = i as $unsigned;
            }
            fn store_unsigned(&mut self, i: Self::Unsigned) {
                self.0 = i;
            }
        }
    )*)
}
data_type_impl!(
    (HalfWord, u8, i8),
    (Word, u16, i16),
    (DoubleWord, u32, i32),
    (QuadWord, u64, i64),
    (OctoWord, u128, i128)
);
|
use lapin::{
Connection, ConnectionProperties, Channel, ExchangeKind, Queue,
options::{ExchangeDeclareOptions, QueueBindOptions, QueueDeclareOptions}
};
use amq_protocol_types::FieldTable;
mod consumer_options;
mod consumer;
mod publisher_options;
mod publisher;
pub use consumer_options::ConsumerOptions;
pub use consumer::Consumer;
pub use publisher_options::PublisherOptions;
pub use publisher::Publisher;
/// Opens a blocking connection to the AMQP broker at `broker_address`.
///
/// Takes `&str` (backward-compatible with former `&String` callers).
///
/// # Panics
/// Panics with the broker address and error if the connection fails.
pub fn connect(broker_address: &str) -> Connection {
    Connection::connect(broker_address, ConnectionProperties::default())
        .wait()
        // `unwrap_or_else` avoids building the panic message on the
        // success path (clippy::expect_fun_call).
        .unwrap_or_else(|e| panic!("Error connecting to '{}': {:?}", broker_address, e))
}
/// Creates a new channel on the given connection.
///
/// # Panics
/// Panics if the channel cannot be created.
pub fn create_channel(connection: &Connection) -> Channel {
    // Bug fix: the failure message previously said "Error creating
    // connection" although the failing operation is channel creation.
    connection.create_channel().wait().expect("Error creating channel")
}
/// Declares the exchange `exchange_name` of kind `exchange_type`.
///
/// Takes `&str` (backward-compatible with former `&String` callers).
///
/// # Panics
/// Panics if the declaration fails.
pub fn create_exchange(channel: &Channel,
                       exchange_name: &str,
                       exchange_type: ExchangeKind,
                       exchange_options: ExchangeDeclareOptions,
)
{
    channel.exchange_declare(exchange_name, exchange_type, exchange_options, FieldTable::default())
        .wait()
        .unwrap()
}
/// Declares the queue `queue_name` and returns its handle.
///
/// Takes `&str` (backward-compatible with former `&String` callers).
///
/// # Panics
/// Panics if the declaration fails.
pub fn create_queue(channel: &Channel,
                    queue_name: &str,
                    queue_options: QueueDeclareOptions,
) -> Queue
{
    channel.queue_declare(queue_name, queue_options, FieldTable::default())
        .wait()
        .expect("Error creating queue")
}
/// Binds `queue_name` to `exchange_name` with the given `routing_key`.
///
/// Takes `&str` (backward-compatible with former `&String` callers).
///
/// # Panics
/// Panics if the bind fails.
pub fn bind_queue(channel: &Channel,
                  queue_name: &str,
                  exchange_name: &str,
                  routing_key: &str,
                  bind_options: QueueBindOptions,
)
{
    channel.queue_bind(queue_name, exchange_name, routing_key, bind_options, FieldTable::default())
        .wait()
        .unwrap()
}
/// Closes the channel with reply code 200 ("OK"), panicking on failure.
pub fn close_channel(channel: &Channel) {
    let outcome = channel.close(200, "OK").wait();
    outcome.expect("Channel close");
}
pub fn close_connection(connection: &Connection) {
connection.close(200, "OK").wait().expect("Connection close");
} |
use crate::utils;
use wasm_bindgen::prelude::*;
#[wasm_bindgen]
pub fn get_web_cam() {
    // Look up the page's document; the handle is deliberately unused for
    // now (placeholder for the upcoming webcam wiring).
    let _doc = utils::get_document();
}
|
/*
* Datadog API V1 Collection
*
* Collection of all Datadog Public endpoints.
*
* The version of the OpenAPI document: 1.0
* Contact: support@datadoghq.com
* Generated by: https://openapi-generator.tech
*/
/// MetricMetadata : Object with all metric related metadata.
///
/// Every field is optional; the derived `Default` produces the all-`None`
/// value, which is what `new()` returns.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct MetricMetadata {
    /// Metric description.
    #[serde(rename = "description", skip_serializing_if = "Option::is_none")]
    pub description: Option<String>,
    /// Name of the integration that sent the metric if applicable.
    #[serde(rename = "integration", skip_serializing_if = "Option::is_none")]
    pub integration: Option<String>,
    /// Per unit of the metric such as `second` in `bytes per second`.
    #[serde(rename = "per_unit", skip_serializing_if = "Option::is_none")]
    pub per_unit: Option<String>,
    /// A more human-readable and abbreviated version of the metric name.
    #[serde(rename = "short_name", skip_serializing_if = "Option::is_none")]
    pub short_name: Option<String>,
    /// StatsD flush interval of the metric in seconds if applicable.
    #[serde(rename = "statsd_interval", skip_serializing_if = "Option::is_none")]
    pub statsd_interval: Option<i64>,
    /// Metric type such as `gauge` or `rate`.
    #[serde(rename = "type", skip_serializing_if = "Option::is_none")]
    pub _type: Option<String>,
    /// Primary unit of the metric such as `byte` or `operation`.
    #[serde(rename = "unit", skip_serializing_if = "Option::is_none")]
    pub unit: Option<String>,
}
impl MetricMetadata {
    /// Object with all metric related metadata (all fields unset).
    pub fn new() -> MetricMetadata {
        // All fields are `Option` initialised to `None`, so the derived
        // `Default` is exactly the previous hand-written initializer.
        MetricMetadata::default()
    }
}
|
use std;
// NOTE(review): this file is pre-1.0 Rust (`str::lines_iter`,
// `str::words_iter`, and the `{|x| ...}` block-closure syntax); it will not
// compile on a modern rustc and appears to be kept as a historical sample.
fn main () {
    let data = "abracadabra";
    // let _x = str::windowed(3u, data);
    // Invoke the callback for every line / whitespace-separated word.
    let _y = str::lines_iter(data, {|x| io::println(x)});
    let _z = str::words_iter(data, {|x| io::println(x)});
    // Predicate sweeps: do all / any characters satisfy `is_uppercase`?
    let _a = str::all(data, char::is_uppercase);
    let _b = str::any(data, char::is_uppercase);
    // let _c = str::map(data, char::to_upper);
}
|
use super::ECCurve::big::BIG;
use super::ECCurve::dbig::DBIG;
use super::ECCurve::ecp::ECP;
/// Single-width big integer from the underlying ECCurve backend.
pub type BigNum = BIG;
/// Double-width big integer (holds products before modular reduction).
pub type DoubleBigNum = DBIG;
/// Point on the curve's first group, G1.
pub type GroupG1 = ECP;
|
/*===============================================================================================*/
// Copyright 2016 Kyle Finlay
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/*===============================================================================================*/
use ::util::math::Util;
/*===============================================================================================*/
/*------COLOUR STRUCT----------------------------------------------------------------------------*/
/*===============================================================================================*/
/// The Colour struct
///
/// Used to represent RGBA colours.
/// 32-bit floats are used for the values.
#[derive (Copy, Clone, Default, Serialize, Deserialize)]
pub struct Colour {

    // Public
    // NOTE: channels are conventionally in the [0.0, 1.0] range but are not
    // clamped by construction; the derived `Default` is all-zero.

    /// Red component
    pub r: f32,
    /// Green component
    pub g: f32,
    /// Blue component
    pub b: f32,
    /// Alpha component
    pub a: f32
}
/*===============================================================================================*/
/*------COLOUR PUBLIC METHODS--------------------------------------------------------------------*/
/*===============================================================================================*/
impl Colour {
/// Formats the colour as a string.
///
/// # Return value
/// The colour formatted as a string.
///
/// # Examples
/// ```
/// # use ion_core::util::math::Colour;
/// #
/// let colour = Colour {r : 0.5, g : 1.0, b : 0.0, a : 1.0};
/// println! ("Colour = {}", colour.to_string ());
/// ```
/// ```c
/// Output : Colour = 0.5, 1.0, 0.0, 1.0
pub fn to_string (&self) -> String {
format! ("{}, {}, {}, {}", self.r, self.g, self.b, self.a)
}
/*===============================================================================================*/
/*------COLOUR PUBLIC STATIC METHODS-------------------------------------------------------------*/
/*===============================================================================================*/
/// Creates a colour with default values.
///
/// # Return value
/// A new colour instance.
///
/// # Examples
/// ```
/// # use ion_core::util::math::Colour;
/// #
/// let col = Colour::new (); // Resulting colour is white
pub fn new () -> Colour {
Colour {r: 1.0,
g: 1.0,
b: 1.0,
a: 1.0}
}
/*-----------------------------------------------------------------------------------------------*/
/// Creates a new colour from an RGBA value.
///
/// Each colour channel has a 0-255 range.
///
/// # Arguments
/// * `r` - Red colour channel.
/// * `g` - Green colour channel.
/// * `b` - Blue colour channel.
/// * `a` - Aplha colour channel.
///
/// # Return value
/// A new colour instance.
pub fn from_rgba (r: i32, g: i32, b: i32, a: i32) -> Colour {
Colour {r: Util::clamp (r as f32, 0.0, 255.0) / 255.0,
g: Util::clamp (g as f32, 0.0, 255.0) / 255.0,
b: Util::clamp (b as f32, 0.0, 255.0) / 255.0,
a: Util::clamp (a as f32, 0.0, 255.0) / 255.0}
}
/*-----------------------------------------------------------------------------------------------*/
/// Creates a new colour from a hexadecimal value.
///
/// # Arguments
/// * `hex_value` - A string containing a hex colour value.
///
/// # Return value
/// A new colour instance.
///
/// # Examples
/// ```
/// # use ion_core::util::math::Colour;
/// #
/// let col = Colour::from_hex ("20b2aaff");
/// println! ("{}", col.to_string ());
/// ```
/// ```c
/// Output : 0.1234902, 0.69803923, 0.6666667, 1
pub fn from_hex (hex_value: &str) -> Colour {
assert! (hex_value.len () == 8, "Hex colour value must be 8 characters long");
Colour {r: (i32::from_str_radix (&hex_value [0..2], 16).unwrap () as f32) / 255.0,
g: (i32::from_str_radix (&hex_value [2..4], 16).unwrap () as f32) / 255.0,
b: (i32::from_str_radix (&hex_value [4..6], 16).unwrap () as f32) / 255.0,
a: (i32::from_str_radix (&hex_value [6..8], 16).unwrap () as f32) / 255.0}
}
/*-----------------------------------------------------------------------------------------------*/
/// Creates a new colour from HSV (hue, saturation, value).
///
/// # Arguments
/// * `hue` - The HSV hue value.
/// * `saturation` - The HSV saturation value.
/// * `value` - The HSV colour value.
///
/// # Return value
/// A new colour instance.
///
/// # Examples
/// ```
/// # use ion_core::util::math::Colour;
/// #
/// let col = Colour::from_hsv (197, 43, 92);
/// println! ("{}", col.to_string ());
/// ```
/// ```c
/// Output : 0.5244, 0.80791336, 0.92, 1
pub fn from_hsv (hue: i32, saturation: i32, value: i32) -> Colour {
// Clamp all values to respective ranges
let h_clamped = Util::clamp (hue as f32, 0.0, 360.0);
let s_clamped = Util::clamp (saturation as f32, 0.0, 100.0) / 100.0;
let v_clamped = Util::clamp (value as f32, 0.0, 100.0) / 100.0;
// Set the chroma, Hdash, x, and min values
let c = v_clamped * s_clamped;
let h = h_clamped / 60.0;
let x = c * (1.0 - (h % 2.0 - 1.0));
let m = v_clamped - c;
// Get the raw rgb values
let rgb = match h {
_ if h < 1.0 => (c, x, 0.0),
_ if h < 2.0 => (x, c, 0.0),
_ if h < 3.0 => (0.0, c, x),
_ if h < 4.0 => (0.0, x, c),
_ if h < 5.0 => (x, 0.0, c),
_ if h < 6.0 => (c, 0.0, x),
_ => (0.0, 0.0, 0.0)
};
// Return the final colour (rgb + min)
Colour {r: rgb.0 + m,
g: rgb.1 + m,
b: rgb.2 + m,
a: 1.0}
}
/*-----------------------------------------------------------------------------------------------*/
/// Outputs the colour as an RGBA value.
///
/// It returns a tuple, containing four i32 values.
///
/// # Arguments
/// * `colour` - The colour to convert to a rgba value.
///
/// # Return value
/// A tuple containing the `r`, `g`, `b`, and `a` values.
pub fn to_rgba (colour: &Colour) -> (i32, i32, i32, i32) {
((Util::clamp (colour.r * 255.0, 0.0, 255.0)) as i32,
(Util::clamp (colour.g * 255.0, 0.0, 255.0)) as i32,
(Util::clamp (colour.b * 255.0, 0.0, 255.0)) as i32,
(Util::clamp (colour.a * 255.0, 0.0, 255.0)) as i32)
}
/*-----------------------------------------------------------------------------------------------*/
/// Outputs the colour as a hex value.
///
/// # Arguments
/// * `colour` - The colour to convert to a hex value.
///
/// # Return value
/// A String containing a hex colour value.
///
/// # Examples
/// ```
/// # use ion_core::util::math::Colour;
/// #
/// let col = Colour {r : 0.5, g : 0.2, b : 0.9, a : 1.0};
/// println! ("{}", Colour::to_hex (&col));
/// ```
/// ```c
/// Output : 7f33e5ff
pub fn to_hex (colour: &Colour) -> String {
format! ("{:x}{:x}{:x}{:x}", (colour.r * 255.0) as i32,
(colour.g * 255.0) as i32,
(colour.b * 255.0) as i32,
(colour.a * 255.0) as i32)
}
/*-----------------------------------------------------------------------------------------------*/
/// Outputs the colour as hsv
///
/// # Arguments
/// * `colour` - The colour to convert to a hsv value
///
/// # Return value
/// A tuple containing a HSV colour.
pub fn to_hsv (colour: &Colour) -> (i32, i32, i32) {
// Get the min, max, and delta colour values
let min = Util::min (Util::min (colour.r, colour.g), colour.b);
let max = Util::max (Util::max (colour.r, colour.g), colour.b);
let del = max - min;
// Set initial hsv value
let mut hsv = (0.0, (del / max) * 100.0, max * 100.0);
// If delta is not 0, calc h value
if del != 0.0 {
if colour.r >= max {
hsv.0 = (colour.g - colour.b) / del;
if hsv.0 < 0.0 {
hsv.0 += 6.0;
}
}
else if colour.g >= max {
hsv.0 = ((colour.b - colour.r) / del) + 2.0;
}
else if colour.b >= max {
hsv.0 = ((colour.r - colour.g) / del) + 4.0;
}
// Set final h value
hsv.0 *= 60.0;
}
(hsv.0 as i32, hsv.1 as i32, hsv.2 as i32)
}
/*-----------------------------------------------------------------------------------------------*/
/// Returns the colour red (1.0, 0.0, 0.0, 1.0),
///
/// # Return value
/// A new colour instance representing red.
pub fn red () -> Colour {
Colour {r: 1.0,
g: 0.0,
b: 0.0,
a: 1.0}
}
/*-----------------------------------------------------------------------------------------------*/
/// Returns the colour Green (0.0, 1.0, 0.0, 1.0).
///
/// # Return value
/// A new colour instance representing green.
pub fn green () -> Colour {
Colour {r: 0.0,
g: 1.0,
b: 0.0,
a: 1.0}
}
/*-----------------------------------------------------------------------------------------------*/
/// Returns the colour Blue (0.0, 0.0, 1.0, 1.0).
///
/// # Return value
/// A new colour instance representing blue.
pub fn blue () -> Colour {
Colour {r: 0.0,
g: 0.0,
b: 1.0,
a: 1.0}
}
/*-----------------------------------------------------------------------------------------------*/
/// Returns the colour Black (0.0, 0.0, 0.0, 1.0).
///
/// # Return value
/// A new colour instance representing black.
pub fn black () -> Colour {
Colour {r: 0.0,
g: 0.0,
b: 0.0,
a: 1.0}
}
/*-----------------------------------------------------------------------------------------------*/
/// Returns the colour white (1.0, 1.0, 1.0, 1.0).
///
/// # Return value
/// A new colour instance representing white.
pub fn white () -> Colour {
Colour {r: 1.0,
g: 1.0,
b: 1.0,
a: 1.0}
}
/*-----------------------------------------------------------------------------------------------*/
/// Returns the colour yellow (1.0, 1.0, 0.0, 1.0).
///
/// # Return value
/// A new colour instance representing yellow.
pub fn yellow () -> Colour {
Colour {r: 1.0,
g: 1.0,
b: 0.0,
a: 1.0}
}
/*-----------------------------------------------------------------------------------------------*/
/// Returns the colour magenta (1.0, 0.0, 1.0, 1.0).
///
/// # Return value
/// A new colour instance representing magenta.
pub fn magenta () -> Colour {
Colour {r: 1.0,
g: 0.0,
b: 1.0,
a: 1.0}
}
/*-----------------------------------------------------------------------------------------------*/
/// Returns the colour cyan (0.0, 1.0, 1.0, 1.0).
///
/// # Return value
/// A new colour instance representing cyan.
pub fn cyan () -> Colour {
Colour {r: 0.0,
g: 1.0,
b: 1.0,
a: 1.0}
}
/*-----------------------------------------------------------------------------------------------*/
/// Returns the colour light grey (0.75, 0.75, 0.75, 1.0).
///
/// # Return value
/// A new colour instance representing light grey.
pub fn light_grey () -> Colour {
Colour {r: 0.75,
g: 0.75,
b: 0.75,
a: 1.0}
}
/*-----------------------------------------------------------------------------------------------*/
/// Returns the colour grey (0.5, 0.5, 0.5, 1.0).
///
/// # Return value
/// A new colour instance representing grey.
pub fn grey () -> Colour {
Colour {r: 0.5,
g: 0.5,
b: 0.5,
a: 1.0}
}
/*-----------------------------------------------------------------------------------------------*/
/// Returns the colour dark grey.
///
/// # Return value
/// A new colour instance representing dark grey.
pub fn dark_grey () -> Colour {
Colour {r: 0.25,
g: 0.25,
b: 0.25,
a: 1.0}
}
/*-----------------------------------------------------------------------------------------------*/
/// Returns a completely transparent colour (0, 0, 0, 0).
///
/// # Return value
/// A new colour instance representing pure transparency.
pub fn clear () -> Colour {
Colour {r: 0.0,
g: 0.0,
b: 0.0,
a: 1.0}
}
/*-----------------------------------------------------------------------------------------------*/
// TODO: Add example to documentation
/// Linearly interpolates between two colours.
///
/// # Arguments
/// * `start` - The starting colour.
/// * `end` - The end colour.
/// * `percentage` - How far between `start` and `end` the resulting colour will be.
///
/// # Return value
/// A new colour instance.
pub fn lerp (start: &Colour, end: &Colour, percentage: f32) -> Colour {
Colour {r: Util::lerp (start.r, end.r, percentage),
g: Util::lerp (start.g, end.g, percentage),
b: Util::lerp (start.b, end.b, percentage),
a: Util::lerp (start.a, end.a, percentage)}
}
/*-----------------------------------------------------------------------------------------------*/
/// Linearly interpolates between two colours without clamping.
///
/// # Arguments
/// * `start` - The starting colour.
/// * `end` - The end colour.
/// * `percentage` - How far between `start` and `end` the resulting colour will be.
///
/// # Return value
/// A new colour instance.
pub fn lerp_unclamped (start: &Colour, end: &Colour, percentage: f32) -> Colour {
Colour {r: Util::lerp_unclamped (start.r, end.r, percentage),
g: Util::lerp_unclamped (start.g, end.g, percentage),
b: Util::lerp_unclamped (start.b, end.b, percentage),
a: Util::lerp_unclamped (start.a, end.a, percentage)}
}
}
|
use anyhow::{anyhow, Result};
use rusoto_core::{region, HttpClient};
use rusoto_credential::ProfileProvider;
use rusoto_ec2::Ec2Client;
use rusoto_ecs::EcsClient;
use structopt::StructOpt;
mod application;
mod domain;
mod settings;
mod ui;
/// Connect to AWS EC2 hosts via a Bastion / Jump host
// NOTE: the `///` lines on this struct and its fields become the CLI help
// text via structopt, so they are user-facing and left unchanged.
#[derive(StructOpt)]
#[structopt(name = "heimdallr", global_settings = &[structopt::clap::AppSettings::AllowLeadingHyphen])]
struct Heimdallr {
    /// Profile name as specified in your configuration file
    #[structopt(name = "profile", long, short = "p", default_value = "default")]
    profile: String,
    // Subcommand to run (List / Grant / Revoke / Connect / Update).
    #[structopt(subcommand)]
    cmd: Command,
}
// CLI subcommands. The `///` doc comments double as structopt help text
// (user-facing), so only `//` comments are used for reviewer notes.
#[derive(StructOpt)]
enum Command {
    /// List all running instances
    List,
    /// Add your IP to a security group to allow ingress
    Grant {
        /// Override the security group id that controls ingress to the bastion server for the
        /// specified profile
        #[structopt(name = "security-group-id", long, short = "s")]
        security_group_id: Option<String>,
        /// Descriptive text to include with your security group entry
        #[structopt(name = "description", long, short = "d")]
        description: Option<String>,
    },
    /// Revoke your IP from a security group to prevent future ingress
    Revoke {
        /// Override the security group id that controls ingress to the bastion server for the
        /// specified profile
        #[structopt(name = "security-group-id", long, short = "s")]
        security_group_id: Option<String>,
    },
    /// Connect to a running instance
    Connect {
        /// Override the host name of the bastion server for the specified profile
        #[structopt(name = "dns-name", long, short = "d")]
        dns_name: Option<String>,
        /// Override the ssh port of the bastion server for the specified profile
        #[structopt(name = "bastion-port", long, short = "p")]
        bastion_port: Option<u16>,
        /// Override the ssh user of the bastion server for the specified profile
        #[structopt(name = "bastion-user", long, short = "u")]
        bastion_user: Option<String>,
        /// Override the user of the ec2 server for the specified profile
        #[structopt(name = "ec2-user", long, short = "e")]
        ec2_user: Option<String>,
        // TODO(mmk) Is there a better variable type to verify that the file exists?
        /// Override the ssh identity file to use for the specified profile
        #[structopt(name = "identity-file", long, short = "i")]
        identity_file: Option<String>,
        /// The target to connect. Supported formats are host, user@host, cluster#service,
        /// cluster#service#container
        #[structopt()]
        target: String,
        /// An optional command to execute on the specified target
        // Defaults to an interactive shell when no command is given.
        #[structopt(default_value = "bash")]
        cmd: Vec<String>,
    },
    /// Update this executable to the latest version
    Update,
}
#[tokio::main]
// Entry point: load settings, build AWS clients for the selected profile,
// then dispatch the parsed subcommand to its handler.
async fn main() -> Result<()> {
    let settings = settings::Settings::new()?;
    let opt = Heimdallr::from_args();
    // Resolve the profile's settings; a missing entry is a user error.
    let profile_settings = match settings.profiles.get(&opt.profile) {
        Some(s) => Ok(s),
        _ => Err(anyhow!(
            "Could not find specified profile entry {}. Please check your configuration file.",
            &opt.profile
        )),
    }?;
    // Credentials come from the AWS profile named in the settings file;
    // the profile must be set before the provider is cloned below.
    let mut provider = ProfileProvider::new()?;
    provider.set_profile(profile_settings.aws_profile.clone());
    let region = profile_settings.aws_region.parse::<region::Region>()?;
    // Separate EC2 and ECS clients sharing the same credentials/region.
    let ec2_client = Ec2Client::new_with(HttpClient::new()?, provider.clone(), region.clone());
    let ecs_client = EcsClient::new_with(HttpClient::new()?, provider, region);
    let security_group_handler = application::security_groups::Handler::new(&ec2_client);
    let list_instances_handler = application::list_instances::Handler::new(&ec2_client);
    let connect_handler = application::connect::Handler::new(&ecs_client, &ec2_client);
    // Dispatch: CLI overrides take precedence over profile settings.
    match opt.cmd {
        Command::List => ui::list::list(list_instances_handler).await,
        Command::Grant {
            security_group_id,
            description,
        } => {
            security_group_handler
                .grant_access(
                    security_group_id.unwrap_or_else(|| profile_settings.security_group_id.clone()),
                    description,
                )
                .await
        }
        Command::Revoke { security_group_id } => {
            security_group_handler
                .revoke_access(
                    security_group_id.unwrap_or_else(|| profile_settings.security_group_id.clone()),
                )
                .await
        }
        Command::Connect {
            dns_name,
            bastion_port,
            bastion_user,
            ec2_user,
            identity_file,
            target,
            cmd,
        } => {
            ui::connect::connect(
                connect_handler,
                dns_name.unwrap_or_else(|| profile_settings.dns_name.clone()),
                bastion_port.unwrap_or_else(|| profile_settings.bastion_port),
                bastion_user.unwrap_or_else(|| profile_settings.bastion_user.clone()),
                ec2_user.unwrap_or_else(|| profile_settings.ec2_user.clone()),
                identity_file.unwrap_or_else(|| profile_settings.identity_file.clone()),
                &target,
                cmd,
            )
            .await
        }
        Command::Update => {
            // Self-update performs blocking I/O, so it runs on the blocking
            // thread pool rather than the async executor.
            tokio::task::spawn_blocking(move || {
                let status = self_update::backends::github::Update::configure()
                    .repo_owner("keelerm84")
                    .repo_name(env!("CARGO_PKG_NAME"))
                    .bin_name("heimdallr")
                    .show_download_progress(true)
                    .current_version(env!("CARGO_PKG_VERSION"))
                    .build()?
                    .update()?;
                println!("Update status: `{}`!", status.version());
                Ok(())
            })
            .await?
        }
    }
}
|
//! This crate is responsible for doing the binary encoding for SVM transactions.
//! Its code is compiled as a single WASM file and it should be integrated by clients (e.g `smapp / CLI Wallet`).
//!
//! By doing that, a client can locally encode a binary transaction without having to re-implement all the logic
//! of the `svm-codec`.
//!
//! SVM's CI emits the WASM package of `svm-codec` as one of its artifacts (`svm_codec.wasm`)
#![deny(missing_docs)]
#![deny(unused)]
#![deny(dead_code)]
#![allow(unreachable_code)]
#![feature(vec_into_raw_parts)]
mod ext;
mod field;
mod inputdata;
mod section;
mod version;
pub mod call;
pub mod spawn;
pub mod template;
pub use ext::{ReadExt, WriteExt};
pub use field::Field;
pub mod api;
pub mod context;
pub mod envelope;
pub use section::{SectionPreview, SectionsDecoder, SectionsEncoder};
/// Encoding of receipts.
pub mod receipt;
mod error;
pub use error::ParseError;
/// # WASM API
///
/// The following API methods are annotated with `#[cfg(target_arch = "wasm32")]`.
/// In order to output a `.wasm` file run (or run `./build.sh` under the crate root directory).
/// ```
//// cargo +nightly build --release --target wasm32-unknown-unknown
/// ```
///
/// The emitted `svm_codec.wasm` is being tested in the `examples/test.js`
/// In order to build and test run `./run.sh` under the `examples` directory.
///
/// The CI of the `SVM` also runs the js tests and outputs `svm_codec.wasm` under the artifacts.
///
/// ## WASM API Usage
///
/// Before calling `wasm_deploy / wasm_spawn / wasm_call` we need first to allocate
/// a WASM buffer using the `wasm_alloc` method. After the buffer isn't needed anymore, make sure to
/// call the `wasm_free` method. (otherwise it'll be a memory-leak).
///
/// The data returned by `wasm_deploy / wasm_spawn / wasm_call` is a pointer to a new allocated
/// WASM buffer. This WASM buffer is allocated internally by the method and have to be freed later too using `wasm_free`.
///
///
/// WASM Buffer `Data` for Success result:
///
/// ```text
/// +------------------------------------------------+
/// | OK_MAKER = 1 (1 byte) | SVM binary transaction |
/// +------------------------------------------------+
/// ```
///
///
/// WASM Buffer `Data` for Error result:
///
/// ```text
/// +------------------------------------------------+
/// | ERR_MAKER = 0 (1 byte) | UTF-8 String (error) |
/// +------------------------------------------------+
/// ```
///
#[cfg(target_arch = "wasm32")]
macro_rules! wasm_func_call {
    ($func:ident, $buf_offset:expr) => {{
        // Delegate to the matching `api::wasm` function; on failure the
        // error is serialised into a freshly allocated WASM buffer and
        // that buffer's offset is returned instead of the result offset.
        match api::wasm::$func($buf_offset as usize) {
            Ok(tx_offset) => tx_offset as _,
            Err(err) => {
                let err_offset = api::wasm::into_error_buffer(err);
                err_offset as _
            }
        }
    }};
}
/// ## WASM `Deploy Template`
///
/// Reads the WASM buffer given at parameter `offset` containing a JSON value.
/// Encodes a `Deploy Template` binary-transaction using that JSON value.
///
/// Returns a pointer to a new WASM buffer holding the encoded transaction.
/// If the encoding failed, the returned WASM buffer will contain a String containing the error message.
#[no_mangle]
#[cfg(target_arch = "wasm32")]
pub extern "C" fn wasm_encode_deploy(offset: i32) -> i32 {
    // Error results are returned as an error-buffer offset (see macro).
    wasm_func_call!(encode_deploy, offset)
}
/// ## WASM `Spawn Account`
///
/// Reads the WASM buffer given at parameter `offset` containing a JSON value.
/// Encodes a `Spawn Account` binary-transaction using that JSON value.
///
/// Returns a pointer to a new WASM buffer holding the encoded transaction.
/// If the encoding fails, the returned WASM buffer will contain a String containing the error message.
#[no_mangle]
#[cfg(target_arch = "wasm32")]
pub extern "C" fn wasm_encode_spawn(offset: i32) -> i32 {
    // Error results are returned as an error-buffer offset (see macro).
    wasm_func_call!(encode_spawn, offset)
}
/// Decodes the encoded `Spawn Account` given as a WASM buffer (parameter `offset`).
///
/// Returns a pointer to a new WASM buffer holding the decoded transaction.
/// If the decoding fails, the returned WASM buffer will contain a String containing the error message.
#[no_mangle]
#[cfg(target_arch = "wasm32")]
pub extern "C" fn wasm_decode_spawn(offset: i32) -> i32 {
    // Error results are returned as an error-buffer offset (see macro).
    wasm_func_call!(decode_spawn, offset)
}
/// ## WASM `Call Account`
///
/// Reads the WASM buffer given at parameter `offset` containing a JSON value.
/// Encodes a `Call Account` binary-transaction using that JSON value.
///
/// Returns a pointer to a new WASM buffer holding the encoded transaction.
/// If the encoding failed, the returned WASM buffer will contain a String containing the error message.
#[no_mangle]
#[cfg(target_arch = "wasm32")]
pub extern "C" fn wasm_encode_call(offset: i32) -> i32 {
    // Error results are returned as an error-buffer offset (see macro).
    wasm_func_call!(encode_call, offset)
}
/// Decodes the encoded `Call Account` given as a WASM buffer (parameter `offset`).
///
/// Returns a pointer to a new WASM buffer holding the decoded transaction.
/// If the decoding fails, the returned WASM buffer will contain a String containing the error message.
#[no_mangle]
#[cfg(target_arch = "wasm32")]
pub extern "C" fn wasm_decode_call(offset: i32) -> i32 {
    // Error results are returned as an error-buffer offset (see macro).
    wasm_func_call!(decode_call, offset)
}
/// ## WASM Buffer Allocation
///
/// Allocates a new WASM Buffer holding data of `length` bytes.
///
/// For more info read: `api::wasm::alloc`
#[no_mangle]
#[cfg(target_arch = "wasm32")]
pub extern "C" fn wasm_alloc(length: i32) -> i32 {
    // Allocate a WASM buffer of `length` bytes and hand back its offset;
    // the caller must later release it with `wasm_free`.
    api::wasm::alloc(length as usize) as _
}
/// ## WASM Buffer Freeing
///
/// Frees the WASM buffer allocated starting from offset `offset`.
///
/// For more info read: `api::wasm::free`
#[no_mangle]
#[cfg(target_arch = "wasm32")]
pub extern "C" fn wasm_free(offset: i32) {
    // Releases the buffer previously returned by `wasm_alloc` (or by the
    // encode/decode entry points).
    api::wasm::free(offset as usize);
}
/// ## WASM Buffer Length
///
/// Returns the buffer `Data` byte-length
#[no_mangle]
#[cfg(target_arch = "wasm32")]
pub extern "C" fn wasm_buffer_length(offset: i32) -> i32 {
    // Byte length of the buffer's `Data` section.
    api::wasm::wasm_buf_len(offset as usize) as _
}
/// ## WASM Buffer Data
///
/// Returns a pointer to the buffer `Data`
#[no_mangle]
#[cfg(target_arch = "wasm32")]
pub extern "C" fn wasm_buffer_data(offset: i32) -> i32 {
    // Offset of the buffer's `Data` section; the matching length is
    // available via `wasm_buffer_length`.
    let (data_offset, _) = api::wasm::wasm_buf_data_offset(offset as usize);
    data_offset as _
}
/// ## Input Data (i.e `CallData/VerifyData`)
///
/// Reads the WASM buffer given at parameter `offset` containing a JSON value.
/// Encodes the `Input Data`, and returns a pointer to a new WASM buffer holding the encoded `Input Data`.
/// If the encoding fails, the returned WASM buffer will contain a String containing the error message.
#[no_mangle]
#[cfg(target_arch = "wasm32")]
pub extern "C" fn wasm_encode_inputdata(offset: i32) -> i32 {
    // Error results are returned as an error-buffer offset (see macro).
    wasm_func_call!(encode_inputdata, offset)
}
/// Decodes the encoded `Input Data` given as a WASM buffer (parameter `offset`).
///
/// Returns a pointer to a new WASM buffer holding the decoded `Input Data`.
/// If the decoding fails, the returned WASM buffer will contain a String containing the error message.
#[no_mangle]
#[cfg(target_arch = "wasm32")]
pub extern "C" fn wasm_decode_inputdata(offset: i32) -> i32 {
    // Error results are returned as an error-buffer offset (see macro).
    wasm_func_call!(decode_inputdata, offset)
}
/// Decodes the encoded `Receipt` given as a WASM buffer (parameter `offset`).
///
/// Returns a pointer to a new WASM buffer holding the decoded `Receipt`.
/// If the decoding fails, the returned WASM buffer will contain a String containing the error message.
#[no_mangle]
#[cfg(target_arch = "wasm32")]
pub extern "C" fn wasm_decode_receipt(offset: i32) -> i32 {
    // Error results are returned as an error-buffer offset (see macro).
    wasm_func_call!(decode_receipt, offset)
}
|
use wasm_bindgen::prelude::*;
use js_sys::{Array};
#[wasm_bindgen]
pub fn calculation(x:i32,y:i32) -> i32{
x+y
} |
use std::io::Error as IoError;
use rustc_serialize::base64::FromBase64Error;
use rustc_serialize::json::{EncoderError, /*DecoderError*/};
use mongodb::error::Error as MongoError;
use storage::error::StorageError;
/// All error conditions a command handler can surface.
#[derive(Debug)]
pub enum CommandError {
    /// Underlying I/O failure.
    Io(IoError),
    /// Database error from the MongoDB driver.
    Mongo(MongoError),
    /// A required field was absent; the payload names the field.
    MissingField(String),
    /// A UUID could not be generated or parsed.
    UuidFailure,
    /// A hash could not be computed or verified.
    HashFailure,
    /// Invalid base64 input.
    Base64(FromBase64Error),
    /// JSON serialisation failure.
    JsonEncode(EncoderError),
    //JsonDecode(DecoderError),
    /// An identifier matched more than one record.
    AmbiguousId,
    /// No asset exists for the given identifier.
    AssetNotFound,
    /// Uploaded content did not match its declared MD5 checksum.
    Md5Mismatch,
    /// A filesystem path was not valid Unicode.
    InvalidPathUnicode,
    /// A MIME type string could not be parsed.
    MimeParseFailure,
    /// The requested name is already in use.
    NameTaken,
    /// No user exists for the given identifier.
    UserNotFound,
    /// Error from the storage backend.
    Storage(StorageError)
}
/// Convenience alias: command handlers return this everywhere.
pub type CommandResult<T> = Result<T, CommandError>;
|
// #[no_mangle] keeps Rust from "mangling" the name of the exported function,
// and extern "C" exposes it over the C ABI so C# (or any FFI host) can bind it.
/// Returns the product of `a` and `b`.
#[no_mangle]
pub extern "C" fn multiply(a: i32, b: i32) -> i32 {
    a * b
}
/// Number of bit positions in which `a` and `b` differ.
#[no_mangle]
pub extern "C" fn hamming_distance(a: u32, b: u32) -> u32 {
    // The differing bits are exactly the set bits of the XOR.
    let diff = a ^ b;
    diff.count_ones()
}
|
use std::collections::HashSet;
use std::io::{self};
/// Advent of Code 2020, day 1: reads integers from `input.txt`, then prints
/// the product of the pair (part 1) and the triple (part 2) summing to 2020.
/// The `assert_eq!`s are regression guards for this specific puzzle input.
fn main() -> io::Result<()> {
    use std::cmp::Ordering;

    let f = "input.txt";
    let vec: Vec<i32> = std::fs::read_to_string(f)?
        .lines()
        .map(|x| x.parse::<i32>().unwrap())
        .collect();

    // Part 1: single pass, remembering each value's needed complement.
    let mut numbers = HashSet::new();
    for my_int in &vec {
        if numbers.contains(my_int) {
            assert_eq!(1018336, my_int * (2020 - my_int));
            println!("{}", my_int * (2020 - my_int));
            break;
        }
        numbers.insert(2020 - my_int);
    }

    // Part 2: sort, then fix one element and two-pointer over the rest.
    let mut vec = vec;
    // Order of equal integers is irrelevant, so the unstable sort suffices.
    vec.sort_unstable();
    let target_sum = 2020;
    'outer: for (i, el) in vec.iter().enumerate() {
        let mut right = vec.len() - 1;
        let mut left = i + 1;
        while left < right {
            let right_el = vec[right];
            let left_el = vec[left];
            let summed = el + left_el + right_el;
            // The three orderings are exhaustive for integers, so the
            // original trailing `else { panic!("NOPE") }` was unreachable
            // and has been removed in favour of an exhaustive match.
            match summed.cmp(&target_sum) {
                Ordering::Equal => {
                    assert_eq!(288756720, right_el * left_el * el);
                    println!("{}", right_el * left_el * el);
                    break 'outer;
                }
                Ordering::Less => left += 1,
                Ordering::Greater => right -= 1,
            }
        }
    }
    Ok(())
}
|
// Public module surface of the crate; each `pub mod` pulls in a same-named file.
pub mod codec;
pub mod convert;
pub mod error;
pub mod hash;
pub mod ipld;
// Re-export the `cid` crate — presumably so downstream code uses the exact
// version this crate was built against; TODO confirm intent.
pub use cid;
|
/// Reader of register UARTDR
pub type R = crate::R<u32, super::UARTDR>;
/// Writer for register UARTDR
pub type W = crate::W<u32, super::UARTDR>;
/// Register UARTDR `reset()`'s with value 0
impl crate::ResetValue for super::UARTDR {
    type Type = u32;
    /// All bits are zero out of reset.
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0
    }
}
/// Reader of field `OE` (overrun error)
pub type OE_R = crate::R<bool, bool>;
/// Reader of field `BE` (break error)
pub type BE_R = crate::R<bool, bool>;
/// Reader of field `PE` (parity error)
pub type PE_R = crate::R<bool, bool>;
/// Reader of field `FE` (framing error)
pub type FE_R = crate::R<bool, bool>;
/// Reader of field `DATA`
pub type DATA_R = crate::R<u8, u8>;
/// Write proxy for field `DATA`
pub struct DATA_W<'a> {
    w: &'a mut W,
}
impl<'a> DATA_W<'a> {
    /// Writes raw bits to the field.
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // Replace the low byte of the register with `value`.
        self.w.bits = (self.w.bits & !0xff) | ((value as u32) & 0xff);
        self.w
    }
}
impl R {
    /// Bit 11 - Overrun error: set when data arrives while the receive FIFO
    /// is already full; clears once the FIFO has space again.
    #[inline(always)]
    pub fn oe(&self) -> OE_R {
        OE_R::new(((self.bits >> 11) & 1) == 1)
    }
    /// Bit 10 - Break error: the receive input was held LOW for longer than a
    /// full-word transmission time (start, data, parity and stop bits).
    #[inline(always)]
    pub fn be(&self) -> BE_R {
        BE_R::new(((self.bits >> 10) & 1) == 1)
    }
    /// Bit 9 - Parity error: received parity disagrees with the EPS/SPS
    /// settings in UARTLCR_H.
    #[inline(always)]
    pub fn pe(&self) -> PE_R {
        PE_R::new(((self.bits >> 9) & 1) == 1)
    }
    /// Bit 8 - Framing error: the received character had no valid stop bit.
    #[inline(always)]
    pub fn fe(&self) -> FE_R {
        FE_R::new(((self.bits >> 8) & 1) == 1)
    }
    /// Bits 0:7 - Receive (read) / transmit (write) data character.
    #[inline(always)]
    pub fn data(&self) -> DATA_R {
        DATA_R::new((self.bits & 0xff) as u8)
    }
}
impl W {
    /// Bits 0:7 - Receive (read) / transmit (write) data character.
    #[inline(always)]
    pub fn data(&mut self) -> DATA_W {
        DATA_W { w: self }
    }
}
|
/// Reader of register LL_CONTROL
pub type R = crate::R<u32, super::LL_CONTROL>;
/// Writer for register LL_CONTROL
pub type W = crate::W<u32, super::LL_CONTROL>;
/// Register LL_CONTROL `reset()`'s with value 0x02
impl crate::ResetValue for super::LL_CONTROL {
    type Type = u32;
    /// Only bit 1 (DLE) is set out of reset.
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0x02
    }
}
#[doc = "Reader of field `PRIV_1_2`"]
pub type PRIV_1_2_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `PRIV_1_2`"]
pub struct PRIV_1_2_W<'a> {
w: &'a mut W,
}
impl<'a> PRIV_1_2_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01);
self.w
}
}
#[doc = "Reader of field `DLE`"]
pub type DLE_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `DLE`"]
pub struct DLE_W<'a> {
w: &'a mut W,
}
impl<'a> DLE_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1);
self.w
}
}
#[doc = "Reader of field `WL_READ_AS_MEM`"]
pub type WL_READ_AS_MEM_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `WL_READ_AS_MEM`"]
pub struct WL_READ_AS_MEM_W<'a> {
w: &'a mut W,
}
impl<'a> WL_READ_AS_MEM_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 2)) | (((value as u32) & 0x01) << 2);
self.w
}
}
#[doc = "Reader of field `ADVCH_FIFO_PRIV_1_2_FLUSH_CTRL`"]
pub type ADVCH_FIFO_PRIV_1_2_FLUSH_CTRL_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `ADVCH_FIFO_PRIV_1_2_FLUSH_CTRL`"]
pub struct ADVCH_FIFO_PRIV_1_2_FLUSH_CTRL_W<'a> {
w: &'a mut W,
}
impl<'a> ADVCH_FIFO_PRIV_1_2_FLUSH_CTRL_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 3)) | (((value as u32) & 0x01) << 3);
self.w
}
}
#[doc = "Reader of field `HW_RSLV_LIST_FULL`"]
pub type HW_RSLV_LIST_FULL_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `HW_RSLV_LIST_FULL`"]
pub struct HW_RSLV_LIST_FULL_W<'a> {
w: &'a mut W,
}
impl<'a> HW_RSLV_LIST_FULL_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 4)) | (((value as u32) & 0x01) << 4);
self.w
}
}
#[doc = "Reader of field `RPT_INIT_ADDR_MATCH_PRIV_MISMATCH_ADV`"]
pub type RPT_INIT_ADDR_MATCH_PRIV_MISMATCH_ADV_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `RPT_INIT_ADDR_MATCH_PRIV_MISMATCH_ADV`"]
pub struct RPT_INIT_ADDR_MATCH_PRIV_MISMATCH_ADV_W<'a> {
w: &'a mut W,
}
impl<'a> RPT_INIT_ADDR_MATCH_PRIV_MISMATCH_ADV_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 5)) | (((value as u32) & 0x01) << 5);
self.w
}
}
#[doc = "Reader of field `RPT_SCAN_ADDR_MATCH_PRIV_MISMATCH_ADV`"]
pub type RPT_SCAN_ADDR_MATCH_PRIV_MISMATCH_ADV_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `RPT_SCAN_ADDR_MATCH_PRIV_MISMATCH_ADV`"]
pub struct RPT_SCAN_ADDR_MATCH_PRIV_MISMATCH_ADV_W<'a> {
w: &'a mut W,
}
impl<'a> RPT_SCAN_ADDR_MATCH_PRIV_MISMATCH_ADV_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 6)) | (((value as u32) & 0x01) << 6);
self.w
}
}
#[doc = "Reader of field `RPT_PEER_ADDR_MATCH_PRIV_MISMATCH_SCN`"]
pub type RPT_PEER_ADDR_MATCH_PRIV_MISMATCH_SCN_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `RPT_PEER_ADDR_MATCH_PRIV_MISMATCH_SCN`"]
pub struct RPT_PEER_ADDR_MATCH_PRIV_MISMATCH_SCN_W<'a> {
w: &'a mut W,
}
impl<'a> RPT_PEER_ADDR_MATCH_PRIV_MISMATCH_SCN_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 7)) | (((value as u32) & 0x01) << 7);
self.w
}
}
#[doc = "Reader of field `RPT_PEER_ADDR_MATCH_PRIV_MISMATCH_INI`"]
pub type RPT_PEER_ADDR_MATCH_PRIV_MISMATCH_INI_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `RPT_PEER_ADDR_MATCH_PRIV_MISMATCH_INI`"]
pub struct RPT_PEER_ADDR_MATCH_PRIV_MISMATCH_INI_W<'a> {
w: &'a mut W,
}
impl<'a> RPT_PEER_ADDR_MATCH_PRIV_MISMATCH_INI_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 8)) | (((value as u32) & 0x01) << 8);
self.w
}
}
#[doc = "Reader of field `RPT_SELF_ADDR_MATCH_PRIV_MISMATCH_INI`"]
pub type RPT_SELF_ADDR_MATCH_PRIV_MISMATCH_INI_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `RPT_SELF_ADDR_MATCH_PRIV_MISMATCH_INI`"]
pub struct RPT_SELF_ADDR_MATCH_PRIV_MISMATCH_INI_W<'a> {
w: &'a mut W,
}
impl<'a> RPT_SELF_ADDR_MATCH_PRIV_MISMATCH_INI_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 9)) | (((value as u32) & 0x01) << 9);
self.w
}
}
#[doc = "Reader of field `PRIV_1_2_ADV`"]
pub type PRIV_1_2_ADV_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `PRIV_1_2_ADV`"]
pub struct PRIV_1_2_ADV_W<'a> {
w: &'a mut W,
}
impl<'a> PRIV_1_2_ADV_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 10)) | (((value as u32) & 0x01) << 10);
self.w
}
}
#[doc = "Reader of field `PRIV_1_2_SCAN`"]
pub type PRIV_1_2_SCAN_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `PRIV_1_2_SCAN`"]
pub struct PRIV_1_2_SCAN_W<'a> {
w: &'a mut W,
}
impl<'a> PRIV_1_2_SCAN_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 11)) | (((value as u32) & 0x01) << 11);
self.w
}
}
#[doc = "Reader of field `PRIV_1_2_INIT`"]
pub type PRIV_1_2_INIT_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `PRIV_1_2_INIT`"]
pub struct PRIV_1_2_INIT_W<'a> {
w: &'a mut W,
}
impl<'a> PRIV_1_2_INIT_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 12)) | (((value as u32) & 0x01) << 12);
self.w
}
}
#[doc = "Reader of field `EN_CONN_RX_EN_MOD`"]
pub type EN_CONN_RX_EN_MOD_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `EN_CONN_RX_EN_MOD`"]
pub struct EN_CONN_RX_EN_MOD_W<'a> {
w: &'a mut W,
}
impl<'a> EN_CONN_RX_EN_MOD_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 13)) | (((value as u32) & 0x01) << 13);
self.w
}
}
#[doc = "Reader of field `SLV_CONN_PEER_RPA_NOT_RSLVD`"]
pub type SLV_CONN_PEER_RPA_NOT_RSLVD_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `SLV_CONN_PEER_RPA_NOT_RSLVD`"]
pub struct SLV_CONN_PEER_RPA_NOT_RSLVD_W<'a> {
w: &'a mut W,
}
impl<'a> SLV_CONN_PEER_RPA_NOT_RSLVD_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 14)) | (((value as u32) & 0x01) << 14);
self.w
}
}
#[doc = "Write proxy for field `ADVCH_FIFO_FLUSH`"]
pub struct ADVCH_FIFO_FLUSH_W<'a> {
w: &'a mut W,
}
impl<'a> ADVCH_FIFO_FLUSH_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 15)) | (((value as u32) & 0x01) << 15);
self.w
}
}
impl R {
    /// Bit 0 - Enables the Privacy 1.2 feature.
    #[inline(always)]
    pub fn priv_1_2(&self) -> PRIV_1_2_R {
        PRIV_1_2_R::new((self.bits & 0x01) == 0x01)
    }
    /// Bit 1 - Data Length Extension enable (DTM, connection and encryption
    /// modules). Must always be set to 1; 0 is not supported.
    #[inline(always)]
    pub fn dle(&self) -> DLE_R {
        DLE_R::new(((self.bits >> 1) & 0x01) == 0x01)
    }
    /// Bit 2 - Whitelist read mode: 0 = FIFO-style reads (pointers reset via
    /// RESET_READ_PTR), 1 = memory-style reads of any whitelist entry.
    #[inline(always)]
    pub fn wl_read_as_mem(&self) -> WL_READ_AS_MEM_R {
        WL_READ_AS_MEM_R::new(((self.bits >> 2) & 0x01) == 0x01)
    }
    /// Bit 3 - ADVCH FIFO flush policy with Privacy 1.2 enabled: 0 = flush
    /// ADV & INIT packets except those with unresolved RPAs, 1 = keep all
    /// CRC-good packets.
    #[inline(always)]
    pub fn advch_fifo_priv_1_2_flush_ctrl(&self) -> ADVCH_FIFO_PRIV_1_2_FLUSH_CTRL_R {
        ADVCH_FIFO_PRIV_1_2_FLUSH_CTRL_R::new(((self.bits >> 3) & 0x01) == 0x01)
    }
    /// Bit 4 - Hardware resolving list is full and is extended in firmware;
    /// address comparisons must then also consult the firmware list.
    #[inline(always)]
    pub fn hw_rslv_list_full(&self) -> HW_RSLV_LIST_FULL_R {
        HW_RSLV_LIST_FULL_R::new(((self.bits >> 4) & 0x01) == 0x01)
    }
    /// Bit 5 - ADV engine on initiator address match with privacy mismatch:
    /// 0 = abort the packet, 1 = receive and report it to firmware.
    #[inline(always)]
    pub fn rpt_init_addr_match_priv_mismatch_adv(&self) -> RPT_INIT_ADDR_MATCH_PRIV_MISMATCH_ADV_R {
        RPT_INIT_ADDR_MATCH_PRIV_MISMATCH_ADV_R::new(((self.bits >> 5) & 0x01) == 0x01)
    }
    /// Bit 6 - ADV engine on scanner address match with privacy mismatch:
    /// 0 = abort the packet, 1 = receive and report it to firmware.
    #[inline(always)]
    pub fn rpt_scan_addr_match_priv_mismatch_adv(&self) -> RPT_SCAN_ADDR_MATCH_PRIV_MISMATCH_ADV_R {
        RPT_SCAN_ADDR_MATCH_PRIV_MISMATCH_ADV_R::new(((self.bits >> 6) & 0x01) == 0x01)
    }
    /// Bit 7 - SCAN engine on peer address match with privacy mismatch:
    /// 0 = abort the packet, 1 = receive and report it to firmware.
    #[inline(always)]
    pub fn rpt_peer_addr_match_priv_mismatch_scn(&self) -> RPT_PEER_ADDR_MATCH_PRIV_MISMATCH_SCN_R {
        RPT_PEER_ADDR_MATCH_PRIV_MISMATCH_SCN_R::new(((self.bits >> 7) & 0x01) == 0x01)
    }
    /// Bit 8 - INIT engine on peer address match with privacy mismatch:
    /// 0 = abort the packet, 1 = receive and report it to firmware.
    #[inline(always)]
    pub fn rpt_peer_addr_match_priv_mismatch_ini(&self) -> RPT_PEER_ADDR_MATCH_PRIV_MISMATCH_INI_R {
        RPT_PEER_ADDR_MATCH_PRIV_MISMATCH_INI_R::new(((self.bits >> 8) & 0x01) == 0x01)
    }
    /// Bit 9 - INIT engine on self address match with privacy mismatch:
    /// 0 = abort the packet, 1 = receive and report it to firmware.
    #[inline(always)]
    pub fn rpt_self_addr_match_priv_mismatch_ini(&self) -> RPT_SELF_ADDR_MATCH_PRIV_MISMATCH_INI_R {
        RPT_SELF_ADDR_MATCH_PRIV_MISMATCH_INI_R::new(((self.bits >> 9) & 0x01) == 0x01)
    }
    /// Bit 10 - Enables Privacy 1.2 for the ADV engine.
    #[inline(always)]
    pub fn priv_1_2_adv(&self) -> PRIV_1_2_ADV_R {
        PRIV_1_2_ADV_R::new(((self.bits >> 10) & 0x01) == 0x01)
    }
    /// Bit 11 - Enables Privacy 1.2 for the SCAN engine.
    #[inline(always)]
    pub fn priv_1_2_scan(&self) -> PRIV_1_2_SCAN_R {
        PRIV_1_2_SCAN_R::new(((self.bits >> 11) & 0x01) == 0x01)
    }
    /// Bit 12 - Enables Privacy 1.2 for the INIT engine.
    #[inline(always)]
    pub fn priv_1_2_init(&self) -> PRIV_1_2_INIT_R {
        PRIV_1_2_INIT_R::new(((self.bits >> 12) & 0x01) == 0x01)
    }
    /// Bit 13 - Connection RX-enable modification mode while the peer INIT
    /// RPA is unresolved (effective when SLV_CONN_PEER_RPA_NOT_RSLVD is set).
    #[inline(always)]
    pub fn en_conn_rx_en_mod(&self) -> EN_CONN_RX_EN_MOD_R {
        EN_CONN_RX_EN_MOD_R::new(((self.bits >> 13) & 0x01) == 0x01)
    }
    /// Bit 14 - Asserted while a slave connection waits for firmware to
    /// resolve the peer RPA; valid only when PRIV_1_2 is set.
    #[inline(always)]
    pub fn slv_conn_peer_rpa_not_rslvd(&self) -> SLV_CONN_PEER_RPA_NOT_RSLVD_R {
        SLV_CONN_PEER_RPA_NOT_RSLVD_R::new(((self.bits >> 14) & 0x01) == 0x01)
    }
}
impl W {
    /// Bit 0 - Enables the Privacy 1.2 feature.
    #[inline(always)]
    pub fn priv_1_2(&mut self) -> PRIV_1_2_W {
        PRIV_1_2_W { w: self }
    }
    /// Bit 1 - Data Length Extension enable; must always be written as 1.
    #[inline(always)]
    pub fn dle(&mut self) -> DLE_W {
        DLE_W { w: self }
    }
    /// Bit 2 - Whitelist read mode: 0 = FIFO-style reads, 1 = memory-style reads.
    #[inline(always)]
    pub fn wl_read_as_mem(&mut self) -> WL_READ_AS_MEM_W {
        WL_READ_AS_MEM_W { w: self }
    }
    /// Bit 3 - ADVCH FIFO flush policy when Privacy 1.2 is enabled.
    #[inline(always)]
    pub fn advch_fifo_priv_1_2_flush_ctrl(&mut self) -> ADVCH_FIFO_PRIV_1_2_FLUSH_CTRL_W {
        ADVCH_FIFO_PRIV_1_2_FLUSH_CTRL_W { w: self }
    }
    /// Bit 4 - Hardware resolving list full; list is extended in firmware.
    #[inline(always)]
    pub fn hw_rslv_list_full(&mut self) -> HW_RSLV_LIST_FULL_W {
        HW_RSLV_LIST_FULL_W { w: self }
    }
    /// Bit 5 - ADV engine: report (1) or abort (0) packets with an initiator
    /// address match but a privacy mismatch.
    #[inline(always)]
    pub fn rpt_init_addr_match_priv_mismatch_adv(
        &mut self,
    ) -> RPT_INIT_ADDR_MATCH_PRIV_MISMATCH_ADV_W {
        RPT_INIT_ADDR_MATCH_PRIV_MISMATCH_ADV_W { w: self }
    }
    /// Bit 6 - ADV engine: report (1) or abort (0) packets with a scanner
    /// address match but a privacy mismatch.
    #[inline(always)]
    pub fn rpt_scan_addr_match_priv_mismatch_adv(
        &mut self,
    ) -> RPT_SCAN_ADDR_MATCH_PRIV_MISMATCH_ADV_W {
        RPT_SCAN_ADDR_MATCH_PRIV_MISMATCH_ADV_W { w: self }
    }
    /// Bit 7 - SCAN engine: report (1) or abort (0) packets with a peer
    /// address match but a privacy mismatch.
    #[inline(always)]
    pub fn rpt_peer_addr_match_priv_mismatch_scn(
        &mut self,
    ) -> RPT_PEER_ADDR_MATCH_PRIV_MISMATCH_SCN_W {
        RPT_PEER_ADDR_MATCH_PRIV_MISMATCH_SCN_W { w: self }
    }
    /// Bit 8 - INIT engine: report (1) or abort (0) packets with a peer
    /// address match but a privacy mismatch.
    #[inline(always)]
    pub fn rpt_peer_addr_match_priv_mismatch_ini(
        &mut self,
    ) -> RPT_PEER_ADDR_MATCH_PRIV_MISMATCH_INI_W {
        RPT_PEER_ADDR_MATCH_PRIV_MISMATCH_INI_W { w: self }
    }
    /// Bit 9 - INIT engine: report (1) or abort (0) packets with a self
    /// address match but a privacy mismatch.
    #[inline(always)]
    pub fn rpt_self_addr_match_priv_mismatch_ini(
        &mut self,
    ) -> RPT_SELF_ADDR_MATCH_PRIV_MISMATCH_INI_W {
        RPT_SELF_ADDR_MATCH_PRIV_MISMATCH_INI_W { w: self }
    }
    /// Bit 10 - Enables Privacy 1.2 for the ADV engine.
    #[inline(always)]
    pub fn priv_1_2_adv(&mut self) -> PRIV_1_2_ADV_W {
        PRIV_1_2_ADV_W { w: self }
    }
    /// Bit 11 - Enables Privacy 1.2 for the SCAN engine.
    #[inline(always)]
    pub fn priv_1_2_scan(&mut self) -> PRIV_1_2_SCAN_W {
        PRIV_1_2_SCAN_W { w: self }
    }
    /// Bit 12 - Enables Privacy 1.2 for the INIT engine.
    #[inline(always)]
    pub fn priv_1_2_init(&mut self) -> PRIV_1_2_INIT_W {
        PRIV_1_2_INIT_W { w: self }
    }
    /// Bit 13 - Connection RX-enable modification mode while the peer INIT
    /// RPA is unresolved.
    #[inline(always)]
    pub fn en_conn_rx_en_mod(&mut self) -> EN_CONN_RX_EN_MOD_W {
        EN_CONN_RX_EN_MOD_W { w: self }
    }
    /// Bit 14 - Slave connection waiting on firmware RPA resolution;
    /// valid only when PRIV_1_2 is set.
    #[inline(always)]
    pub fn slv_conn_peer_rpa_not_rslvd(&mut self) -> SLV_CONN_PEER_RPA_NOT_RSLVD_W {
        SLV_CONN_PEER_RPA_NOT_RSLVD_W { w: self }
    }
    /// Bit 15 - Flushes the ADVCH FIFO (auto-cleared). Use only when the
    /// firmware has not started reading the FIFO.
    #[inline(always)]
    pub fn advch_fifo_flush(&mut self) -> ADVCH_FIFO_FLUSH_W {
        ADVCH_FIFO_FLUSH_W { w: self }
    }
}
|
// error-pattern:sequence in for each loop not a call
// NOTE: deliberate compile-fail test case. `for each` is not valid Rust; the
// `error-pattern` directive above tells the test harness which diagnostic the
// compiler must emit. Do not "fix" the syntax.
fn main() { for each p in 1 { } }
|
/// For an "almost equilateral" triangle with sides (2k, 2k+factor, 2k+factor),
/// `(3k + factor) * k * k * (k + factor)` is the squared area (Heron's formula
/// simplified). Returns `Some(area)` only when that area is an exact integer.
fn get_area(k: i128, factor: i128) -> Option<i128> {
    let area_squared = (3 * k + factor) * k * k * (k + factor);
    let area = (area_squared as f64).sqrt();
    // Guard against floating-point error: the sqrt must round-trip through an
    // integer AND the squared integer must reproduce area_squared exactly.
    let is_whole = area == (area as u128) as f64;
    if is_whole && (area as i128) * (area as i128) == area_squared {
        Some(area as i128)
    } else {
        None
    }
}
/// Sums the perimeters (up to 1e9) of "almost equilateral" triangles —
/// sides (a, a±1, a±1) — whose area is an exact integer (Project Euler 94 style).
fn main() {
    let mut perimeter_total: i128 = 0;
    let limit: i128 = 1_000_000_000;
    'theloop: for k in 2..limit {
        for factor in &[-1, 1] {
            // Candidate triangle: sides (a, a+factor, a+factor) with a = 2k.
            let a = k * 2;
            let b = a + factor;
            let c = a + factor;
            let perimeter = a + b + c;
            if perimeter > limit {
                break 'theloop;
            }
            // `if let` replaces the original `match` whose `None` arm was empty.
            if let Some(area) = get_area(k, *factor) {
                println!("{}, {}, {} : Per:{}, Area:{}", a, b, c, perimeter, area);
                perimeter_total += perimeter;
            }
        }
    }
    println!("Ta-da? {}", perimeter_total);
}
|
fn main() {
    // Entry point: runs the nested-function demonstration below.
    nested();
}
// Nested functions (original comment was mojibake-encoded Chinese)
/// Demonstrates that a function can be declared inside another function,
/// and that a named function coerces to the `fn` pointer type.
fn nested() {
    // Inner helper: doubles its argument.
    fn multi(x: i32) -> i32 {
        2 * x
    }
    // Call the inner function directly...
    assert_eq!(14, multi(7));
    // ...and through a function-pointer binding.
    let bar: fn(i32) -> i32 = multi;
    assert_eq!(14, bar(7));
}
|
#![allow(while_true)]
#[path = "libs/utils.rs"]
mod utils;
/// Interactively asks for a starting city, builds an initial route that
/// starts and ends at that city, then greedily tries pairwise swaps of the
/// interior stops, keeping the cheapest route found.
///
/// NOTE(review): `rota_swap` is never restored after a non-improving swap,
/// so swaps accumulate across iterations — confirm this is the intended
/// search behavior rather than a plain pairwise-exchange scan.
pub fn execute(numero_max_cidades: &u8, matriz_distancias: &Vec<Vec<u16>>) {
    // Route arrays hold every city plus the closing return to the start city.
    let tamanho_array: usize = (*numero_max_cidades + 1) as usize;
    let limite_destino_final: usize = *numero_max_cidades as usize;
    let mut cidade_inicial: i8 = -1;
    // Prompt until a valid city index in [0, numero_max_cidades) is entered.
    while true {
        print!("Introduza a cidade inical: ");
        cidade_inicial = match utils::read_input().parse::<i8>() {
            Ok(i) => i,
            // Any parse failure maps to -1, which is rejected below.
            Err(_e) => -1,
        };
        if cidade_inicial < 0 || cidade_inicial >= *numero_max_cidades as i8 {
            println!("Opรงรฃo Invรกlida!");
            continue;
        }
        break;
    }
    // Start from the identity route 0,1,...,n-1 with the start city appended
    // at the end to close the tour.
    let mut rota_menor = vec![0u8; tamanho_array];
    let mut rota_swap = vec![0u8; tamanho_array];
    for x in 0..limite_destino_final {
        rota_menor[x] = x as u8;
        rota_swap[x] = x as u8;
    }
    rota_menor[*numero_max_cidades as usize] = cidade_inicial as u8;
    rota_swap[*numero_max_cidades as usize] = cidade_inicial as u8;
    // Shift the prefix right by one so the start city can be moved to
    // position 0 without duplicating any other city.
    for i in (1..(cidade_inicial + 1)).rev() {
        rota_menor[i as usize] = rota_menor[(i - 1) as usize];
        rota_swap[i as usize] = rota_swap[(i - 1) as usize];
    }
    rota_menor[0] = cidade_inicial as u8;
    rota_swap[0] = cidade_inicial as u8;
    let mut distance_min: u16 = calculate_distance(&rota_menor, &matriz_distancias);
    // Swap every pair of interior positions (endpoints stay fixed) and
    // remember the best route seen so far.
    for i in 1..limite_destino_final {
        for j in 1..limite_destino_final {
            let troca_valores_1 = rota_swap[i];
            let troca_valores_2 = rota_swap[j];
            rota_swap[j] = troca_valores_1;
            rota_swap[i] = troca_valores_2;
            let distance = calculate_distance(&rota_swap, &matriz_distancias);
            if distance < distance_min {
                rota_menor = rota_swap.clone();
                distance_min = distance;
            }
        }
    }
    println!("Rota Menor: {:?}\nDistancia : {} KM", rota_menor, distance_min);
    println!("------------------------------------------------------------------------");
}
/// Sums the leg distances along `route` by looking up each consecutive city
/// pair in the distance matrix.
///
/// Returns 0 for routes with fewer than two stops; the original
/// `0..(route.len() - 1)` range underflowed (panicking in debug builds) when
/// the route was empty. `windows(2)` yields nothing in that case.
fn calculate_distance(route: &Vec<u8>, matriz_distancias: &Vec<Vec<u16>>) -> u16 {
    route
        .windows(2)
        .map(|par| matriz_distancias[par[0] as usize][par[1] as usize])
        .sum()
}
use super::*;
use syn::{ Lit, Ident };
use crate::util::{ lit_from_meta_attribute };
use proc_macro2::TokenStream;
//This gets the attributes of the parent enum
/// Collects the discriminant configuration from a parent enum's attributes.
///
/// The discriminant literal comes from an explicit `size_in_bits` meta
/// attribute when present, otherwise from the enum's own discriminant.
/// A `str` meta attribute flags a string-typed discriminant.
pub fn get_enum_attributes(attrs : &Vec<syn::Attribute>, parent_data_type : &ParentDataType) -> EnumDiscriminant
{
    let discriminant_data_type : FieldDataType = get_field_type(attrs, parent_data_type);
    let discriminant : syn::Lit = match get_meta_attribute(attrs, "size_in_bits")
    {
        //If its not a num then parse it as a Lit
        Some(meta) => lit_from_meta_attribute(meta),
        //otherwise parse the num to a Lit
        None => get_discriminant(attrs)
    };
    // `.is_some()` replaces the original Some/None-to-bool match.
    let is_str = get_meta_attribute(attrs, "str").is_some();
    let reader = get_reader_literal(&discriminant, &discriminant_data_type, is_str);
    let size_in_bits = match discriminant_data_type
    {
        // Size is only known at runtime when reading through a reader.
        FieldDataType::FromReader => quote!{ 0 },
        //otherwise the discriminant is the size_in_bits so we return it as the predicate
        _ => quote!{ #discriminant }
    };
    // Tail expression (no `return`) with field-init shorthand.
    EnumDiscriminant
    {
        size_in_bits,
        data_type : discriminant_data_type,
        reader_literal : reader
    }
}
/// Parsed discriminant configuration for a packattack enum.
pub struct EnumDiscriminant
{
    // Token stream for the discriminant's bit width; `0` when the size is
    // only known at runtime (FromReader).
    pub size_in_bits : TokenStream,
    // Where the discriminant value is sourced from (bits, bytes, reader, ...).
    pub data_type : FieldDataType,
    // Token stream that, when emitted, reads the discriminant value.
    pub reader_literal : TokenStream
}
/// Builds the token stream that reads an enum discriminant value.
///
/// `derivable` is the discriminant literal: an integer literal means "read
/// that many bits from the first byte"; a string literal names a type whose
/// `from_reader` result supplies the value.
///
/// Panics for data types that can never carry a discriminant
/// (`FromReader` with an int literal, and `Payload`), and for unsupported
/// literal kinds.
pub fn get_reader_literal(derivable : &syn::Lit, discriminant_data_type : &FieldDataType, is_str : bool) -> TokenStream
{
    match derivable
    {
        Lit::Int(lit_int) =>
        {
            //create the bitmask for this byte
            let bitmask : TokenStream = quote!{ ((1 << #lit_int) - 1) << (8 - #lit_int) };
            match discriminant_data_type
            {
                FieldDataType::FromReader => panic!("Packattack Internal Error: No byte token when reading from_reader!"),
                FieldDataType::FromBytes => quote!{ bytes[0] },
                FieldDataType::FromBits =>
                {
                    // read the byte, mask it for the bits we want,
                    //and bit shift them back to the beginning of the u8
                    //finally pass that value into from_u8
                    quote!{ (bytes[0] & #bitmask) >> (8 - #lit_int) }
                },
                FieldDataType::FromSlice => quote!{ 0 },
                FieldDataType::Payload => panic!("Discriminants can't be a payload! Do you know what you're doing? Maybe read the docs...")
            }
        }
        Lit::Str(lit_str) =>
        {
            // The string names a type; build an identifier for it at the
            // literal's span so errors point at the attribute.
            let identifier = Ident::new(lit_str.value().as_str(),lit_str.span());
            if is_str { return quote!{ &*<String>::from_reader(reader).await? }; }
            quote!{ usize::from(<#identifier>::from_reader(reader).await?) }
        },
        _=> panic!(" Packattack only supports type literals and usizes as size_in_bytes!")
    }
}
use std::fmt::Debug;
use crate::{
version::repository::VersionRepository,
version_revision_resolver::{vrr_id::VrrId, VersionRevisionResolver},
vtable::repository::VTableRepository,
};
/// Types that must be implemented in an infrastructure layer.
///
/// Bundles the concrete implementation types so generic code can name them
/// all through a single type parameter.
pub trait ImmutableSchemaAbstractTypes: Debug + Sized {
    /// Identifier type used by the version-revision resolver.
    type VrrId: VrrId;
    /// Repository for VTables, parameterized over this type bundle.
    type VTableRepo: VTableRepository<Self>;
    /// Repository for versions.
    type VersionRepo: VersionRepository;
    /// The version-revision resolver implementation itself.
    type Vrr: VersionRevisionResolver<Self>;
}
|
//! Cortex-M Security Extensions
//!
//! This module provides several helper functions to support Armv8-M and Armv8.1-M Security
//! Extensions.
//! Most of this implementation is directly inspired by the "Armv8-M Security Extensions:
//! Requirements on Development Tools" document available here:
//! https://developer.arm.com/docs/ecm0359818/latest
//!
//! Please note that the TT instructions support, as described in part 4 of the document linked above, is
//! not part of CMSE but is still present in this module. The TT instructions return the
//! configuration of the Memory Protection Unit at an address.
//!
//! # Notes
//!
//! * Non-Secure Unprivileged code will always read zeroes from TestTarget and should not use it.
//! * Non-Secure Privileged code can check current (AccessType::Current) and Non-Secure Unprivileged
//! accesses (AccessType::Unprivileged).
//! * Secure Unprivileged code can check Non-Secure Unprivileged accesses (AccessType::NonSecure).
//! * Secure Privileged code can check all access types.
//!
//! # Example
//!
//! ```
//! use cortex_m::cmse::{TestTarget, AccessType};
//!
//! // suspect_address was given by Non-Secure to a Secure function to write at it.
//! // But is it allowed to?
//! let suspect_address_test = TestTarget::check(0xDEADBEEF as *mut u32,
//! AccessType::NonSecureUnprivileged);
//! if suspect_address_test.ns_read_and_writable() {
//! // Non-Secure can read and write this address!
//! }
//! ```
use crate::asm::{tt, tta, ttat, ttt};
use bitfield::bitfield;
/// Memory access behaviour: determine which privilege execution mode is used and which Memory
/// Protection Unit (MPU) is used.
#[derive(PartialEq, Copy, Clone, Debug)]
pub enum AccessType {
    /// Access using current privilege level and reading from current security state MPU.
    /// Uses the TT instruction.
    Current,
    /// Unprivileged access reading from current security state MPU. Uses the TTT instruction.
    Unprivileged,
    /// Access using current privilege level reading from Non-Secure MPU. Uses the TTA instruction.
    /// Undefined if used from Non-Secure state.
    NonSecure,
    /// Unprivileged access reading from Non-Secure MPU. Uses the TTAT instruction.
    /// Undefined if used from Non-Secure state.
    NonSecureUnprivileged,
}
/// Abstraction of TT instructions and helper functions to determine the security and privilege
/// attribute of a target address, accessed in different ways.
#[derive(PartialEq, Copy, Clone, Debug)]
pub struct TestTarget {
    // Raw response payload of the TT instruction that was executed.
    tt_resp: TtResp,
    // Which TT variant produced the payload (see AccessType).
    access_type: AccessType,
}
bitfield! {
    /// Test Target Response Payload
    ///
    /// Provides the response payload from a TT, TTA, TTT or TTAT instruction.
    #[derive(PartialEq, Copy, Clone)]
    struct TtResp(u32);
    impl Debug;
    // MREGION [7:0]: MPU region number (valid only when MRVALID is set).
    mregion, _: 7, 0;
    // SREGION [15:8]: SAU region number (valid only when SRVALID is set).
    sregion, _: 15, 8;
    // MRVALID [16]: MREGION field is valid.
    mrvalid, _: 16;
    // SRVALID [17]: SREGION field is valid.
    srvalid, _: 17;
    // R [18]: address is readable per the queried MPU.
    r, _: 18;
    // RW [19]: address is readable and writable per the queried MPU.
    rw, _: 19;
    // NSR [20]: readable AND Non-Secure (R && !S).
    nsr, _: 20;
    // NSRW [21]: read/writable AND Non-Secure (RW && !S).
    nsrw, _: 21;
    // S [22]: Security attribute of the address.
    s, _: 22;
    // IRVALID [23]: IREGION field is valid.
    irvalid, _: 23;
    // IREGION [31:24]: IDAU region number (valid only when IRVALID is set).
    iregion, _: 31, 24;
}
impl TestTarget {
    /// Runs the TT instruction variant selected by `access_type` on `addr`
    /// and wraps the resulting response payload.
    #[inline]
    pub fn check(addr: *mut u32, access_type: AccessType) -> Self {
        let raw = match access_type {
            AccessType::Current => tt(addr),
            AccessType::Unprivileged => ttt(addr),
            AccessType::NonSecure => tta(addr),
            AccessType::NonSecureUnprivileged => ttat(addr),
        };
        TestTarget {
            tt_resp: TtResp(raw),
            access_type,
        }
    }
    /// Tests the address range from `addr` to `addr + size - 1` using
    /// `access_type`.
    /// Returns None when:
    /// * the address zone overlaps SAU, IDAU or MPU region boundaries
    /// * `size` is 0
    /// * `addr + size - 1` overflows
    #[inline]
    pub fn check_range(addr: *mut u32, size: usize, access_type: AccessType) -> Option<Self> {
        let begin = addr as usize;
        // Inclusive last address of the range. `checked_sub` rejects
        // size == 0 and `checked_add` rejects address-space overflow.
        let end = begin.checked_add(size.checked_sub(1)?)?;
        // Attribution regions are aligned on 32-byte boundaries, so a range
        // that fits in a single 32-byte line needs only one TT query.
        let fits_one_line = (begin % 32).checked_add(size)? <= 32usize;
        let first = TestTarget::check(addr, access_type);
        if fits_one_line {
            return Some(first);
        }
        let last = TestTarget::check(end as *mut u32, access_type);
        // Matching payloads at both ends prove the range does not cross a
        // SAU, IDAU or MPU region boundary.
        if first == last {
            Some(first)
        } else {
            None
        }
    }
    /// Access type that was used for this test target.
    #[inline]
    pub fn access_type(self) -> AccessType {
        self.access_type
    }
    /// Raw u32 value returned by the TT instruction that was used.
    #[inline]
    pub fn as_u32(self) -> u32 {
        self.tt_resp.0
    }
    /// Read accessibility of the target address. Only reflects the MPU
    /// settings, without checking the Security state of the target.
    /// For Unprivileged and NonSecureUnprivileged access types, reports the
    /// unprivileged permissions regardless of the current mode.
    /// Returns false if the TT instruction was executed from an unprivileged
    /// mode and the NonSecure access type was not specified, or if the
    /// address matches multiple MPU regions.
    #[inline]
    pub fn readable(self) -> bool {
        self.tt_resp.r()
    }
    /// Read and write accessibility of the target address. Only reflects the
    /// MPU settings, without checking the Security state of the target.
    /// For Unprivileged and NonSecureUnprivileged access types, reports the
    /// unprivileged permissions regardless of the current mode.
    /// Returns false if the TT instruction was executed from an unprivileged
    /// mode and the NonSecure access type was not specified, or if the
    /// address matches multiple MPU regions.
    #[inline]
    pub fn read_and_writable(self) -> bool {
        self.tt_resp.rw()
    }
    /// MPU region number containing the target address.
    /// Returns None if the value is not valid:
    /// * the MPU is not implemented or MPU_CTRL.ENABLE is set to zero
    /// * the MREGION field does not match any enabled MPU region
    /// * the address matched multiple MPU regions
    /// * the address is exempt from the secure memory attribution
    /// * the TT instruction was executed from an unprivileged mode and the
    ///   A flag was not specified.
    #[inline]
    pub fn mpu_region(self) -> Option<u8> {
        match self.tt_resp.mrvalid() {
            // MREGION is an 8-bit field, so the cast cannot truncate.
            true => Some(self.tt_resp.mregion() as u8),
            false => None,
        }
    }
    /// Security attribute of the target address. Independent of AccessType.
    /// Always zero when the test is performed from the Non-Secure state.
    #[inline]
    pub fn secure(self) -> bool {
        self.tt_resp.s()
    }
    /// Non-Secure read accessibility of the target address.
    /// Same as readable() && !secure()
    #[inline]
    pub fn ns_readable(self) -> bool {
        self.tt_resp.nsr()
    }
    /// Non-Secure read and write accessibility of the target address.
    /// Same as read_and_writable() && !secure()
    #[inline]
    pub fn ns_read_and_writable(self) -> bool {
        self.tt_resp.nsrw()
    }
    /// IDAU region number containing the target address. Independent of
    /// AccessType.
    /// Returns None if the value is not valid:
    /// * the IDAU cannot provide a region number
    /// * the address is exempt from security attribution
    /// * the test is performed from the Non-Secure state
    #[inline]
    pub fn idau_region(self) -> Option<u8> {
        match self.tt_resp.irvalid() {
            // IREGION is an 8-bit field, so the cast cannot truncate.
            true => Some(self.tt_resp.iregion() as u8),
            false => None,
        }
    }
    /// SAU region number containing the target address. Independent of
    /// AccessType.
    /// Returns None if the value is not valid:
    /// * SAU_CTRL.ENABLE is set to zero
    /// * the SREGION field does not match any enabled SAU region
    /// * the address matches multiple enabled SAU regions
    /// * the address is exempt from the secure memory attribution
    /// * the TT instruction was executed from the Non-Secure state or the
    ///   Security Extension is not implemented
    #[inline]
    pub fn sau_region(self) -> Option<u8> {
        match self.tt_resp.srvalid() {
            // SREGION is an 8-bit field, so the cast cannot truncate.
            true => Some(self.tt_resp.sregion() as u8),
            false => None,
        }
    }
}
|
use diesel::RunQueryDsl;
use radmin::diesel::PgConnection;
use radmin::serde::{Deserialize, Serialize};
use crate::models::Email;
use crate::schema::email_addresses;
/// Test-data factory for rows of the `email_addresses` table.
#[derive(Debug, PartialEq, Clone, Serialize, Deserialize, Insertable)]
#[table_name = "email_addresses"]
pub struct EmailFactory {
    // Local part of the address (before the '@').
    pub account: String,
    // Domain part of the address (after the '@').
    pub domain: String,
}
impl EmailFactory {
    /// Creates a factory for an email address split into `account` @ `domain`.
    pub fn new<S: Into<String>>(account: S, domain: S) -> EmailFactory {
        EmailFactory {
            account: account.into(),
            domain: domain.into(),
        }
    }
    /// Inserts this record into `email_addresses` and returns the stored row.
    ///
    /// Panics if the insert fails (acceptable for a test factory).
    pub fn insert(self, conn: &PgConnection) -> Email {
        radmin::diesel::insert_into(email_addresses::table)
            .values(&self)
            .get_result(conn)
            // Fixed: message previously said "Category Factory" (copy-paste).
            .expect("Failed to insert new EmailFactory")
    }
    /// Builder-style override of the account part.
    pub fn account<S: Into<String>>(mut self, account: S) -> EmailFactory {
        self.account = account.into();
        self
    }
    /// Builder-style override of the domain part.
    pub fn domain<S: Into<String>>(mut self, domain: S) -> EmailFactory {
        self.domain = domain.into();
        self
    }
}
|
use super::readers::{ReadAllFn, ReadFn};
use super::writers::{FlushFn, WriteFn};
use duktape::prelude::*;
/// Builds the abstract "ReadWriter" base class: every method raises until
/// overridden by a concrete subclass (see build_readwriter_class).
pub(crate) fn build_readwriter<'a>(ctx: &'a Context) -> DukResult<Function<'a>> {
    let mut builder = class::build();
    builder.name("ReadWriter");
    builder.method(
        "write",
        (1, |_ctx: &Context, _this: &mut class::Instance| {
            duk_error!("Don't use the Writer class directly")
        }),
    );
    builder.method("flush", |_ctx: &Context, _this: &mut class::Instance| {
        duk_error!("Don't use the Writer class directly")
    });
    builder.method(
        "read",
        (1, |_ctx: &Context, _this: &mut class::Instance| {
            duk_error!("Don't use the Reader class directly")
        }),
    );
    builder.method(
        "readAll",
        (1, |_ctx: &Context, _this: &mut class::Instance| {
            duk_error!("Don't use the Reader class directly")
        }),
    );
    // Materialize the class on the duktape stack and hand back the function.
    Ok(ctx.push(builder)?.getp()?)
}
/// Builds the concrete ReadWriter subclass, wiring the real read/write/flush
/// implementations over the abstract base produced by build_readwriter.
pub(crate) fn build_readwriter_class<'a>(
    ctx: &'a Context,
    readwriter: Function<'a>,
) -> DukResult<Function<'a>> {
    let mut concrete = class::build();
    concrete.inherit(readwriter);
    concrete.method("write", WriteFn {});
    concrete.method("flush", FlushFn {});
    concrete.method("read", ReadFn {});
    // NOTE: a "readLine" method existed as commented-out code; not wired up.
    concrete.method("readAll", ReadAllFn {});
    let class_fn = ctx.push(concrete)?.getp()?;
    Ok(class_fn)
}
|
// id number A unique numeric ID that can be used to
// identify and reference an action.
// status string The current status of the action. This can
// be "in-progress", "completed", or "errored".
// type string This is the type of action that the object
// represents. For example, this could be "transfer" to represent the state of
// an image transfer action.
// started_at string A time value given in ISO8601 combined date
// and time format that represents when the action was initiated.
// completed_at nullable string A time value given in ISO8601 combined date
// and time format that represents when the action was completed.
// resource_id number A unique identifier for the resource that
// the action is associated with.
// resource_type string The type of resource that the action is
// associated with.
// region nullable string (deprecated) A slug representing the region
// where the action occurred.
// region_slug nullable string A slug representing the region where the
// action occurred.
use std::fmt;
use std::borrow::Cow;
use response::region::Region;
use response::NamedResponse;
use response;
/// A DigitalOcean "action" object (see the field glossary in the header
/// comment above).
#[derive(Deserialize, Debug)]
pub struct Action {
    // Unique numeric ID identifying the action.
    id: f64,
    // Current status: "in-progress", "completed", or "errored".
    status: String,
    // "type" is a Rust keyword, so the JSON field is renamed.
    #[serde(rename="type")]
    action_type: String,
    // ISO8601 timestamp of when the action was initiated.
    started_at: String,
    // ISO8601 timestamp of completion; None while still in progress.
    completed_at: Option<String>,
    // Identifier of the resource this action is associated with.
    resource_id: f64,
    // Type of the associated resource.
    resource_type: String,
    // Region where the action occurred (deprecated upstream in favour of
    // region_slug, but still deserialized here).
    region: Region,
    // Slug of the region where the action occurred; may be absent.
    region_slug: Option<String>,
}
impl response::NotArray for Action {}
impl fmt::Display for Action {
    /// Renders the action as a human-readable, multi-line summary.
    /// Optional fields print as the literal string "None" when absent.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f,
               "ID: {}\n\
                Status: {}\n\
                Type: {}\n\
                Started At: {}\n\
                Completed At: {}\n\
                Resource ID: {}\n\
                Resource Type: {}\n\
                Region Slug: {}\n\
                Region:\n\t{}",
               self.id,
               self.status,
               self.action_type,
               self.started_at,
               // Borrow the inner String instead of the original
               // is_some() + clone() + unwrap() dance — no allocation.
               self.completed_at.as_ref().map(String::as_str).unwrap_or("None"),
               self.resource_id,
               self.resource_type,
               self.region_slug.as_ref().map(String::as_str).unwrap_or("None"),
               // Indent the nested region display one tab deep.
               self.region.to_string().replace("\n", "\n\t"))
    }
}
/// Convenience alias for endpoints that return a list of actions.
pub type Actions = Vec<Action>;
impl NamedResponse for Action {
    /// JSON key under which a single action object is nested.
    fn name<'a>() -> Cow<'a, str> {
        Cow::Borrowed("action")
    }
}
|
use core::cell::UnsafeCell;
use core::marker::{PhantomData, PhantomPinned};
use core::pin::Pin;
use embassy::interrupt::{Interrupt, InterruptExt};
/// State owned by a PeripheralMutex and driven by a hardware interrupt.
pub trait PeripheralState {
    /// The interrupt this state is serviced from.
    type Interrupt: Interrupt;
    /// Called from the IRQ handler with exclusive access to the state.
    fn on_interrupt(&mut self);
}
/// Mutual exclusion between thread-mode code and an interrupt handler,
/// achieved by disabling the interrupt while thread code holds the state.
pub struct PeripheralMutex<S: PeripheralState> {
    // Shared with the IRQ handler, hence the UnsafeCell.
    state: UnsafeCell<S>,
    // True once register_interrupt has installed the handler.
    irq_setup_done: bool,
    irq: S::Interrupt,
    // *mut () is !Send/!Sync: the state must stay on the creating core.
    _not_send: PhantomData<*mut ()>,
    // The IRQ handler context points into this struct, so it must not move.
    _pinned: PhantomPinned,
}
impl<S: PeripheralState> PeripheralMutex<S> {
    /// Creates the mutex, taking ownership of `irq`. The interrupt is left
    /// untouched until register_interrupt is called.
    pub fn new(state: S, irq: S::Interrupt) -> Self {
        Self {
            irq,
            irq_setup_done: false,
            state: UnsafeCell::new(state),
            _not_send: PhantomData,
            _pinned: PhantomPinned,
        }
    }
    /// Installs S::on_interrupt as the IRQ handler and enables the interrupt.
    /// Idempotent: later calls return immediately. Takes Pin because the
    /// handler context is a raw pointer into `self`.
    pub fn register_interrupt(self: Pin<&mut Self>) {
        // Safety: we never move out of `this`; the pointer handed to the IRQ
        // stays valid because the value is pinned.
        let this = unsafe { self.get_unchecked_mut() };
        if this.irq_setup_done {
            return;
        }
        this.irq.disable();
        this.irq.set_handler(|p| {
            // Safety: it's OK to get a &mut to the state, since
            // - We're in the IRQ, so nobody else can preempt us
            // - We can't have preempted a with() call because the irq is disabled during it.
            let state = unsafe { &mut *(p as *mut S) };
            state.on_interrupt();
        });
        this.irq
            .set_handler_context((&mut this.state) as *mut _ as *mut ());
        this.irq.enable();
        this.irq_setup_done = true;
    }
    /// Runs `f` with exclusive access to the state; the interrupt is
    /// disabled for the whole duration of the call.
    pub fn with<R>(self: Pin<&mut Self>, f: impl FnOnce(&mut S, &mut S::Interrupt) -> R) -> R {
        let this = unsafe { self.get_unchecked_mut() };
        this.irq.disable();
        // Safety: it's OK to get a &mut to the state, since the irq is disabled.
        let state = unsafe { &mut *this.state.get() };
        let r = f(state, &mut this.irq);
        this.irq.enable();
        r
    }
}
impl<S: PeripheralState> Drop for PeripheralMutex<S> {
    /// Tears down the interrupt wiring so the handler can never run with a
    /// dangling pointer to the (about to be dropped) state.
    fn drop(&mut self) {
        self.irq.disable();
        self.irq.remove_handler();
    }
}
|
#[cfg(feature = "backend_session_logind")]
pub mod logind;
|
mod instruction;
mod operation;
use instruction::Instruction;
/// Complete state of an intcode machine.
#[derive(Debug, Clone)]
pub struct State{
    // Pending input values, stored reversed (next value at the end).
    input: Vec<i64>,
    // Values emitted by output instructions, in order.
    output: Vec<i64>,
    // Instruction pointer into `opcodes`.
    address: i64,
    // Program memory; grows on demand (see write()).
    opcodes: Vec<i64>,
    // Base offset for relative-mode parameters.
    relative_base: i64
}
/// Why process() stopped executing.
#[derive(Debug)]
pub enum HaltState{
    /// Execution paused: an input instruction found the input queue empty.
    WaitingForInput,
    /// The program terminated (ran past memory or hit a halt).
    Done
}
/// Runs `opcodes` to completion with the given `input` queue and returns
/// the final machine state.
pub fn address_counter(opcodes: &Vec<i64>, input: &Vec<i64>) -> State {
    let mut machine = State::new(opcodes, input);
    machine.process();
    machine
}
impl State {
    /// Builds a fresh machine from a program and an input queue. The input
    /// is stored reversed so the next value to consume sits at the end.
    pub fn new(opcodes: &Vec<i64>, input: &Vec<i64>) -> State {
        let mut queued: Vec<i64> = input.clone();
        queued.reverse();
        State {
            output: Vec::new(),
            input: queued,
            opcodes: opcodes.clone(),
            address: 0,
            relative_base: 0,
        }
    }
    /// Executes instructions until the program halts or blocks on input.
    pub fn process(&mut self) -> HaltState {
        loop {
            let instruction = match Instruction::new(&self.address, &self.opcodes) {
                Some(instruction) => instruction,
                // No decodable instruction left: the program is done.
                None => return HaltState::Done,
            };
            if let Some(halt) = self.execute_instruction(&instruction) {
                return halt;
            }
        }
    }
    /// Executes instructions until exactly `outputs` values have been
    /// emitted (or the program runs out of instructions).
    pub fn process_until(&mut self, outputs: usize) {
        loop {
            let instruction = match Instruction::new(&self.address, &self.opcodes) {
                Some(instruction) => instruction,
                None => return,
            };
            // Execute first, then test — matches the original semantics even
            // when the target count is already met on entry.
            self.execute_instruction(&instruction);
            if self.output.len() == outputs {
                return;
            }
        }
    }
    /// Stores `value` at `address`, growing memory (zero-filled) on demand.
    pub fn write(&mut self, address: usize, value: i64) {
        if address >= self.opcodes.len() {
            self.opcodes.resize(address + 1, 0);
        }
        self.opcodes[address] = value;
    }
    /// True once the instruction pointer has run past the end of memory.
    pub fn is_halted(&self) -> bool {
        self.address >= self.opcodes.len() as i64
    }
    #[allow(dead_code)]
    /// Remaining (reversed) input queue.
    pub fn get_input(&self) -> &Vec<i64> {
        &self.input
    }
    /// Replaces the input queue wholesale (caller supplies reversed order).
    pub fn set_input(&mut self, inp: Vec<i64>) {
        self.input = inp;
    }
    /// Everything the program has output so far.
    pub fn get_output(&self) -> &Vec<i64> {
        &self.output
    }
    /// Alias for clean_output(), kept for callers using either name.
    pub fn clear_output(&mut self) {
        self.clean_output()
    }
    /// Discards all collected output.
    pub fn clean_output(&mut self) {
        self.output = Vec::new();
    }
    /// Queues one more input value (consumed after the existing queue).
    pub fn add_input(&mut self, input: i64) {
        self.input.insert(0, input);
    }
    fn increment_address(&mut self, amount: i64) {
        self.set_address(self.address + amount);
    }
    fn set_address(&mut self, new_address: i64) {
        self.address = new_address;
    }
    /// Shifts the base used by relative-mode parameters.
    pub fn update_relative_base(&mut self, diff: i64) {
        self.relative_base += diff;
    }
    // Dispatches to the instruction's operation; Some(..) means halt.
    fn execute_instruction(&mut self, ins: &Instruction) -> Option<HaltState> {
        ins.get_operation().process(self, ins)
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // Advent of Code day 2 examples: position-mode add/multiply programs;
    // the final memory image is checked.
    #[test]
    fn day2_p1_example() {
        assert_eq!(
            address_counter(&vec![1,0,0,0,99], &vec![]).opcodes,
            vec![2,0,0,0,99]
        );
        assert_eq!(
            address_counter(&vec![2,3,0,3,99], &vec![]).opcodes,
            vec![2,3,0,6,99]
        );
        assert_eq!(
            address_counter(&vec![2,4,4,5,99, 0], &vec![]).opcodes,
            vec![2,4,4,5,99, 9801]
        );
        assert_eq!(
            address_counter(&vec![1,1,1,4,99, 5, 6, 0, 99], &vec![]).opcodes,
            vec![30,1,1,4,2,5,6,0,99]
        );
    }
    // Day 5: immediate mode, input/output and comparison instructions.
    #[test]
    fn day5_example() {
        assert_eq!(
            address_counter(&vec![1101, 100, -1, 4, 0], &vec![]).opcodes,
            vec![1101, 100, -1, 4, 99]
        );
        assert_eq!(
            address_counter(&vec![3,9,8,9,10,9,4,9,99,-1,8], &vec![8]).output,
            vec![1]
        );
        assert_eq!(
            address_counter(&vec![3,9,8,9,10,9,4,9,99,-1,8], &vec![1]).output,
            vec![0]
        );
        // Larger example: outputs 999/1000/1001 for input <8 / ==8 / >8.
        let input: Vec<i64> = vec![
            3,21,1008,21,8,20,1005,20,22,107,8,21,20,1006,20,31,
            1106,0,36,98,0,0,1002,21,125,20,4,20,1105,1,46,104,
            999,1105,1,46,1101,1000,1,20,4,20,1105,1,46,98,99];
        assert_eq!(
            address_counter(&input, &vec![7]).output,
            vec![999]
        );
        assert_eq!(
            address_counter(&input, &vec![8]).output,
            vec![1000]
        );
        assert_eq!(
            address_counter(&input, &vec![9]).output,
            vec![1001]
        );
    }
    // "equal to 8" comparison program outputs 1 when the input matches.
    #[test]
    fn day5_equal_to_eight(){
        assert_eq!(
            address_counter(&vec![3,9,8,9,10,9,4,9,99,-1,8], &vec![8]).output,
            vec![1]
        );
    }
    // Day 9: relative-base addressing and big-number support.
    #[test]
    fn day9_relative_base(){
        // A quine: the program outputs its own source.
        let v: Vec<i64> = vec![109,1,204,-1,1001,100,1,100,1008,100,16,101,1006,101,0,99];
        assert_eq!(
            address_counter(&v, &vec![]).output,
            v
        );
        assert_eq!(
            address_counter(&vec![1102,34915192,34915192,7,4,7,99,0], &vec![]).output,
            vec![1219070632396864]
        );
        assert_eq!(
            address_counter(&vec![104,1125899906842624,99], &vec![]).output,
            vec![1125899906842624]
        );
    }
}
|
#![cfg_attr(not(feature = "std"), no_std)]
//! A circle buffer for use with [std::io::Read]
//! ```
//! # use jcirclebuffer::CircleBuffer;
//! use std::io::Read;
//! let mut some_read = std::io::Cursor::new(b"banana");
//!
//! let mut my_buf = CircleBuffer::default();
//! let read_zone: &mut [u8] = my_buf.get_fillable_area().unwrap();
//! let read_amount = Read::read(&mut some_read, read_zone).unwrap();
//!
//! assert!(my_buf.view_nocopy().is_empty());
//! my_buf.fill(read_amount);
//! assert_eq!(my_buf.view_nocopy(), b"banana");
//! my_buf.consume(2);
//! assert_eq!(my_buf.view_nocopy(), b"nana");
//! ```
//! The buffer is implemented as single unmoving memory buffer that keeps track of the "start"
//! point and occupied length. [CircleBuffer::get_fillable_area] will return the current
//! _contiguous_ fillable area. Depending on the location of the "wrap point" (the end of the
//! underlying buffer) it may be appropriate to fill the entire fillable area, then immediately
//! request a new fillable area without consuming any data.
//!
//! The example below shows how the circle buffer handles wrapping.
//! ```
//! # use jcirclebuffer::CircleBuffer;
//! # use std::io::Read;
//! let mut some_read = std::io::Cursor::new(b"abc");
//! let mut other_read = std::io::Cursor::new(b"defghijk");
//! let mut my_buf = CircleBuffer::with_size(4);
//!
//! let read_zone: &mut [u8] = my_buf.get_fillable_area().unwrap();
//! let read_amount = Read::read(&mut some_read, read_zone).unwrap();
//! my_buf.fill(read_amount);
//!
//! assert_eq!(read_amount, 3);
//! assert_eq!(my_buf.view_nocopy(), b"abc");
//! my_buf.consume(2);
//! assert_eq!(my_buf.view_nocopy(), b"c");
//! assert_eq!(my_buf.get_fillable_area().unwrap().len(), 1);
//!
//! let read_zone: &mut [u8] = my_buf.get_fillable_area().unwrap();
//! let read_amount = Read::read(&mut other_read, read_zone).unwrap();
//! assert_eq!(read_amount, 1);
//! my_buf.fill(read_amount);
//! assert_eq!(my_buf.get_fillable_area().unwrap(), b"ab");
//! ```
//! If you want to view a contiguous version of the possibly discontiguous data in the buffer,
//! you can use [CircleBuffer::view]. This will show contiguous data in-place but will perform
//! a copy if the desired data crosses the "wrap point"
//! ```
//! # use jcirclebuffer::CircleBuffer;
//! # use std::io::Read;
//! let mut some_read = std::io::Cursor::new(b"abcdefghijk");
//! let mut my_buf = CircleBuffer::with_size(4);
//!
//! let read_zone = my_buf.get_fillable_area().unwrap();
//! let read_amount = Read::read(&mut some_read, read_zone).unwrap();
//! my_buf.fill(read_amount);
//! my_buf.consume(1);
//! let read_zone = my_buf.get_fillable_area().unwrap();
//! let read_amount = Read::read(&mut some_read, read_zone).unwrap();
//! my_buf.fill(read_amount);
//!
//! // Underlying memory layout is b"ebcd"
//! assert_eq!(my_buf.view_parts(4), (&b"bcd"[..], &b"e"[..]));
//! my_buf.view(4, |data| assert_eq!(data, b"bcde")); // requires feature "std"
//! ```
//! You can keep a circle buffer entirely on the stack using [CircleBuffer::new]:
//! ```
//! # use jcirclebuffer::CircleBuffer;
//! CircleBuffer::new([0; 4]); // Does not require feature "std"
//! ```
/// A circle buffer based on an unmoving underlying buffer.
pub struct CircleBuffer<T> {
    // Index of the first byte of meaningful data.
    start: usize,
    // Number of meaningful bytes currently stored (may wrap past the end).
    len: usize,
    // The backing storage; never reallocated or moved.
    buf: T,
}
#[cfg(feature = "std")]
impl Default for CircleBuffer<Vec<u8>> {
    /// An easy way to get a heap allocated circle buffer. Backed by a 1MiB [std::vec::Vec]. Requires "std".
    fn default() -> Self {
        // 1 MiB = 1024 * 1024 bytes.
        Self::with_size(1_048_576)
    }
}
#[cfg(feature = "std")]
impl CircleBuffer<Vec<u8>> {
    /// Request a heap allocated circle buffer of a certain size. Requires "std".
    pub fn with_size(size: usize) -> Self {
        // Delegate to new(): it starts empty at offset 0, same as before.
        CircleBuffer::new(vec![0; size])
    }
}
impl<T> CircleBuffer<T>
where
    T: AsRef<[u8]> + AsMut<[u8]>,
{
    /// Make a circle buffer backed by a user-provided buffer. This can be used to make a stack allocated circle buffer.
    pub fn new(buf: T) -> CircleBuffer<T> {
        CircleBuffer {
            start: 0,
            len: 0,
            buf,
        }
    }
    /// Request the size of the underlying buffer. Doesn't change for the life of the circle buffer.
    pub fn size(&self) -> usize {
        self.buf.as_ref().len()
    }
    /// Indicate that a certain amount of the buffer has been filled with meaningful content.
    /// Panics if the resulting length would exceed the buffer size.
    /// Almost always used as:
    /// ```
    /// # use jcirclebuffer::CircleBuffer;
    /// # let mut my_buf = CircleBuffer::default();
    /// # let mut something = std::io::Cursor::new(b"banana");
    /// let read_zone = my_buf.get_fillable_area().unwrap();
    /// let read_amount = std::io::Read::read(&mut something, read_zone).unwrap();
    /// my_buf.fill(read_amount);
    /// ```
    pub fn fill(&mut self, amt: usize) {
        self.len = self.len.checked_add(amt).unwrap();
        assert!(self.len <= self.size());
    }
    #[cfg(feature = "std")]
    /// A convenience wrapper around get_fillable_area() -> Read::read() -> buf.fill(amt).
    /// Doesn't fill() if Read::read returns an error.
    pub fn read<U>(&mut self, reader: &mut U) -> std::io::Result<usize>
    where
        U: std::io::Read,
    {
        let read_zone = self.get_fillable_area().expect("read buffer full");
        let result = std::io::Read::read(reader, read_zone);
        if let Ok(amt) = result {
            self.fill(amt);
        }
        result
    }
    /// Copy data into the circle buffer, possibly crossing the wrap point. Does fill() automatically. Panics if capacity is not available.
    pub fn extend(&mut self, data: &[u8]) {
        // First fillable area: up to the wrap point (or up to `start`).
        let head = self.get_fillable_area().unwrap();
        let head_amt = core::cmp::min(data.len(), head.len());
        head[..head_amt].copy_from_slice(&data[..head_amt]);
        self.fill(head_amt);
        if head_amt == data.len() {
            return;
        }
        // Leftover data goes into the second fillable area after the wrap.
        let tail = self.get_fillable_area().unwrap();
        let remainder = data.len().checked_sub(head_amt).unwrap();
        tail[..remainder].copy_from_slice(&data[head_amt..]);
        self.fill(remainder);
    }
    /// The current amount of meaningful data in the buffer. fill() makes this go up, consume() makes it go down.
    pub fn len(&self) -> usize {
        self.len
    }
    /// The total amount of free space available for filling.
    pub fn available(&self) -> usize {
        self.size() - self.len()
    }
    /// len() == 0
    pub fn is_empty(&self) -> bool {
        self.len == 0
    }
    /// len() == size()
    pub fn is_full(&self) -> bool {
        self.len == self.size()
    }
    #[cfg(feature = "std")]
    /// Allows a contiguous view of potentially non-contiguous underlying data. MAY INCUR A COPY. Should only incur copies rarely if the size of the buffer is large relative to the possible message size. Requires feature "std".
    pub fn view<R>(&self, amt: usize, callback: impl FnOnce(&[u8]) -> R) -> R {
        let (head, tail) = self.view_parts(amt);
        if tail.is_empty() {
            // Data is contiguous: no allocation needed.
            return callback(head);
        }
        // Data wraps: stitch the two parts together in a temporary Vec.
        let mut view_buffer = vec![0; head.len() + tail.len()];
        view_buffer[..head.len()].copy_from_slice(head);
        view_buffer[head.len()..].copy_from_slice(tail);
        callback(&view_buffer)
    }
    /// Allows a contiguous view of potentially non-contiguous data using a user-provided buffer. May incur a copy but will not incur a heap allocation. Available without feature "std".
    pub fn view_provided<C, R>(&self, buf: &mut [u8], callback: C) -> R
    where
        C: FnOnce(&[u8]) -> R,
    {
        // buf.len() determines how much data is viewed.
        let amt = buf.len();
        let (head, tail) = self.view_parts(amt);
        if tail.is_empty() {
            return callback(head);
        }
        buf[..head.len()].copy_from_slice(head);
        buf[head.len()..].copy_from_slice(tail);
        callback(buf)
    }
    /// view_provided but mut. Changes made by the callback are reflected in the
    /// circle buffer itself when the viewed data did not cross the wrap point,
    /// and only in the provided scratch buffer when it did (the data is copied
    /// out in that case).
    pub fn view_provided_mut<C, R>(&mut self, buf: &mut [u8], callback: C) -> R
    where
        C: FnOnce(&mut [u8]) -> R,
    {
        let amt = buf.len();
        let (head, tail) = self.view_parts_mut(amt);
        if tail.is_empty() {
            return callback(head);
        }
        buf[..head.len()].copy_from_slice(head);
        buf[head.len()..].copy_from_slice(tail);
        callback(buf)
    }
    /// View potentially non-contiguous data. Will never incur a copy. Returns (head, tail). All the data will be in the head unless data crosses the wrap point.
    pub fn view_parts(&self, amt: usize) -> (&[u8], &[u8]) {
        assert!(amt <= self.len);
        let start = self.start;
        let view_end = start.checked_add(amt).unwrap();
        if view_end <= self.size() {
            // Contiguous case: one slice, empty tail.
            return (&self.buf.as_ref()[start..view_end], &[]);
        }
        // Wrapped case: head runs from `start` to the end of the buffer,
        // tail is the leftover at the front of the buffer.
        let buf = self.buf.as_ref();
        let (left, data_head) = buf.split_at(start);
        let (data_tail, _) = left.split_at(view_end % self.size());
        return (data_head, data_tail);
    }
    /// view_parts but mutable.
    pub fn view_parts_mut(&mut self, amt: usize) -> (&mut [u8], &mut [u8]) {
        assert!(amt <= self.len);
        let start = self.start;
        let view_end = start.checked_add(amt).unwrap();
        if view_end <= self.size() {
            return (&mut self.buf.as_mut()[start..view_end], &mut []);
        }
        // Wrapped case mirrors view_parts; split_at_mut gives two disjoint
        // mutable slices into the same backing buffer.
        let remainder: usize = view_end % self.size();
        let buf = self.buf.as_mut();
        let (left, data_head) = buf.split_at_mut(start);
        let (data_tail, _) = left.split_at_mut(remainder);
        return (data_head, data_tail);
    }
    /// Returns the maximum amount of meaningful contiguous data. Will never incur a copy.
    pub fn view_nocopy(&self) -> &[u8] {
        let mut view_end = self.start.checked_add(self.len).unwrap();
        if view_end > self.size() {
            // Clamp at the wrap point; wrapped data is not visible here.
            view_end = self.size();
        }
        &self.buf.as_ref()[self.start..view_end]
    }
    /// Marks data as consumed. Advances the "start" cursor by amt. If this results in the buffer being empty, moves the start cursor to 0. Does not touch the underlying buffer.
    pub fn consume(&mut self, amt: usize) {
        self.len = self.len.checked_sub(amt).unwrap();
        if self.len == 0 {
            // Resetting to 0 maximizes the next contiguous fillable area.
            self.start = 0;
        } else {
            self.start = self.start.checked_add(amt).unwrap() % self.size();
        }
    }
    /// Returns the next contiguous unused area in the underlying buffer. Returns None if the buffer is full.
    /// There are potentially two separate contiguous unused areas in the buffer at any one time. If you use up one of them (and call fill()) then you will be able to get to the other one.
    pub fn get_fillable_area(&mut self) -> Option<&mut [u8]> {
        if self.len == self.size() {
            return None;
        }
        let start = self.start;
        // One-past-the-end of the meaningful data, wrapped into the buffer.
        let end = self.start.checked_add(self.len).unwrap() % self.size();
        if end < start {
            // Free space is the gap between the wrapped end and start.
            Some(&mut self.buf.as_mut()[end..start])
        } else {
            // Free space runs from the end of the data to the wrap point.
            Some(&mut self.buf.as_mut()[end..])
        }
    }
}
// The crate is no_std unless "std" is enabled (see the cfg_attr at the top),
// so this std::io impl must be feature-gated like the other std-only items.
#[cfg(feature = "std")]
impl<T> std::io::Write for CircleBuffer<T>
where
    T: AsRef<[u8]> + AsMut<[u8]>,
{
    /// Copies as much of `data` as currently fits into the buffer.
    /// Returns Ok(bytes_copied), or an error when the buffer is full.
    fn write(&mut self, data: &[u8]) -> std::io::Result<usize> {
        let available = self.available();
        if available == 0 {
            return Err(std::io::Error::new(std::io::ErrorKind::Other, "Full"));
        }
        let amt = std::cmp::min(data.len(), available);
        self.extend(&data[..amt]);
        Ok(amt)
    }
    /// Nothing to flush: write() already placed the bytes in the buffer.
    fn flush(&mut self) -> std::io::Result<()> {
        Ok(())
    }
}
// Feature-gated for the same reason as the Write impl: std::io is not
// available when the crate builds as no_std.
#[cfg(feature = "std")]
impl<T> std::io::Read for CircleBuffer<T>
where
    T: AsRef<[u8]> + AsMut<[u8]>,
{
    /// Copies up to dest.len() buffered bytes into `dest`, consuming them.
    /// Returns Ok(0) when the buffer is empty.
    fn read(&mut self, dest: &mut [u8]) -> std::io::Result<usize> {
        let amt = std::cmp::min(self.len(), dest.len());
        // Two parts because the data may cross the wrap point.
        let parts = self.view_parts(amt);
        dest[..parts.0.len()].copy_from_slice(parts.0);
        dest[parts.0.len()..amt].copy_from_slice(parts.1);
        self.consume(amt);
        Ok(amt)
    }
}
#[cfg(test)]
mod tests {
    use super::CircleBuffer;
    // End-to-end exercise of fill/consume/view across the wrap point.
    #[test]
    fn circle_buffer_tests() {
        let mut read = std::io::Cursor::new(b"abcdefghijklmnopqrstuvwxyz");
        let mut circle_buffer = CircleBuffer::new([0u8; 4]);
        assert!(circle_buffer.is_empty());
        let read_size = circle_buffer.read(&mut read).unwrap();
        assert_eq!(read_size, 4);
        assert_eq!(circle_buffer.len(), read_size);
        assert_eq!(circle_buffer.view_nocopy(), b"abcd");
        assert_eq!(circle_buffer.view_parts(4), (&b"abcd"[..], &b""[..]));
        assert!(circle_buffer.is_full());
        assert!(circle_buffer.get_fillable_area().is_none());
        circle_buffer.view(4, |data| assert_eq!(data, b"abcd"));
        let mut view_buf = [0u8; 4];
        circle_buffer.view_provided(&mut view_buf, |data| assert_eq!(data, b"abcd"));
        circle_buffer.consume(2);
        assert_eq!(circle_buffer.view_nocopy(), b"cd");
        // This read wraps: "ef" lands at the front of the backing array.
        let read_size = circle_buffer.read(&mut read).unwrap();
        assert_eq!(read_size, 2);
        assert_eq!(circle_buffer.view_parts(4), (&b"cd"[..], &b"ef"[..]));
        circle_buffer.view(4, |data| assert_eq!(data, b"cdef"));
        let mut view_buf = [0u8; 4];
        circle_buffer.view_provided(&mut view_buf[..], |data| assert_eq!(data, b"cdef"));
        // view_nocopy stops at the wrap point, so only "cd" is visible.
        assert_eq!(circle_buffer.view_nocopy(), b"cd");
        circle_buffer.consume(4);
        // Heap-backed buffer via Default (1 MiB).
        let mut big_buffer = CircleBuffer::default();
        assert_eq!(big_buffer.read(&mut read).unwrap(), 20);
        big_buffer.extend(b"banana");
        big_buffer.consume(20);
        big_buffer.view(6, |x| assert_eq!(x, b"banana"));
        // std::io::Read across the wrap point.
        let mut buffer = CircleBuffer::new([0u8; 4]);
        buffer.extend(b"abcd");
        buffer.consume(2);
        buffer.extend(b"ef");
        let mut read_buf = [0u8; 3];
        let result = std::io::Read::read(&mut buffer, &mut read_buf).unwrap();
        assert_eq!(result, 3);
        assert_eq!(b"cde", &read_buf);
        assert_eq!(buffer.len(), 1);
    }
}
|
use {user_fn, free_fn, ast_fn, Context, State};
// Keep these here for when you want to build huge changes
// pub fn load_all<T>(_: T) {}
// pub fn load_debug<T>(_: T) {}
pub mod arithmetic;
pub mod math;
pub mod core;
pub mod types;
pub mod list;
pub mod logical;
pub mod map;
pub mod debugger;
pub mod option;
pub mod util {
    use {AresError, AresResult};
    /// Checks an argument list's length against the predicate `expected`.
    /// On failure, produces an `UnexpectedArity` error that carries
    /// `expect_str` as the human-readable expectation.
    // Fixed: the `S: Into<String>` bound was declared twice (inline on the
    // generic parameter and again in the where clause).
    pub fn expect_arity<F, S, T>(slice: &[T],
                                 expected: F,
                                 expect_str: S)
                                 -> AresResult<()>
        where S: Into<String>,
              F: FnOnce(usize) -> bool
    {
        let len = slice.len();
        if expected(len) {
            Ok(())
        } else {
            Err(AresError::UnexpectedArity {
                // NOTE(review): `as u16` truncates for slices longer than
                // 65535 — fine for argument lists, but worth knowing.
                found: len as u16,
                expected: expect_str.into(),
            })
        }
    }
}
// Evaluates `src` in `ctx` using a placeholder state reference.
//
// NOTE(review): `mem::uninitialized::<&mut S>()` is undefined behaviour —
// references must never be uninitialized or null, regardless of whether the
// referent is ever touched. This only appears to work if `ctx.load` /
// `eval_str` never dereference the state; a sound fix needs a design change
// (e.g. a stateless context or Option<&mut S>) — TODO confirm with callers.
fn eval_into<S: State + ?Sized, P: AsRef<str>>(src: &P, ctx: &mut Context<S>) {
    use std::mem::uninitialized;
    let mut dummy: &mut S = unsafe { uninitialized() };
    let mut ctx = ctx.load(dummy);
    // Panics if the source fails to evaluate.
    ctx.eval_str(src.as_ref()).unwrap();
}
/// Installs every builtin group into `ctx` (the debugger is loaded
/// separately via `load_debug`).
pub fn load_all<S: State + ?Sized>(ctx: &mut Context<S>) {
    load_core(ctx);
    load_option(ctx);
    load_logical(ctx);
    load_list(ctx);
    load_math(ctx);
    load_arithmetic(ctx);
    load_map(ctx);
    load_types(ctx);
}
/// Registers the interactive `debugger` builtin.
pub fn load_debug<S: State + ?Sized>(ctx: &mut Context<S>) {
    ctx.set_fn("debugger", ast_fn("debugger", self::debugger::debugger));
}
/// Registers the `hash-map` constructor.
pub fn load_map<S: State + ?Sized>(ctx: &mut Context<S>) {
    ctx.set_fn("hash-map", ast_fn("hash-map", self::map::hash_map));
}
/// Registers the boolean operators `and`, `or` and `xor`.
pub fn load_logical<S: State + ?Sized>(ctx: &mut Context<S>) {
    ctx.set_fn("and", ast_fn("and", self::logical::and));
    ctx.set_fn("or", ast_fn("or", self::logical::or));
    ctx.set_fn("xor", ast_fn("xor", self::logical::xor));
}
/// Registers option constructors (`some`, `none`) and `unwrap`.
pub fn load_option<S: State + ?Sized>(ctx: &mut Context<S>) {
    ctx.set_fn("some", free_fn("some", self::option::some));
    ctx.set_fn("none", free_fn("none", self::option::none));
    ctx.set_fn("unwrap", free_fn("unwrap", self::option::unwrap));
}
/// Registers the language core: evaluation, quoting, binding and lambda
/// special forms. `ast_fn` entries receive unevaluated syntax; `user_fn`
/// entries receive evaluated arguments.
pub fn load_core<S: State + ?Sized>(ctx: &mut Context<S>) {
    ctx.set_fn("eval", user_fn("eval", self::core::eval));
    ctx.set_fn("apply", user_fn("apply", self::core::apply));
    ctx.set_fn("quote", ast_fn("quote", self::core::quote));
    ctx.set_fn("quasiquote", ast_fn("quasiquote", self::core::quasiquote));
    ctx.set_fn("macroexpand",
               user_fn("macroexpand", self::core::macroexpand));
    // Bare unquotes outside a quasiquote are reported as errors.
    ctx.set_fn("unquote", ast_fn("unquote", self::core::unquote_error));
    ctx.set_fn("unquote-splicing",
               ast_fn("unquote-splicing", self::core::unquote_error));
    ctx.set_fn("if", ast_fn("if", self::core::cond));
    // `lett` — `let` is a Rust keyword, hence the implementation name.
    ctx.set_fn("let", ast_fn("let", self::core::lett));
    ctx.set_fn("set", ast_fn("set", self::core::set));
    ctx.set_fn("define", ast_fn("define", self::core::define));
    ctx.set_fn("define-macro",
               ast_fn("define-macro", self::core::define_macro));
    ctx.set_fn("lambda", ast_fn("lambda", self::core::lambda));
    ctx.set_fn("gensym", user_fn("gensym", self::core::gensym));
}
/// Registers list builtins. `build-list` and `for-each` are native; the
/// remaining helpers (`list`, `map`, `fold-left`, `filter`, `flatten`,
/// `concat`) are defined in the interpreted language itself via `eval_into`,
/// so `load_core` must have run first for `define`/`lambda` to exist.
pub fn load_list<S: State + ?Sized>(ctx: &mut Context<S>) {
    {
        ctx.set_fn("build-list", ast_fn("build-list", self::list::build_list));
        ctx.set_fn("for-each", user_fn("for-each", self::list::foreach));
    }
    eval_into(&format!("(define list {})", self::list::LIST), ctx);
    eval_into(&format!("(define map {})", self::list::MAP), ctx);
    eval_into(&format!("(define fold-left {})", self::list::FOLD_LEFT),
              ctx);
    eval_into(&format!("(define filter {})", self::list::FILTER), ctx);
    eval_into(&format!("(define flatten {})", self::list::FLATTEN), ctx);
    eval_into(&format!("(define concat {})", self::list::CONCAT), ctx);
}
/// Registers equality and arithmetic. Names suffixed with `.` are the
/// float variants of the integer operators.
pub fn load_arithmetic<S: State + ?Sized>(ctx: &mut Context<S>) {
    // `=` lives in core; registered here alongside the other operators.
    ctx.set_fn("=", free_fn("=", self::core::equals));
    ctx.set_fn("+", free_fn("+", self::arithmetic::add_ints));
    ctx.set_fn("+.", free_fn("+.", self::arithmetic::add_floats));
    ctx.set_fn("-", free_fn("-", self::arithmetic::sub_ints));
    ctx.set_fn("-.", free_fn("-.", self::arithmetic::sub_floats));
    ctx.set_fn("*", free_fn("*", self::arithmetic::mul_ints));
    ctx.set_fn("*.", free_fn("*.", self::arithmetic::mul_floats));
    ctx.set_fn("/", free_fn("/", self::arithmetic::div_ints));
    ctx.set_fn("/.", free_fn("/.", self::arithmetic::div_floats));
}
/// Registers math builtins — apparently thin wrappers over the
/// correspondingly named Rust numeric methods (see `self::math`); float
/// classification, rounding, exp/log, trigonometry, and integer bit
/// operations.
pub fn load_math<S: State + ?Sized>(ctx: &mut Context<S>) {
    ctx.set_fn("nan?", free_fn("nan?", self::math::is_nan));
    ctx.set_fn("infinite?", free_fn("infinite?", self::math::is_infinite));
    ctx.set_fn("finite?", free_fn("finite?", self::math::is_finite));
    ctx.set_fn("normal?", free_fn("normal?", self::math::is_normal));
    ctx.set_fn("floor", free_fn("floor", self::math::floor));
    ctx.set_fn("ceil", free_fn("ceil", self::math::ceil));
    ctx.set_fn("round", free_fn("round", self::math::round));
    ctx.set_fn("trunc", free_fn("trunc", self::math::trunc));
    ctx.set_fn("fract", free_fn("fract", self::math::fract));
    ctx.set_fn("sign_positive?",
               free_fn("sign_positive?", self::math::is_sign_positive));
    ctx.set_fn("sign_negative?",
               free_fn("sign_negative?", self::math::is_sign_negative));
    ctx.set_fn("recip", free_fn("recip", self::math::recip));
    ctx.set_fn("sqrt", free_fn("sqrt", self::math::sqrt));
    ctx.set_fn("exp", free_fn("exp", self::math::exp));
    ctx.set_fn("exp2", free_fn("exp2", self::math::exp2));
    ctx.set_fn("ln", free_fn("ln", self::math::ln));
    ctx.set_fn("log2", free_fn("log2", self::math::log2));
    ctx.set_fn("log10", free_fn("log10", self::math::log10));
    ctx.set_fn("->degrees", free_fn("->degrees", self::math::to_degrees));
    ctx.set_fn("->radians", free_fn("->radians", self::math::to_radians));
    ctx.set_fn("cbrt", free_fn("cbrt", self::math::cbrt));
    ctx.set_fn("sin", free_fn("sin", self::math::sin));
    ctx.set_fn("cos", free_fn("cos", self::math::cos));
    ctx.set_fn("tan", free_fn("tan", self::math::tan));
    ctx.set_fn("asin", free_fn("asin", self::math::asin));
    ctx.set_fn("acos", free_fn("acos", self::math::acos));
    ctx.set_fn("atan", free_fn("atan", self::math::atan));
    ctx.set_fn("exp_m1", free_fn("exp_m1", self::math::exp_m1));
    ctx.set_fn("ln_1p", free_fn("ln_1p", self::math::ln_1p));
    ctx.set_fn("sinh", free_fn("sinh", self::math::sinh));
    ctx.set_fn("cosh", free_fn("cosh", self::math::cosh));
    ctx.set_fn("tanh", free_fn("tanh", self::math::tanh));
    ctx.set_fn("asinh", free_fn("asinh", self::math::asinh));
    ctx.set_fn("acosh", free_fn("acosh", self::math::acosh));
    ctx.set_fn("atanh", free_fn("atanh", self::math::atanh));
    // Integer bit-level helpers.
    ctx.set_fn("count_ones", free_fn("count_ones", self::math::count_ones));
    ctx.set_fn("count_zeros",
               free_fn("count_zeros", self::math::count_zeros));
    ctx.set_fn("leading_zeros",
               free_fn("leading_zeros", self::math::leading_zeros));
    ctx.set_fn("trailing_zeros",
               free_fn("trailing_zeros", self::math::trailing_zeros));
    ctx.set_fn("swap_bytes", free_fn("swap_bytes", self::math::swap_bytes));
    ctx.set_fn("->big-endian", free_fn("->big-endian", self::math::to_be));
    ctx.set_fn("->little-endian",
               free_fn("->little-endian", self::math::to_le));
    ctx.set_fn("abs", free_fn("abs", self::math::abs));
    ctx.set_fn("signum", free_fn("signum", self::math::signum));
    ctx.set_fn("positive?", free_fn("positive?", self::math::is_positive));
    ctx.set_fn("negative?", free_fn("negative?", self::math::is_negative));
}
/// Registers type conversion (`->int`, `->float`, ...) and type predicate
/// (`int?`, `float?`, ...) builtins.
pub fn load_types<S: State + ?Sized>(ctx: &mut Context<S>) {
    ctx.set_fn("->int", free_fn("->int", self::types::to_int));
    ctx.set_fn("->float", free_fn("->float", self::types::to_float));
    ctx.set_fn("->string", user_fn("->string", self::types::to_string));
    ctx.set_fn("->bool", free_fn("->bool", self::types::to_bool));
    ctx.set_fn("int?", free_fn("int?", self::types::is_int));
    ctx.set_fn("float?", free_fn("float?", self::types::is_float));
    ctx.set_fn("bool?", free_fn("bool?", self::types::is_bool));
    ctx.set_fn("string?", free_fn("string?", self::types::is_string));
    ctx.set_fn("list?", free_fn("list?", self::types::is_list));
    ctx.set_fn("lambda?", free_fn("lambda?", self::types::is_lambda));
    ctx.set_fn("foreign-fn?",
               free_fn("foreign-fn?", self::types::is_foreign_fn));
    // NOTE(review): unlike the other predicates, "executable" is registered
    // without a trailing `?` although it wraps `is_executable` — confirm
    // whether this naming is intentional.
    ctx.set_fn("executable",
               free_fn("executable", self::types::is_executable));
    ctx.set_fn("option?", free_fn("option?", self::types::is_option));
    ctx.set_fn("some?", free_fn("some?", self::types::is_some));
    ctx.set_fn("none?", free_fn("none?", self::types::is_none));
}
|
mod builder;
mod genesis;
pub mod handlers;
pub mod helpers;
pub mod networker;
mod pool;
pub mod requests;
mod runner;
mod types;
pub use self::builder::PoolBuilder;
pub use self::genesis::PoolTransactions;
pub use self::pool::{LocalPool, Pool, PoolImpl, SharedPool};
pub use self::requests::{RequestResult, RequestTarget, TimingResult};
pub use self::runner::PoolRunner;
pub use self::types::{LedgerType, NodeKeys, ProtocolVersion, Verifiers};
|
use kiss3d::camera::ArcBall;
use kiss3d::light::Light;
use kiss3d::nalgebra::{Point3, Translation3, UnitQuaternion, Vector3};
use kiss3d::scene::SceneNode;
use kiss3d::window::Window;
// --- Cart-pole model constants ---
// The pole is parameterized by its half length.
const POLE_HALF_LENGTH: f32 = 1.0;
const POLE_LENGTH: f32 = 2.0 * POLE_HALF_LENGTH;
// Vertical offset of the pole's centre above the cart (small 0.11 gap).
const POLE_Z_SHIFT: f32 = POLE_HALF_LENGTH + 0.11;
// Masses (units unspecified in SOURCE).
const POLE_MASS: f32 = 0.5;
const CART_MASS: f32 = 1.0;
const ALL_MASS: f32 = CART_MASS + POLE_MASS;
// Derived products used by the dynamics equations below.
const POLE_MASS_LENGTH: f32 = POLE_MASS * POLE_HALF_LENGTH; // m*l
const POLE_MASS_LENGTH_2: f32 = POLE_MASS_LENGTH * POLE_HALF_LENGTH; // m*l^2
const POLE_MASS_2_LENGTH_2: f32 = POLE_MASS_LENGTH * POLE_MASS_LENGTH; // m^2*l^2
// NOTE(review): named like an inertia but computed as m*l/3 rather than the
// usual m*l^2/3; numerically identical only because POLE_HALF_LENGTH == 1.0
// — confirm the intended formula before changing the pole length.
const POLE_X_INERTIA: f32 = POLE_MASS_LENGTH / 3.0;
const INERTIA_ALL_MASS: f32 = POLE_X_INERTIA * ALL_MASS;
// Gravitational acceleration.
const G: f32 = 9.81;
use std::f32::consts::{PI, TAU};
/// kiss3d visualization of the cart-pole: window, camera and the three
/// scene nodes (cart cube, pole rod, ground plane).
pub struct KissScene {
    camera: ArcBall,
    window: Window,
    cart: SceneNode,
    pole: SceneNode,
    // Kept alive so the ground stays in the scene; never mutated after setup.
    _ground: SceneNode,
}
impl Default for KissScene {
fn default() -> Self {
let eye = Point3::<f32>::new(10.0, 10.0, 5.0);
let look_at = Point3::<f32>::new(0.1, 0.1, 0.1);
let mut camera = ArcBall::new(eye, look_at);
camera.set_up_axis(Vector3::<f32>::z());
let mut window = Window::new("Pole-cart");
window.set_background_color(0.9, 0.9, 0.9);
window.set_light(Light::StickToCamera);
let mut ground = window.add_cube(100.0, 100.0, 0.1);
ground.set_local_translation(Translation3::<f32>::new(0.0, 0.0, -1.0));
let mut cart = window.add_cube(0.8, 1.2, 0.2);
cart.set_color(0.5, 0.1, 0.7);
let mut pole = window.add_cube(0.05, 0.05, POLE_LENGTH);
pole.set_color(0.1, 0.5, 0.4);
pole.set_local_translation(Translation3::<f32>::new(0.0, 0.0, POLE_Z_SHIFT));
Self {
camera,
window,
cart,
pole,
_ground: ground,
}
}
}
impl KissScene {
    /// Draws one frame for the given simulation `state`; the return value is
    /// whatever `render_with_camera` reports (presumably `false` once the
    /// window was closed — confirm against the kiss3d docs).
    pub fn render(&mut self, state: &State) -> bool {
        // The cart slides along the world y axis only.
        self.cart
            .set_local_translation(Translation3::new(0.0, state.cart_position, 0.0));
        let (sa, ca) = state.pole_angle.sin_cos();
        // Pole centre = cart position plus the pivot offset POLE_Z_SHIFT
        // rotated by the current pole angle (rotation about the x axis).
        let pole_y_pos = state.cart_position - POLE_Z_SHIFT * sa;
        let pole_z_pos = POLE_Z_SHIFT * ca;
        self.pole
            .set_local_translation(Translation3::new(0.0, pole_y_pos, pole_z_pos));
        self.pole
            .set_local_rotation(UnitQuaternion::<f32>::from_axis_angle(
                &Vector3::x_axis(),
                state.pole_angle,
            ));
        // Debug aid: draw an RGB axis triad anchored at the cart.
        let cart_pos = self.cart.data().local_translation().vector;
        let cart_rot = self.cart.data().local_rotation();
        let origin = &Point3::from(cart_pos);
        let end_i = &Point3::from(cart_pos + cart_rot * Vector3::<f32>::x());
        let end_j = &Point3::from(cart_pos + cart_rot * Vector3::<f32>::y());
        let end_k = &Point3::from(cart_pos + cart_rot * Vector3::<f32>::z());
        self.window
            .draw_line(origin, end_i, &Point3::new(1.0, 0.0, 0.0));
        self.window
            .draw_line(origin, end_j, &Point3::new(0.0, 1.0, 0.0));
        self.window
            .draw_line(origin, end_k, &Point3::new(0.0, 0.0, 1.0));
        self.window.render_with_camera(&mut self.camera)
    }
}
/// Full state of the cart-pole system; all quantities start at zero.
#[derive(Default)]
pub struct State {
    /// Cart position along its track.
    pub cart_position: f32,
    /// Cart linear velocity.
    pub cart_velocity: f32,
    /// Pole angle in radians, wrapped to [-PI, PI) by `propagate_dynamics`.
    pub pole_angle: f32,
    /// Pole angular velocity.
    pub pole_angular_velocity: f32,
}
// TODO add drag
//pub struct Parameters {
// pub pole_drag_coefficient: f32,
// pub cart_drag_coefficient: f32,
//}
impl State {
    /// Advances the cart-pole dynamics by one explicit Euler step of size
    /// `dt`, with the horizontal `input_force` applied to the cart. The pole
    /// angle is kept wrapped to [-PI, PI).
    pub fn propagate_dynamics(&mut self, input_force: f32, dt: f32) {
        let (sin_a, cos_a) = self.pole_angle.sin_cos();
        // Common denominator shared by both accelerations.
        let denom =
            INERTIA_ALL_MASS + POLE_MASS_LENGTH_2 * (CART_MASS + POLE_MASS * sin_a * sin_a);
        // Centrifugal-style contribution from the rotating pole.
        let centrifugal = POLE_MASS_LENGTH * self.pole_angular_velocity.powi(2) * sin_a;
        let linear_acc_numerator = (POLE_X_INERTIA + POLE_MASS_LENGTH_2)
            * (input_force + centrifugal)
            - G * POLE_MASS_2_LENGTH_2 * sin_a * cos_a;
        let angular_acc_numerator =
            -POLE_MASS_LENGTH * (input_force * cos_a + centrifugal * cos_a - ALL_MASS * G * sin_a);
        // Integrate positions with the current velocities, then velocities.
        self.cart_position += self.cart_velocity * dt;
        self.cart_velocity += linear_acc_numerator * dt / denom;
        self.pole_angle += self.pole_angular_velocity * dt;
        self.pole_angular_velocity += angular_acc_numerator * dt / denom;
        // Wrap the angle back into [-PI, PI).
        if self.pole_angle < -PI {
            self.pole_angle += TAU;
        } else if self.pole_angle >= PI {
            self.pole_angle -= TAU;
        }
    }
}
|
//! Combat must happen AFTER states have been calculated so that we aren't in combat with a
//! unit that got cleaned up at the end of the last loop
// TODO unit timers on how fast combat should happen
use bevy::prelude::*;
use crate::*;
/// Applies one round of melee damage for every unit currently in the
/// `Melee` state with a locked-on target, emitting `UnitDied` once a
/// target's health drops below zero. Does nothing while the game is paused.
pub fn unit_melee_system(
    mut unit_events: ResMut<Events<UnitInteractionEvent>>,
    game_speed: Res<GameSpeed>,
    unit_query: Query<(&UnitComponent, &CombatComponent)>,
    mut health_query: Query<&mut HealthComponent>,
    target_query: Query<&CombatComponent>
) {
    if game_speed.is_paused() {
        return;
    }
    for (attacker, attacker_combat) in unit_query.iter() {
        if let UnitState::Melee(Some(target)) = attacker.state {
            let mut target_health =
                health_query.get_component_mut::<HealthComponent>(target).unwrap();
            let target_combat =
                target_query.get_component::<CombatComponent>(target).unwrap();
            // Roll to hit first; a miss deals no damage at all.
            if !calc_melee_hit(attacker_combat, &target_combat) {
                continue;
            }
            target_health.current_health -= calc_damage(attacker_combat, &target_combat);
            if target_health.current_health < 0.0 {
                log::info!("unit dead!");
                unit_events.send(UnitInteractionEvent::UnitDied(target));
            }
        }
    }
}
/// Fires one missile volley for every unit in a firing state with a target:
/// consumes ammo, applies damage unconditionally (no to-hit roll), and emits
/// `UnitDied` once the target's health drops below zero. Does nothing while
/// the game is paused.
pub fn unit_missile_system(
    mut unit_events: ResMut<Events<UnitInteractionEvent>>,
    game_speed: Res<GameSpeed>,
    mut unit_query: Query<(&UnitComponent, &CombatComponent, &mut MissileWeaponComponent)>,
    mut health_query: Query<&mut HealthComponent>,
    target_query: Query<&CombatComponent>,
) {
    if game_speed.is_paused() {
        return;
    }
    for (shooter, shooter_combat, mut weapon) in unit_query.iter_mut() {
        if let UnitState::Firing(Some(target)) | UnitState::FiringAndMoving(Some(target)) =
            shooter.state
        {
            // Being in a firing state implies ammo was available.
            debug_assert!(weapon.is_missile_attack_available());
            weapon.use_ammo();
            let mut target_health =
                health_query.get_component_mut::<HealthComponent>(target).unwrap();
            let target_combat =
                target_query.get_component::<CombatComponent>(target).unwrap();
            target_health.current_health -= calc_damage(shooter_combat, &target_combat);
            if target_health.current_health < 0.0 {
                log::info!("unit dead!");
                unit_events.send(UnitInteractionEvent::UnitDied(target));
            }
        }
    }
}
/// TODO make deterministic
/// AP damage always lands in full. The armour roll soaks a random 0-100% of
/// the target's base armour value out of the source's normal attack damage.
/// NOTE(review): a high armour roll can make the normal portion negative,
/// i.e. heal the target — confirm whether that is intended.
fn calc_damage(source: &CombatComponent, target: &CombatComponent) -> f32 {
    let armour_soak = target.armour * rand::random::<f32>();
    source.normal_damage - armour_soak + source.ap_damage
}
/// TODD make determinisic
/// melee attack and melee defence are independantly rolled, and if roll_attack is higher
/// a hit is scored
fn calc_melee_hit(source: &CombatComponent, target: &CombatComponent) -> bool {
source.melee_attack * rand::random::<f32>() > target.melee_defence * rand::random::<f32>()
} |
//! # The level (`*.lvl`) file format
//!
//! This module can be used to read the level file format
//! used in the game LEGO Universe.
pub mod file;
pub mod parser;
pub mod reader;
|
use std::{convert::TryInto, io::Write};
use anyhow::Result;
use byteorder::{LittleEndian, WriteBytesExt};
use pasture_core::nalgebra::Vector3;
use super::BitAttributes;
/// Writes the given world space position as a LAS position to the given `writer`
///
/// Each world-space coordinate is mapped onto the file's local integer grid
/// by subtracting the header offset and dividing by the header scale factor.
///
/// # Panics
/// Panics (via `expect`) when a scaled coordinate does not fit into an
/// `i32`, i.e. the position is out of bounds for the current offset/scale.
pub(crate) fn write_position_as_las_position<T: Write>(
    world_space_position: &Vector3<f64>,
    las_header: &las::raw::Header,
    mut writer: T,
) -> Result<()> {
    // Shared per-axis conversion: world coordinate -> scaled local i32.
    let to_local = |value: f64, offset: f64, scale: f64| -> i32 {
        (((value - offset) / scale) as i64)
            .try_into()
            .expect("write_position_as_las_position: Position is out of bounds given the current LAS offset and scale!")
    };
    writer.write_i32::<LittleEndian>(to_local(
        world_space_position.x,
        las_header.x_offset,
        las_header.x_scale_factor,
    ))?;
    writer.write_i32::<LittleEndian>(to_local(
        world_space_position.y,
        las_header.y_offset,
        las_header.y_scale_factor,
    ))?;
    writer.write_i32::<LittleEndian>(to_local(
        world_space_position.z,
        las_header.z_offset,
        las_header.z_scale_factor,
    ))?;
    Ok(())
}
/// Writes the given `BitAttributes` in LAS format to the given `writer`
pub(crate) fn write_las_bit_attributes<T: Write>(
    bit_attributes: BitAttributes,
    mut writer: T,
) -> Result<()> {
    match bit_attributes {
        BitAttributes::Regular(attrs) => {
            // Single flag byte: return number (3 bits), number of returns
            // (3 bits), scan direction flag (1 bit), edge of flight line (1 bit).
            let packed = (attrs.return_number & 0b111)
                | (attrs.number_of_returns & 0b111) << 3
                | (attrs.scan_direction_flag & 0b1) << 6
                | (attrs.edge_of_flight_line & 0b1) << 7;
            writer.write_u8(packed)?;
        }
        BitAttributes::Extended(attrs) => {
            // Extended layout spreads the flags over two bytes: returns in
            // the first, classification flags/scanner channel/direction/edge
            // in the second.
            let first_byte =
                (attrs.return_number & 0b1111) | (attrs.number_of_returns & 0b1111) << 4;
            let second_byte = (attrs.classification_flags & 0b1111)
                | (attrs.scanner_channel & 0b11) << 4
                | (attrs.scan_direction_flag & 0b1) << 6
                | (attrs.edge_of_flight_line & 0b1) << 7;
            writer.write_u8(first_byte)?;
            writer.write_u8(second_byte)?;
        }
    }
    Ok(())
}
|
/// Returns a bitboard (u64 mask) with only square `s` set, where squares
/// are numbered 1 through 64.
///
/// # Panics
/// Panics when `s` is outside the range 1..=64.
pub fn square(s: u32) -> u64 {
    if !(1..=64).contains(&s) {
        panic!("Square must be between 1 and 64");
    }
    1u64 << (s - 1)
}
/// Returns a bitboard with all 64 squares set.
pub fn total() -> u64 {
    // `u64::MAX` is the modern spelling of the deprecated `max_value()`.
    u64::MAX
}
|
use rocket::{ Rocket, Request, Data, Response };
use rocket::http::{ Method, Status, ContentType };
use rocket::fairing::{ Fairing, Info, Kind };
/// Rocket fairing that logs the launch configuration and every response.
#[derive(Default)]
pub struct Logger {}
impl Fairing for Logger {
    fn info(&self) -> Info {
        Info {
            name: "Logger Fairing",
            // NOTE(review): Kind::Request is declared but on_request below is
            // a no-op — either log the request there or drop the flag.
            kind: Kind::Launch | Kind::Request | Kind::Response
        }
    }
    // Logs the environment stage and bind address once at startup.
    fn on_launch(&self, rocket: &Rocket) {
        let config = rocket.config();
        info!("Environment Stage: {}", config.environment.to_string());
        info!("Server launched on {}:{}", config.address, config.port);
    }
    // Currently a no-op; requests are only logged via on_response.
    fn on_request(&self, request: &mut Request<'_>, data: &Data) {
    }
    // Logs "<status> <method> <uri>" for every completed response.
    fn on_response(&self, request: &Request<'_>, response: &mut Response<'_>) {
        info!(
            "{} {} {}",
            response.status(),
            request.method(),
            request.uri()
        )
    }
}
use serde::{Deserialize, Serialize};
// Topology shows us the current state of the overall Nym network
/// Snapshot of the overall Nym network: all known validators, mix nodes and
/// service providers.
#[derive(Serialize, Deserialize, Debug)]
pub struct Topology {
    pub validators: Vec<Validator>,
    pub mix_nodes: Vec<MixNode>,
    pub service_providers: Vec<ServiceProvider>,
}
/// A validator node entry. Fields mirror `MixNode`/`ServiceProvider`;
/// `last_seen` is a timestamp (unit/epoch not specified here — confirm
/// against the producing API).
#[derive(Serialize, Deserialize, Debug)]
pub struct Validator {
    host: String,
    public_key: String,
    version: String,
    last_seen: u64,
    location: String,
}
/// A mix node entry (same shape as `Validator`).
#[derive(Serialize, Deserialize, Debug)]
pub struct MixNode {
    host: String,
    public_key: String,
    version: String,
    last_seen: u64,
    location: String,
}
/// A service provider entry (same shape as `Validator`).
#[derive(Serialize, Deserialize, Debug)]
pub struct ServiceProvider {
    host: String,
    public_key: String,
    version: String,
    last_seen: u64,
    location: String,
}
|
extern crate regex;
use regex::Regex;
use std::fs::File;
use std::io::prelude::*;
/// Reads the whole file at `filepath` and returns its contents with leading
/// and trailing whitespace trimmed.
///
/// # Errors
/// Propagates any I/O error from opening or reading the file.
fn read_data(filepath: &str) -> std::io::Result<String> {
    // fs::read_to_string replaces the manual File::open + read_to_string dance.
    Ok(std::fs::read_to_string(filepath)?.trim().to_string())
}
/// Part 1: count the lines "min-max letter: password" whose letter occurs
/// between min and max times (inclusive) in the password.
fn sol1() {
    // Reading the whole file at once is wasteful but fine for this input size.
    let data = match read_data("input") {
        Ok(d) => d,
        Err(_) => panic!("Sol1's data is crap"),
    };
    let re = Regex::new(r"^(\d+)-(\d+) ([[:alpha:]]): ([[:alpha:]]+)$").unwrap();
    let valid = data
        .split('\n')
        .filter(|line| {
            let caps = re.captures(line).unwrap();
            let mincount = caps[1].parse::<usize>().unwrap();
            let maxcount = caps[2].parse::<usize>().unwrap();
            // Occurrences of the policy letter in the password.
            let occurrences = caps[4].matches(&caps[3]).count();
            (mincount..=maxcount).contains(&occurrences)
        })
        .count();
    println!("{}", valid);
}
/// Part 2: exactly one of the two 1-based positions must hold the letter
/// (an XOR of the two position checks).
fn sol2() {
    // Reading the whole file at once is wasteful but fine for this input size.
    let data = match read_data("input") {
        Ok(d) => d,
        Err(_) => panic!("Sol2's data is crap"),
    };
    let re = Regex::new(r"^(\d+)-(\d+) ([[:alpha:]]): ([[:alpha:]]+)$").unwrap();
    let valid = data
        .split('\n')
        .filter(|line| {
            let caps = re.captures(line).unwrap();
            let letter = caps[3].chars().next().unwrap();
            let first = caps[1].parse::<usize>().unwrap();
            let second = caps[2].parse::<usize>().unwrap();
            let pass = caps[4].to_string();
            let at_first = pass.chars().nth(first - 1).unwrap() == letter;
            let at_second = pass.chars().nth(second - 1).unwrap() == letter;
            // XOR: valid iff the letter is at exactly one of the positions.
            at_first != at_second
        })
        .count();
    println!("{}", valid);
}
/// Runs both puzzle parts against the `input` file in the working directory.
fn main() {
    sol1();
    sol2();
}
|
/// A link record between two entities of type `T`, carrying per-direction
/// bookkeeping. The root/left/right/size field triples suggest a balanced
/// search-tree index per direction — TODO confirm against the tree code.
///
/// NOTE(review): `size_as_soucre` is presumably a typo for `size_as_source`;
/// renaming is an interface change for any field-init/pattern users, so it
/// needs a coordinated fix.
pub struct Link <T> {
    source: T,
    target: T,
    // Index bookkeeping for links grouped by their source.
    root_as_source: T,
    left_as_source: T,
    right_as_source: T,
    size_as_soucre: T,
    // Index bookkeeping for links grouped by their target.
    root_as_target: T,
    left_as_target: T,
    right_as_target: T,
    size_as_target: T
}
|
//! Experiments with parallel processing
//!
//! The provided functions focus on the possibility of
//! returning results while the parser proceeds. Sequences are processesd in
//! batches (`RecordSet`) because sending across channels has a performance
//! impact. FASTA/FASTQ records can be accessed in both the 'worker' function and
//! (after processing) a function running in the main thread.
//!
//! # Search first occurrence of a sequence pattern
//!
//! ```no_run
//! use seq_io::fastq::{Reader,Record};
//! use seq_io::parallel::read_parallel;
//!
//! let reader = Reader::from_path("seqs.fastq").unwrap();
//!
//! read_parallel(reader, 4, 2, |record_set| {
//! // this function does the heavy work
//! for (i, record) in record_set.into_iter().enumerate() {
//! // this is not very efficient code, just for demonstration
//! if let Some(pos) = record.seq().windows(3).position(|s| s == b"AAA") {
//! return Some((i, pos));
//! }
//! }
//! None
//! }, |record_sets| {
//! // This function runs in the main thread. It provides a streaming iterator over
//! // record sets and the corresponding return values from the worker function
//! // (not necessarily in the same order as in the file)
//! while let Some(result) = record_sets.next() {
//! let (record_set, found) = result.unwrap();
//! if let Some((i, pos)) = found {
//! let record = record_set.into_iter().nth(i).unwrap();
//! println!("Found AAA in record {} at position {}", record.id().unwrap(), pos);
//! // this will also stop the worker threads, although with some delay
//! return;
//! }
//! }
//! });
//! ```
//!
//! # Per-record processsing
//! The `parallel_fasta` / `parallel_fastq` functions are designed to efficiently pass
//! results for **each record** to the main thread without having to care about record sets.
//! This example filters sequences by the occurrence of a pattern:
//!
//! ```no_run
//! use seq_io::fastq::{Reader,Record};
//! use seq_io::parallel::parallel_fastq;
//! use std::fs::File;
//! use std::io::BufWriter;
//!
//! let reader = Reader::from_path("seqs.fastq").unwrap();
//! let mut writer = BufWriter::new(File::create("filtered.fastq").unwrap());
//!
//! parallel_fastq(reader, 4, 2,
//! |record, found| { // runs in worker
//! *found = record.seq().windows(3).position(|s| s == b"AAA").is_some();
//! },
//! |record, found| { // runs in main thread
//! if *found {
//! record.write(&mut writer).unwrap();
//! }
//! // Some(value) will stop the reader, and the value will be returned.
//! // In the case of never stopping, we need to give the compiler a hint about the
//! // type parameter, thus the special 'turbofish' notation is needed,
//! // hoping on progress here: https://github.com/rust-lang/rust/issues/27336
//! None::<()>
//! }).unwrap();
//! ```
use std::io;
extern crate crossbeam;
extern crate scoped_threadpool;
use std::marker::PhantomData;
use std::sync::mpsc;
/// A source that repeatedly fills a reusable `DataSet` with records.
///
/// `fill_data` returns `None` at end of input, `Some(Ok(()))` after a
/// successful fill, and `Some(Err(_))` on a read/parse error.
pub trait Reader {
    type DataSet: Send;
    type Err: Send;
    fn fill_data(&mut self, record: &mut Self::DataSet) -> Option<Result<(), Self::Err>>;
}
/// Runs `work` on record batches across `n_threads` worker threads while
/// `func` consumes the results (batch plus worker output) in the calling
/// thread.
///
/// Convenience wrapper over [`read_parallel_init`] for readers that are
/// `Send` and datasets/outputs that are `Default`; all init error types are
/// fixed to `()` so the outer `Result` can be unwrapped.
pub fn read_parallel<R, O, W, F, Out>(
    reader: R,
    n_threads: u32,
    queue_len: usize,
    work: W,
    func: F,
) -> Out
where
    R: Reader + Send,
    R::DataSet: Default,
    O: Default + Send,
    W: Send + Sync + Fn(&mut R::DataSet) -> O,
    F: FnMut(&mut ParallelRecordsets<R::DataSet, R::Err, O>) -> Out,
{
    read_parallel_init::<_, (), _, (), _, _, (), _, _, Out>(
        n_threads,
        queue_len,
        || Ok::<_, ()>(reader),
        || Ok::<_, ()>(R::DataSet::default()),
        work,
        func,
    )
    .unwrap()
}
/// This function allows initiating the reader and datasets using a closure.
/// This is more flexible and allows readers not to be `Send`.
///
/// Architecture: a background thread created inside a `crossbeam` scope owns
/// the reader and fills datasets, dispatching each filled set to a scoped
/// thread pool that runs `work`. Finished sets flow through a bounded
/// channel back to the calling thread, where `func` consumes them via
/// `ParallelRecordsets`; consumed sets are sent back for recycling.
pub fn read_parallel_init<R, E, Ri, Er, O, Di, Ed, W, F, Out>(
    n_threads: u32,
    queue_len: usize,
    reader_init: Ri,
    mut dataset_init: Di,
    work: W,
    func: F,
) -> Result<Out, E>
where
    R: Reader,
    Ri: Send + FnOnce() -> Result<R, Er>,
    Er: Send,
    E: From<Er> + From<Ed>,
    O: Send,
    Di: Send + Sync + FnMut() -> Result<R::DataSet, Ed>,
    W: Send + Sync + Fn(&mut R::DataSet) -> O,
    F: FnOnce(&mut ParallelRecordsets<R::DataSet, R::Err, O>) -> Out,
{
    // `done` carries processed sets to the consumer; `empty` recycles them.
    let (done_send, done_recv) = mpsc::sync_channel(queue_len);
    let (empty_send, empty_recv): (mpsc::SyncSender<R::DataSet>, _) = mpsc::sync_channel(queue_len);
    crossbeam::scope(|scope| {
        // Reader thread: fills datasets and hands them to the worker pool.
        let handle = scope.spawn::<_, Result<(), Er>>(move |_| {
            let mut reader = reader_init()?;
            let mut pool = scoped_threadpool::Pool::new(n_threads);
            pool.scoped(|pool_scope| {
                let work = &work;
                loop {
                    // recycle an old DataSet sent back after use
                    let mut data = if let Ok(r) = empty_recv.recv() {
                        r
                    } else {
                        // ParallelRecordsets dropped -> stop
                        return;
                    };
                    let done_send = done_send.clone();
                    if let Some(res) = reader.fill_data(&mut data) {
                        match res {
                            Ok(_) => {
                                // expensive work carried out by func()
                                pool_scope.execute(move || {
                                    let out = work(&mut data);
                                    done_send.send(Some(Ok((data, out)))).ok();
                                });
                            }
                            Err(e) => {
                                // Forward the read error, then stop reading.
                                done_send.send(Some(Err(e))).ok();
                                break;
                            }
                        }
                    } else {
                        // End of input.
                        break;
                    }
                }
                pool_scope.join_all();
                // `None` signals end-of-stream to the consumer side.
                done_send.send(None).ok();
            });
            Ok(())
        });
        // Prime the recycling channel with `queue_len` fresh datasets.
        for _ in 0..queue_len {
            if empty_send.send(dataset_init()?).is_err() {
                break;
            }
        }
        let mut rsets = ParallelRecordsets {
            empty_send,
            done_recv,
            current_recordset: dataset_init()?,
        };
        let out = func(&mut rsets);
        // Dropping closes the recycling channel, which stops the reader thread.
        ::std::mem::drop(rsets);
        handle.join().unwrap()?;
        Ok(out)
    })
    .unwrap()
}
/// Streaming handle over processed record sets, handed to the consumer
/// closure of `read_parallel`/`read_parallel_init`.
pub struct ParallelRecordsets<R, E, O>
where
    R: Send,
    E: Send,
    O: Send,
{
    // Returns consumed record sets to the reader thread for recycling.
    empty_send: mpsc::SyncSender<R>,
    // Receives processed sets; `None` marks end of the stream.
    done_recv: mpsc::Receiver<Option<Result<(R, O), E>>>,
    // The set currently borrowed out to the consumer.
    current_recordset: R,
}
impl<R, E, O> ParallelRecordsets<R, E, O>
where
    R: Send,
    E: Send,
    O: Send,
{
    /// Fetches the next processed record set together with its worker output.
    /// Returns `None` once the input is exhausted. Note that results are not
    /// necessarily in input order.
    pub fn next(&mut self) -> Option<Result<(&mut R, O), E>> {
        self.done_recv.recv().unwrap().map(move |result| {
            match result {
                Ok((r, o)) => {
                    // Swap the fresh set in, sending the previous one back
                    // for reuse by the reader thread.
                    let prev_rset = ::std::mem::replace(&mut self.current_recordset, r);
                    self.empty_send.send(prev_rset).ok(); // error: channel closed is not a problem, happens after calling stop()
                    Ok((&mut self.current_recordset, o))
                }
                Err(e) => Err(e),
            }
        })
    }
}
// Generates a pair of per-record parallel-processing entry points
// (`$name` and `$name_init`) for a concrete reader/record-set/record type.
#[macro_export]
macro_rules! parallel_record_impl {
    ($name:ident, $name_init:ident, $io_r:tt, $rdr:ty, $dataset:ty, $record:ty, $err:ty) => {
        /// Function reading records in a different thread,
        /// processing them in another worker thread
        /// and finally returning the results to the main thread.
        ///
        /// The output is passed around between threads, allowing
        /// allocations to be 'recycled'. This also means that the
        /// data must implement `Default`, and data handed to the 'work'
        /// function will receive 'old' data from earlier records which
        /// has to be overwritten.
        pub fn $name<$io_r, D, W, F, Out>(
            reader: $rdr,
            n_threads: u32,
            queue_len: usize,
            work: W,
            mut func: F,
        ) -> Result<Option<Out>, $err>
        where
            $io_r: io::Read + Send,
            D: Default + Send,
            W: Send + Sync + Fn($record, &mut D),
            F: FnMut($record, &mut D) -> Option<Out>,
        {
            $name_init(
                n_threads,
                queue_len,
                || Ok::<_, $err>(reader),
                || Ok::<_, $err>(D::default()),
                || Ok::<_, $err>(()),
                |record, record_out, _| work(record, record_out),
                |record, record_out, _| func(record, record_out),
            )
        }
        /// More customisable function doing per-record processing with
        /// closures for initialization and more options.
        ///
        /// The reader is lazily initialized in a closure (`reader_init`) and therefore does not
        /// need to implement `Send`. There is also an initializer for the output data
        /// for each record, therefore the type is not required to implement
        /// `Default` (`record_data_init`). Finally, each record set can have
        /// its own data (kind of thread local data, but actually passed around
        /// with the record set) (`rset_data_init`).
        pub fn $name_init<Ri, E, $io_r, Er, Di, D, Ed, Si, S, Es, W, F, Out>(
            n_threads: u32,
            queue_len: usize,
            reader_init: Ri,
            record_data_init: Di,
            rset_data_init: Si,
            work: W,
            mut func: F,
        ) -> Result<Option<Out>, E>
        where
            $io_r: io::Read,
            Ri: Send + FnOnce() -> Result<$rdr, Er>,
            Er: Send,
            Ed: Send,
            E: From<$err> + From<Er> + From<Ed> + From<Es>,
            Di: Fn() -> Result<D, Ed> + Send + Sync,
            D: Send,
            Si: Fn() -> Result<S, Es> + Send + Sync,
            S: Send,
            W: Send + Sync + Fn($record, &mut D, &mut S),
            F: FnMut($record, &mut D, &mut S) -> Option<Out>,
        {
            $crate::parallel::read_parallel_init::<_, E, _, _, _, _, Es, _, _, _>(
                n_threads,
                queue_len,
                || reader_init().map($crate::parallel::ReusableReader::<$rdr, (Vec<D>, S)>::new),
                || rset_data_init().map(|d| (<$dataset>::default(), (vec![], d))),
                |&mut (ref mut recordset, (ref mut out, ref mut rset_data))| {
                    let mut record_iter = recordset.into_iter();
                    //let &mut (ref mut out, ref mut rset_data): &mut (Vec<D>, Option<S>) = &mut d.1;
                    // Reuse already-allocated output slots first ...
                    for mut d in out.iter_mut().zip(&mut record_iter) {
                        work(d.1, &mut d.0, rset_data);
                    }
                    // ... then grow the Vec for the remaining records.
                    for record in record_iter {
                        out.push(record_data_init()?);
                        work(record, out.last_mut().unwrap(), rset_data);
                    }
                    Ok::<_, Ed>(())
                },
                |records| {
                    while let Some(result) = records.next() {
                        let (r, res) = result?;
                        res?;
                        let &mut (ref records, (ref mut out, ref mut rset_data)) = r;
                        // Early `Some(_)` from `func` stops the pipeline.
                        for x in records.into_iter().zip(out.iter_mut()) {
                            if let Some(out) = func(x.0, x.1, rset_data) {
                                return Ok(Some(out));
                            }
                        }
                    }
                    Ok(None)
                },
            )?
        }
    };
}
// Generate `parallel_fasta` / `parallel_fasta_init` for FASTA input.
parallel_record_impl!(
    parallel_fasta,
    parallel_fasta_init,
    R,
    fasta::Reader<R>,
    fasta::RecordSet,
    fasta::RefRecord,
    fasta::Error
);
// Generate `parallel_fastq` / `parallel_fastq_init` for FASTQ input.
parallel_record_impl!(
    parallel_fastq,
    parallel_fastq_init,
    R,
    fastq::Reader<R>,
    fastq::RecordSet,
    fastq::RefRecord,
    fastq::Error
);
/// Wrapper for `parallel::Reader` instances allowing
/// the output to be reused in order to save allocations.
/// Used by `parallel_fasta`/`parallel_fastq`
///
/// ```no_run
/// use seq_io::fastq::{Reader,Record,RecordSet};
/// use seq_io::parallel::{read_parallel,ReusableReader};
///
/// let inner = Reader::from_path("seqs.fastq").unwrap();
/// let reader = ReusableReader::new(inner);
///
/// read_parallel(reader, 4, 2, |&mut (ref record_set, ref mut out): &mut (RecordSet, Vec<bool>)| {
/// out.clear();
/// for record in record_set {
/// let found = record.seq().windows(3).position(|s| s == b"AAA").is_some();
/// out.push(found);
/// }
/// }, |record_sets| {
/// while let Some(result) = record_sets.next() {
/// let &(ref record_set, ref out) = &*result.unwrap().0;
/// for (record, found) in record_set.into_iter().zip(out) {
/// // ...
/// }
/// }
/// });
/// ```
pub struct ReusableReader<P, O>(P, PhantomData<O>);
impl<P, O> ReusableReader<P, O> {
    /// Wraps the inner reader `p`; the `O` half of each dataset is created
    /// and recycled by the parallel machinery, not by the reader.
    pub fn new(p: P) -> ReusableReader<P, O> {
        ReusableReader(p, PhantomData)
    }
}
impl<P, O> Reader for ReusableReader<P, O>
where
    P: Reader,
    O: Send,
{
    // The dataset pairs the inner reader's records with reusable output.
    type DataSet = (P::DataSet, O);
    type Err = P::Err;
    fn fill_data(&mut self, data: &mut Self::DataSet) -> Option<Result<(), P::Err>> {
        // Only the record half is refilled; the output half is left as-is
        // so previous allocations can be reused.
        self.0.fill_data(&mut data.0)
    }
}
/// Using this function currently does not work due to a
/// [compiler bug](https://github.com/rust-lang/rust/issues/42950).
///
/// `parallel_fasta`/`parallel_fastq` provide the same functionality for now
/// (implemented using `parallel_record_impl` macro)
pub fn parallel_records<R, O, W, F, Out>(
    parser: R,
    n_threads: u32,
    queue_len: usize,
    work: W,
    mut func: F,
) -> Result<Option<Out>, R::Err>
where
    R: Reader + Send,
    for<'a> &'a R::DataSet: IntoIterator,
    R::DataSet: Default,
    O: Default + Send,
    W: Send + Sync,
    W: Fn(<&R::DataSet as IntoIterator>::Item, &mut O),
    F: FnMut(<&R::DataSet as IntoIterator>::Item, &O) -> Option<Out>,
{
    // Pair each record set with a Vec of per-record outputs that gets recycled.
    let reader = ReusableReader(parser, PhantomData);
    read_parallel(
        reader,
        n_threads,
        queue_len,
        |d| {
            let mut iter = d.0.into_iter();
            let out: &mut Vec<O> = &mut d.1;
            // Reuse already-allocated output slots first ...
            for mut x in out.iter_mut().zip(&mut iter) {
                work(x.1, &mut x.0);
            }
            // ... then grow the Vec for the remaining records.
            for i in iter {
                out.push(O::default());
                work(i, out.last_mut().unwrap())
            }
        },
        |records| {
            while let Some(result) = records.next() {
                let (r, _) = result?;
                // Early `Some(_)` from `func` stops the whole pipeline.
                for x in r.0.into_iter().zip(&r.1) {
                    if let Some(out) = func(x.0, x.1) {
                        return Ok(Some(out));
                    }
                }
            }
            Ok(None)
        },
    )
}
// trait impls
use super::fasta;
// Adapter: lets the FASTA parser act as a batch source for the parallel API.
impl<R, P> Reader for fasta::Reader<R, P>
where
    R: io::Read,
    P: super::policy::BufPolicy + Send,
{
    type DataSet = fasta::RecordSet;
    type Err = fasta::Error;
    fn fill_data(&mut self, rset: &mut fasta::RecordSet) -> Option<Result<(), fasta::Error>> {
        // Delegates batch filling to the FASTA parser.
        self.read_record_set(rset)
    }
}
use super::fastq;
// Adapter: lets the FASTQ parser act as a batch source for the parallel API.
impl<R, P> Reader for fastq::Reader<R, P>
where
    R: io::Read,
    P: super::policy::BufPolicy + Send,
{
    type DataSet = fastq::RecordSet;
    type Err = fastq::Error;
    fn fill_data(&mut self, rset: &mut fastq::RecordSet) -> Option<Result<(), fastq::Error>> {
        // Delegates batch filling to the FASTQ parser.
        self.read_record_set(rset)
    }
}
|
use chrono::NaiveDateTime;
use sqlx::FromRow;
/// One submission row, mapped from the database by sqlx `FromRow`
/// (field names must match column names).
#[derive(Debug, FromRow)]
pub struct Submit {
    pub id: i64,
    pub user_id: i64,
    pub problem_id: i64,
    /// Location of the submitted source — presumably a storage path; confirm
    /// against the code that writes it.
    pub path: String,
    /// Judging status, stored as a free-form string.
    pub status: String,
    /// `None` plausibly means "not yet judged" for these three — confirm.
    pub point: Option<i32>,
    pub execution_time: Option<i32>,
    pub execution_memory: Option<i32>,
    pub compile_error: Option<String>,
    pub lang: String,
    pub created_at: NaiveDateTime,
    pub updated_at: NaiveDateTime,
    /// Soft-delete marker; `None` while the row is live.
    pub deleted_at: Option<NaiveDateTime>,
}
/// One problem row, mapped from the database by sqlx `FromRow`.
#[derive(Debug, FromRow)]
pub struct Problem {
    pub id: i64,
    pub slug: Option<String>,
    pub name: Option<String>,
    /// Contest this problem belongs to, if any.
    pub contest_id: Option<i64>,
    pub writer_user_id: i64,
    pub checker_path: Option<String>,
    pub execution_time_limit: i32,
    pub position: Option<String>,
    pub uuid: Option<String>,
    pub difficulty: String,
    /// Problem text sections (statement, constraints, I/O formats).
    pub statement: String,
    pub constraints: String,
    pub input_format: String,
    pub output_format: String,
    pub created_at: NaiveDateTime,
    pub updated_at: NaiveDateTime,
    /// Soft-delete marker; `None` while the row is live.
    pub deleted_at: Option<NaiveDateTime>,
}
/// One testcase row for a problem, mapped from the database by sqlx
/// `FromRow`.
#[derive(Debug, FromRow)]
pub struct Testcase {
    pub id: i64,
    pub problem_id: i64,
    pub name: Option<String>,
    pub input: Option<String>,
    pub output: Option<String>,
    pub explanation: Option<String>,
    pub created_at: NaiveDateTime,
    pub updated_at: NaiveDateTime,
    /// Soft-delete marker; `None` while the row is live.
    pub deleted_at: Option<NaiveDateTime>,
}
|
use std::{env, fs::{create_dir, remove_dir_all}};
use std::str;
use std::vec::Vec;
use std::fs::File;
use libc;
use quick_xml::{Reader, events::{BytesStart, Event}};
use std::{ffi::CString, io::prelude::*};
use std::sync::{Arc, Mutex, atomic::{AtomicBool, AtomicI32, Ordering}};
use curl::easy::Easy;
fn process_ovf_xml_tag(evt: &BytesStart, disk_images: &mut Vec<String>) {
if evt.name() == b"File" {
match evt.attributes().find(|ref att| { att.as_ref().unwrap().key == b"ovf:href" }) {
Some(a) => {
// println!("{}", String::from_utf8(a.unwrap().value.to_vec()).unwrap());
if let Ok(image_name) = String::from_utf8(a.unwrap().value.to_vec()) {
disk_images.push(image_name);
} else {
println!("invalid filenames inside the File tag");
}
},
None => {
panic!("Cannot find ovf:href attribute for File tag");
}
}
}
}
/// Parses the OVF descriptor at `ovf_file` and returns the disk image file
/// names referenced by its `File` tags (in document order).
///
/// Panics if the file cannot be opened or the XML is malformed.
fn parse_ovf(ovf_file: &str) -> Vec<String> {
    println!("Parsing ovf description file {}", ovf_file);
    // unwrap_or_else builds the panic message only on failure; the original
    // `expect(&format!(..))` allocated it on every call (clippy expect_fun_call).
    let mut xml = Reader::from_file(ovf_file)
        .unwrap_or_else(|_| panic!("Cannot open file {}", ovf_file));
    let mut buffer = Vec::new();
    let mut disk_images = Vec::<String>::new();
    loop {
        match xml.read_event(&mut buffer) {
            // Both Start (<File>...</File>) and Empty (<File/>) events can
            // carry a File tag; End events need no handling.
            Ok(Event::Start(ref evt)) | Ok(Event::Empty(ref evt)) => {
                process_ovf_xml_tag(evt, &mut disk_images);
            }
            Ok(Event::Eof) => break,
            Err(_) => panic!("Error parsing xml at {}", xml.buffer_position()),
            _ => {}
        }
        // Reuse the event buffer to avoid a fresh allocation per event.
        buffer.clear();
    }
    disk_images
}
/// Runs `cmd` through the platform shell via `libc::system`.
///
/// Returns `Ok(())` only when `system` reports status 0; any non-zero status
/// (including -1 when the shell itself could not be spawned) yields `Err(())`.
///
/// Panics if `cmd` contains an interior NUL byte.
fn os_system(cmd: String) -> Result<(), ()> {
    // Bind the CString so the buffer demonstrably outlives the raw pointer
    // handed to libc (avoids the temporary-CString-as-ptr footgun).
    let c_cmd = CString::new(cmd).unwrap();
    // SAFETY: `c_cmd` is a valid NUL-terminated string that stays alive for
    // the whole duration of the `system` call.
    let status = unsafe { libc::system(c_cmd.as_ptr()) };
    if status != 0 {
        Err(())
    } else {
        Ok(())
    }
}
/// Converts every extracted disk image to raw format with `qemu-img`.
///
/// A single image becomes `<prefix_name>.img`; multiple images become
/// `<prefix_name>-0.img`, `<prefix_name>-1.img`, ... in input order.
///
/// Panics (via `expect`) if any qemu-img invocation fails.
fn convert_all_images(prefix_name: &str, disk_images: Vec<String>) {
    if disk_images.len() == 1 {
        println!("Converting to {}.img", prefix_name);
        let cmd = format!("qemu-img convert -O raw {}/{} {}.img", prefix_name, disk_images[0], prefix_name);
        os_system(cmd).expect("Qemu fail to convert image");
    } else {
        // `enumerate` replaces the hand-rolled mutable counter.
        for (count, filename) in disk_images.into_iter().enumerate() {
            println!("Converting to {}-{}.img", prefix_name, count);
            let cmd = format!("qemu-img convert -O raw {}/{} {}-{}.img", prefix_name, filename, prefix_name, count);
            os_system(cmd).expect("Qemu fail to convert image");
        }
    }
}
/// Downloads a Vagrant box archive from `url` into a working directory named
/// `<vendor>-<name>/`, extracts it, converts its disk image(s) to raw `.img`
/// files, and removes the working directory again.
///
/// A custom panic hook is installed so the working directory is also cleaned
/// up when any later step panics; the previous hook is chained afterwards.
fn download_and_convert_image(vendor: &str, name: &str, url: &str) -> Result<(), String> {
    let prefix_name = Arc::new(format!("{}-{}", vendor, name));
    let prefix_name_ref = prefix_name.clone();
    let old_panic_hook = std::panic::take_hook();
    // On panic: print the payload (if it is a String), remove the working
    // dir, then delegate to the previously installed hook.
    std::panic::set_hook(Box::new(move |info| {
        if let Some(msg) = info.payload().downcast_ref::<String>() {
            println!("Error: {}", msg);
        }
        println!("Removing working dir {}", prefix_name_ref.as_ref());
        remove_dir_all(prefix_name_ref.as_ref()).unwrap();
        old_panic_hook(info);
    }));
    let filename = format!("{}/ovf", &prefix_name);
    println!("Saving temporary files to {}/", &prefix_name);
    let mut easy = Easy::new();
    easy.url(url).unwrap();
    easy.follow_location(true).unwrap();
    easy.progress(true).unwrap();
    create_dir(prefix_name.as_ref()).expect(
        &format!("Cannot create dir {}. Another pull with the same name in progress/failed?",
            prefix_name.as_ref()));
    let mut transfer = easy.transfer();
    let f = Arc::new(Mutex::new(File::create(&filename).expect("Cannot open file")));
    let has_error = Arc::new(AtomicBool::new(false));
    let has_error_ref = has_error.clone();
    let last_dlnow = AtomicI32::new(0);
    // Progress callback: print at most once per additional MiB downloaded.
    transfer.progress_function(move |dltot, dlnow, _, _| {
        let t = (dlnow / 1024. / 1024.) as i32;
        if t > last_dlnow.load(Ordering::Acquire) {
            last_dlnow.store(t, Ordering::Release);
            print!("Download Progress {:6}M/{:6}M\r", t, (dltot / 1024. / 1024.) as i32);
            std::io::stdout().flush().unwrap();
        }
        true
    }).unwrap();
    let fref = f.clone();
    // Write callback: append to the archive file; failures are flagged via
    // `has_error` because the curl write callback cannot propagate them.
    transfer.write_function(move |data| {
        if let Ok(_) = fref.lock().unwrap().write(data) {} else {
            has_error_ref.store(true, Ordering::Release);
        };
        Ok(data.len())
    }).unwrap();
    match transfer.perform() {
        Ok(_) => {
            if has_error.load(Ordering::Acquire) {
                Err("Download error".to_string())
            } else {
                // The downloaded box is a tarball: extract it, convert the
                // images listed in box.ovf, then clean up the working dir.
                let cmd = format!("tar xf {}/ovf -C {}/", prefix_name.as_ref(), prefix_name.as_ref());
                os_system(cmd).expect("Extracting box image failed");
                // println!("{}", cmd);
                convert_all_images(
                    prefix_name.as_ref(),
                    parse_ovf(&format!("{}/box.ovf", prefix_name.as_ref())));
                remove_dir_all(prefix_name.as_ref()).unwrap();
                Ok(())
            }
        }
        Err(_) => {
            remove_dir_all(prefix_name.as_ref()).unwrap();
            Err("Cannot download image file".to_string())
        }
    }
}
/// Splits a `vendor/name` repository identifier into its two components.
///
/// Panics if `repo_name` does not contain exactly one `/` separator.
///
/// The explicit lifetime of the original signature is elidable (clippy
/// `needless_lifetimes`); callers are unaffected.
fn parse_repo_name(repo_name: &str) -> (&str, &str) {
    // char pattern instead of one-char &str (clippy single_char_pattern)
    let name_vec = repo_name.split('/').collect::<Vec<&str>>();
    if name_vec.len() != 2 {
        panic!("Invalid repo name {}", repo_name);
    }
    // (vendor, name)
    (name_vec[0], name_vec[1])
}
/// Fetches box metadata (JSON) from `url`, verifies it describes
/// `vendor/name`, picks the latest version's `virtualbox` provider, and
/// downloads and converts that image. Errors are reported on stdout.
fn pull_from_url(vendor: &str, name: &str, url: &str) {
    let mut easy = Easy::new();
    let dst = Arc::new(Mutex::new(Vec::new()));
    let dst_closure = dst.clone();
    easy.url(url.as_ref()).unwrap();
    let mut transfer = easy.transfer();
    // Accumulate the whole response body in memory (metadata is small).
    transfer.write_function(|data| {
        dst_closure.lock().unwrap().extend_from_slice(data);
        Ok(data.len())
    }).unwrap();
    transfer.perform().unwrap();
    let jobj = json::parse(str::from_utf8(dst.lock().unwrap().as_ref()).unwrap());
    match jobj {
        Ok(x) => {
            // versions[0] is treated as the newest release — assumes the API
            // returns versions newest-first; TODO confirm.
            let latest_providers = &x["versions"][0]["providers"];
            if latest_providers.is_null() {
                println!("Cannot find the latest version");
                return;
            }
            // Sanity-check that the metadata describes the requested box.
            let (parsed_vendor, parsed_name) =
                parse_repo_name(x["name"].as_str().unwrap());
            if parsed_vendor != vendor || parsed_name != name {
                println!("{} is inconsistent with {}/{}",
                    latest_providers["name"].as_str().unwrap(),
                    vendor, name);
                return;
            }
            // Only the virtualbox provider's image is supported.
            for provider in latest_providers.members() {
                if provider["name"] == "virtualbox" {
                    let image_url = provider["url"].as_str().unwrap();
                    println!("Downloading {}", image_url);
                    match download_and_convert_image(vendor, name, image_url) {
                        Ok(_) => {
                            println!("Pulled {}/{}", vendor, name);
                        }
                        Err(_) => {
                            println!("Failed to pull {}/{}", vendor, name);
                        }
                    }
                    break;
                }
            }
        },
        Err(_) => {
            println!("Cannot parse {}", url);
        }
    }
}
/// Pulls `vendor/name` by pointing `pull_from_url` at the public Vagrant
/// Cloud metadata endpoint for that box.
fn pull_from_vagrant(vendor: &str, name: &str) {
    let metadata_url = format!("https://app.vagrantup.com/{}/boxes/{}", vendor, name);
    pull_from_url(vendor, name, &metadata_url);
}
/// CLI entry point for `pull`.
///
/// Usage: `pull <vendor>/<name> [metadata-url]` — with one argument the box
/// metadata is fetched from Vagrant Cloud; with two it is fetched from the
/// given URL instead.
pub fn pull() {
    let args = env::args().skip(1).collect::<Vec<_>>();
    // Fail with a clear usage message instead of the original opaque
    // index-out-of-bounds panic on `args[0]` when no argument was given.
    if args.is_empty() {
        panic!("Usage: pull <vendor>/<name> [metadata-url]");
    }
    let (vendor, name) = parse_repo_name(args[0].as_ref());
    if args.len() == 1 {
        pull_from_vagrant(vendor, name);
    } else {
        pull_from_url(vendor, name, args[1].as_ref());
    }
}
|
// Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
//! This module contains shared functions for use by tests.
use std::sync::Arc;
use hash::HashMap;
use hash::HashSet;
use ir_core::constant::Constant;
use ir_core::instr;
use ir_core::BlockId;
use ir_core::Func;
use ir_core::FuncBuilder;
use ir_core::FunctionId;
use ir_core::HasEdges;
use ir_core::Instr;
use ir_core::LocId;
use ir_core::StringInterner;
/// Given a simple string-based CFG description, create a Func that matches it.
///
/// Each array entry looks like:
///
/// ("blockname", ["call_target1", "call_target2"], ["successor1", "successor2"])
///
/// i.e. a block name, the functions that block calls (one simple call
/// instruction per target), and the names of its successor blocks
/// (0 successors => ret, 1 => jmp, 2 => conditional jmp).
///
/// Returns the built `Func` together with the `StringInterner` used for its
/// call-target names.
pub fn build_test_func<'a>(
    testcase: &[(&str, Vec<&str>, Vec<&str>)],
) -> (Func<'a>, StringInterner) {
    let mut strings = StringInterner::default();
    let func = build_test_func_with_strings(testcase, &mut strings);
    (func, strings)
}
/// Like [`build_test_func`], but interns call-target names into a
/// caller-supplied `StringInterner`.
///
/// NOTE(review): the FuncBuilder itself is constructed over a separate
/// read-only interner (`tmp_strings`) while `FunctionId`s go through
/// `strings` — confirm this split is intentional.
pub fn build_test_func_with_strings<'a>(
    testcase: &[(&str, Vec<&str>, Vec<&str>)],
    strings: &mut StringInterner,
) -> Func<'a> {
    // Create a function whose CFG matches testcase.
    let loc = LocId::NONE;
    let tmp_strings = Arc::new(StringInterner::read_only());
    FuncBuilder::build_func(Arc::clone(&tmp_strings), |fb| {
        // First pass: allocate a BlockId per named block; the first entry in
        // `testcase` becomes the entry block.
        let mut name_to_bid = HashMap::with_capacity_and_hasher(testcase.len(), Default::default());
        for (i, (name, _, _)) in testcase.iter().enumerate() {
            name_to_bid.insert(
                *name,
                if i == 0 {
                    Func::ENTRY_BID
                } else {
                    fb.alloc_bid()
                },
            );
        }
        let null_iid = fb.emit_constant(Constant::Null);
        // Second pass: fill each block with its calls and a terminator.
        for (name, call_targets, edges) in testcase {
            fb.start_block(name_to_bid[name]);
            fb.cur_block_mut().pname_hint = Some(name.to_string());
            // Resolve successor names to BlockIds, panicking on unknown names.
            let e: Vec<BlockId> = edges
                .iter()
                .map(|block_name| match name_to_bid.get(block_name) {
                    Some(&x) => x,
                    None => panic!("No such block {}", block_name),
                })
                .collect();
            for target in call_targets {
                let target = FunctionId::from_str(target, strings);
                fb.emit(Instr::simple_call(target, &[], loc));
            }
            // 0 successors => ret, 1 => jmp, 2 => conditional jmp.
            let terminator = match e.len() {
                0 => Instr::ret(null_iid, loc),
                1 => Instr::jmp(e[0], loc),
                2 => Instr::jmp_op(null_iid, instr::Predicate::NonZero, e[0], e[1], loc),
                _ => panic!("unhandled edge count"),
            };
            fb.emit(terminator);
        }
    })
}
/// Structurally compare two Funcs.
///
/// Test helper: panics with the mismatch description and a printout of both
/// functions when they differ structurally.
pub fn assert_func_struct_eq<'a>(func_a: &Func<'a>, func_b: &Func<'a>, strings: &StringInterner) {
    if let Err(e) = cmp_func_struct_eq(func_a, func_b) {
        panic!(
            "Function mismatch: {}\n{}\n{}",
            e,
            print::DisplayFunc(func_a, true, strings),
            print::DisplayFunc(func_b, true, strings)
        );
    }
}
/// `cmp_eq!(a, b, fmt, ...)` evaluates to `Ok(())` when `a == b`, otherwise
/// `Err(format!(fmt, ...))`; used with `?` by the structural comparators
/// below to bail out with a描 message on the first mismatch.
macro_rules! cmp_eq {
    ($a:expr, $b:expr, $($rest:tt)+) => {
        if $a == $b {
            Ok(())
        } else {
            Err(format!($($rest)+))
        }
    };
}
/// Structurally compare two `Func`s, returning a description of the first
/// mismatch found.
///
/// Block pairs are compared in lockstep starting from the entry blocks (plus
/// any default-value init blocks), following terminator edges.
fn cmp_func_struct_eq<'a>(func_a: &Func<'a>, func_b: &Func<'a>) -> Result<(), String> {
    // Visited set of already-compared block pairs. The original inserted
    // into this set but never consulted it, so any CFG containing a cycle
    // made the worklist loop run forever; checking before descending fixes
    // that and avoids redundant re-comparison.
    let mut block_eq: HashSet<(BlockId, BlockId)> = HashSet::default();
    let mut pending_cmp: Vec<(BlockId, BlockId)> = vec![(Func::ENTRY_BID, Func::ENTRY_BID)];
    cmp_eq!(
        func_a.params.len(),
        func_b.params.len(),
        "param length mismatch",
    )?;
    for (param_a, param_b) in func_a.params.iter().zip(func_b.params.iter()) {
        match (
            param_a.default_value.as_ref(),
            param_b.default_value.as_ref(),
        ) {
            (Some(dv_a), Some(dv_b)) => {
                pending_cmp.push((dv_a.init, dv_b.init));
            }
            (None, None) => {}
            _ => panic!("Mismatch in default value index"),
        }
    }
    while let Some((bid_a, bid_b)) = pending_cmp.pop() {
        // `insert` returns false when the pair was already present.
        if !block_eq.insert((bid_a, bid_b)) {
            continue;
        }
        cmp_block_struct_eq(func_a, bid_a, func_b, bid_b)?;
        let term_a = func_a.terminator(bid_a);
        let term_b = func_b.terminator(bid_b);
        cmp_eq!(
            term_a.edges().len(),
            term_b.edges().len(),
            "mismatched terminator edge count",
        )?;
        for (edge_a, edge_b) in term_a.edges().iter().zip(term_b.edges().iter()) {
            pending_cmp.push((*edge_a, *edge_b));
        }
    }
    Ok(())
}
/// Structurally compare one pair of blocks: parameter count, instruction
/// count, pname hint, and per-instruction variant (via `mem::discriminant`,
/// so instruction operands are NOT compared).
fn cmp_block_struct_eq<'a>(
    func_a: &Func<'a>,
    bid_a: BlockId,
    func_b: &Func<'a>,
    bid_b: BlockId,
) -> Result<(), String> {
    let block_a = func_a.block(bid_a);
    let block_b = func_b.block(bid_b);
    cmp_eq!(
        block_a.params.len(),
        block_b.params.len(),
        "block param len mismatch",
    )?;
    cmp_eq!(
        block_a.iids.len(),
        block_b.iids.len(),
        "block iids len mismatch",
    )?;
    cmp_eq!(
        &block_a.pname_hint,
        &block_b.pname_hint,
        "pname mismatch in ({}, {})",
        bid_a,
        bid_b
    )?;
    // TODO: check tcid
    for (iid_a, iid_b) in block_a
        .iids
        .iter()
        .copied()
        .zip(block_b.iids.iter().copied())
    {
        // Matching enum variants count as structurally equal instructions.
        cmp_eq!(
            std::mem::discriminant(func_a.instr(iid_a)),
            std::mem::discriminant(func_b.instr(iid_b)),
            "instr mismatch",
        )?;
    }
    Ok(())
}
|
#![doc = "generated by AutoRust 0.1.0"]
#![allow(unused_mut)]
#![allow(unused_variables)]
#![allow(unused_imports)]
use super::{models, API_VERSION};
/// Aggregate error type for every operation in this generated client; each
/// variant transparently wraps the error type of the like-named operation
/// module below.
#[non_exhaustive]
#[derive(Debug, thiserror :: Error)]
#[allow(non_camel_case_types)]
pub enum Error {
    #[error(transparent)]
    CheckServiceProviderAvailability(#[from] check_service_provider_availability::Error),
    #[error(transparent)]
    LegacyPeerings_List(#[from] legacy_peerings::list::Error),
    #[error(transparent)]
    Operations_List(#[from] operations::list::Error),
    #[error(transparent)]
    PeerAsns_Get(#[from] peer_asns::get::Error),
    #[error(transparent)]
    PeerAsns_CreateOrUpdate(#[from] peer_asns::create_or_update::Error),
    #[error(transparent)]
    PeerAsns_Delete(#[from] peer_asns::delete::Error),
    #[error(transparent)]
    PeerAsns_ListBySubscription(#[from] peer_asns::list_by_subscription::Error),
    #[error(transparent)]
    PeeringLocations_List(#[from] peering_locations::list::Error),
    #[error(transparent)]
    Peerings_Get(#[from] peerings::get::Error),
    #[error(transparent)]
    Peerings_CreateOrUpdate(#[from] peerings::create_or_update::Error),
    #[error(transparent)]
    Peerings_Update(#[from] peerings::update::Error),
    #[error(transparent)]
    Peerings_Delete(#[from] peerings::delete::Error),
    #[error(transparent)]
    Peerings_ListByResourceGroup(#[from] peerings::list_by_resource_group::Error),
    #[error(transparent)]
    Peerings_ListBySubscription(#[from] peerings::list_by_subscription::Error),
    #[error(transparent)]
    PeeringServiceLocations_List(#[from] peering_service_locations::list::Error),
    #[error(transparent)]
    Prefixes_Get(#[from] prefixes::get::Error),
    #[error(transparent)]
    Prefixes_CreateOrUpdate(#[from] prefixes::create_or_update::Error),
    #[error(transparent)]
    Prefixes_Delete(#[from] prefixes::delete::Error),
    #[error(transparent)]
    Prefixes_ListByPeeringService(#[from] prefixes::list_by_peering_service::Error),
    #[error(transparent)]
    PeeringServiceProviders_List(#[from] peering_service_providers::list::Error),
    #[error(transparent)]
    PeeringServices_Get(#[from] peering_services::get::Error),
    #[error(transparent)]
    PeeringServices_CreateOrUpdate(#[from] peering_services::create_or_update::Error),
    #[error(transparent)]
    PeeringServices_Update(#[from] peering_services::update::Error),
    #[error(transparent)]
    PeeringServices_Delete(#[from] peering_services::delete::Error),
    #[error(transparent)]
    PeeringServices_ListByResourceGroup(#[from] peering_services::list_by_resource_group::Error),
    #[error(transparent)]
    PeeringServices_ListBySubscription(#[from] peering_services::list_by_subscription::Error),
}
/// POST `.../providers/Microsoft.Peering/CheckServiceProviderAvailability`:
/// sends the given input for the subscription and returns the service's
/// answer deserialized as a plain JSON string (generated code).
pub async fn check_service_provider_availability(
    operation_config: &crate::OperationConfig,
    check_service_provider_availability_input: &models::CheckServiceProviderAvailabilityInput,
    subscription_id: &str,
) -> std::result::Result<String, check_service_provider_availability::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!(
        "{}/subscriptions/{}/providers/Microsoft.Peering/CheckServiceProviderAvailability",
        operation_config.base_path(),
        subscription_id
    );
    let mut url = url::Url::parse(url_str).map_err(check_service_provider_availability::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::POST);
    // Attach a bearer token when the operation config provides credentials.
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(check_service_provider_availability::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
    req_builder = req_builder.header("content-type", "application/json");
    let req_body = azure_core::to_json(check_service_provider_availability_input)
        .map_err(check_service_provider_availability::Error::SerializeError)?;
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder
        .body(req_body)
        .map_err(check_service_provider_availability::Error::BuildRequestError)?;
    let rsp = http_client
        .execute_request(req)
        .await
        .map_err(check_service_provider_availability::Error::ExecuteRequestError)?;
    // 200 => deserialize the payload; anything else => typed error response.
    match rsp.status() {
        http::StatusCode::OK => {
            let rsp_body = rsp.body();
            let rsp_value: String = serde_json::from_slice(rsp_body)
                .map_err(|source| check_service_provider_availability::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(rsp_value)
        }
        status_code => {
            let rsp_body = rsp.body();
            let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body)
                .map_err(|source| check_service_provider_availability::Error::DeserializeError(source, rsp_body.clone()))?;
            Err(check_service_provider_availability::Error::DefaultResponse {
                status_code,
                value: rsp_value,
            })
        }
    }
}
/// Error type for the `check_service_provider_availability` operation
/// (generated; every operation below has an identically shaped error module).
pub mod check_service_provider_availability {
    use super::{models, API_VERSION};
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        // The service replied with a non-success status and an ErrorResponse.
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::ErrorResponse,
        },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
/// Operations on legacy peerings (generated code).
pub mod legacy_peerings {
    use super::{models, API_VERSION};
    /// GET `.../providers/Microsoft.Peering/legacyPeerings` filtered by
    /// `peeringLocation` and `kind`; returns the first page of results.
    pub async fn list(
        operation_config: &crate::OperationConfig,
        peering_location: &str,
        kind: &str,
        subscription_id: &str,
    ) -> std::result::Result<models::PeeringListResult, list::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/providers/Microsoft.Peering/legacyPeerings",
            operation_config.base_path(),
            subscription_id
        );
        let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::GET);
        // Attach a bearer token when credentials are configured.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(list::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        url.query_pairs_mut().append_pair("peeringLocation", peering_location);
        url.query_pairs_mut().append_pair("kind", kind);
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?;
        // 200 => result list; anything else => typed error response.
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::PeeringListResult =
                    serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            status_code => {
                let rsp_body = rsp.body();
                let rsp_value: models::ErrorResponse =
                    serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
                Err(list::Error::DefaultResponse {
                    status_code,
                    value: rsp_value,
                })
            }
        }
    }
    /// Error type for [`list`] (generated boilerplate).
    pub mod list {
        use super::{models, API_VERSION};
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse {
                status_code: http::StatusCode,
                value: models::ErrorResponse,
            },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
}
/// Provider operations listing (generated code).
pub mod operations {
    use super::{models, API_VERSION};
    /// GET `/providers/Microsoft.Peering/operations`: lists the operations
    /// the Microsoft.Peering resource provider supports.
    pub async fn list(operation_config: &crate::OperationConfig) -> std::result::Result<models::OperationListResult, list::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!("{}/providers/Microsoft.Peering/operations", operation_config.base_path(),);
        let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::GET);
        // Attach a bearer token when credentials are configured.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(list::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?;
        // 200 => result list; anything else => typed error response.
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::OperationListResult =
                    serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            status_code => {
                let rsp_body = rsp.body();
                let rsp_value: models::ErrorResponse =
                    serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
                Err(list::Error::DefaultResponse {
                    status_code,
                    value: rsp_value,
                })
            }
        }
    }
    /// Error type for [`list`] (generated boilerplate).
    pub mod list {
        use super::{models, API_VERSION};
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse {
                status_code: http::StatusCode,
                value: models::ErrorResponse,
            },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
}
/// CRUD operations on peer ASNs under a subscription (generated code).
pub mod peer_asns {
    use super::{models, API_VERSION};
    /// GET one peer ASN by name.
    pub async fn get(
        operation_config: &crate::OperationConfig,
        peer_asn_name: &str,
        subscription_id: &str,
    ) -> std::result::Result<models::PeerAsn, get::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/providers/Microsoft.Peering/peerAsns/{}",
            operation_config.base_path(),
            subscription_id,
            peer_asn_name
        );
        let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::GET);
        // Attach a bearer token when credentials are configured.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(get::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::PeerAsn =
                    serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            status_code => {
                let rsp_body = rsp.body();
                let rsp_value: models::ErrorResponse =
                    serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
                Err(get::Error::DefaultResponse {
                    status_code,
                    value: rsp_value,
                })
            }
        }
    }
    /// Error type for [`get`] (generated boilerplate).
    pub mod get {
        use super::{models, API_VERSION};
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse {
                status_code: http::StatusCode,
                value: models::ErrorResponse,
            },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    /// PUT a peer ASN: 200 = updated existing, 201 = created new.
    pub async fn create_or_update(
        operation_config: &crate::OperationConfig,
        peer_asn_name: &str,
        peer_asn: &models::PeerAsn,
        subscription_id: &str,
    ) -> std::result::Result<create_or_update::Response, create_or_update::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/providers/Microsoft.Peering/peerAsns/{}",
            operation_config.base_path(),
            subscription_id,
            peer_asn_name
        );
        let mut url = url::Url::parse(url_str).map_err(create_or_update::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::PUT);
        // Attach a bearer token when credentials are configured.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(create_or_update::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        req_builder = req_builder.header("content-type", "application/json");
        let req_body = azure_core::to_json(peer_asn).map_err(create_or_update::Error::SerializeError)?;
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(create_or_update::Error::BuildRequestError)?;
        let rsp = http_client
            .execute_request(req)
            .await
            .map_err(create_or_update::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::PeerAsn = serde_json::from_slice(rsp_body)
                    .map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(create_or_update::Response::Ok200(rsp_value))
            }
            http::StatusCode::CREATED => {
                let rsp_body = rsp.body();
                let rsp_value: models::PeerAsn = serde_json::from_slice(rsp_body)
                    .map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(create_or_update::Response::Created201(rsp_value))
            }
            status_code => {
                let rsp_body = rsp.body();
                let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body)
                    .map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
                Err(create_or_update::Error::DefaultResponse {
                    status_code,
                    value: rsp_value,
                })
            }
        }
    }
    /// Response/error types for [`create_or_update`] (generated boilerplate).
    pub mod create_or_update {
        use super::{models, API_VERSION};
        #[derive(Debug)]
        pub enum Response {
            Ok200(models::PeerAsn),
            Created201(models::PeerAsn),
        }
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse {
                status_code: http::StatusCode,
                value: models::ErrorResponse,
            },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    /// DELETE a peer ASN: 200 = deleted, 204 = did not exist.
    pub async fn delete(
        operation_config: &crate::OperationConfig,
        peer_asn_name: &str,
        subscription_id: &str,
    ) -> std::result::Result<delete::Response, delete::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/providers/Microsoft.Peering/peerAsns/{}",
            operation_config.base_path(),
            subscription_id,
            peer_asn_name
        );
        let mut url = url::Url::parse(url_str).map_err(delete::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::DELETE);
        // Attach a bearer token when credentials are configured.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(delete::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(delete::Error::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.map_err(delete::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => Ok(delete::Response::Ok200),
            http::StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204),
            status_code => {
                let rsp_body = rsp.body();
                let rsp_value: models::ErrorResponse =
                    serde_json::from_slice(rsp_body).map_err(|source| delete::Error::DeserializeError(source, rsp_body.clone()))?;
                Err(delete::Error::DefaultResponse {
                    status_code,
                    value: rsp_value,
                })
            }
        }
    }
    /// Response/error types for [`delete`] (generated boilerplate).
    pub mod delete {
        use super::{models, API_VERSION};
        #[derive(Debug)]
        pub enum Response {
            Ok200,
            NoContent204,
        }
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse {
                status_code: http::StatusCode,
                value: models::ErrorResponse,
            },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    /// GET all peer ASNs in the subscription (first page).
    pub async fn list_by_subscription(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
    ) -> std::result::Result<models::PeerAsnListResult, list_by_subscription::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/providers/Microsoft.Peering/peerAsns",
            operation_config.base_path(),
            subscription_id
        );
        let mut url = url::Url::parse(url_str).map_err(list_by_subscription::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::GET);
        // Attach a bearer token when credentials are configured.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(list_by_subscription::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(list_by_subscription::Error::BuildRequestError)?;
        let rsp = http_client
            .execute_request(req)
            .await
            .map_err(list_by_subscription::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::PeerAsnListResult = serde_json::from_slice(rsp_body)
                    .map_err(|source| list_by_subscription::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            status_code => {
                let rsp_body = rsp.body();
                let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body)
                    .map_err(|source| list_by_subscription::Error::DeserializeError(source, rsp_body.clone()))?;
                Err(list_by_subscription::Error::DefaultResponse {
                    status_code,
                    value: rsp_value,
                })
            }
        }
    }
    /// Error type for [`list_by_subscription`] (generated boilerplate).
    pub mod list_by_subscription {
        use super::{models, API_VERSION};
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse {
                status_code: http::StatusCode,
                value: models::ErrorResponse,
            },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
}
pub mod peering_locations {
use super::{models, API_VERSION};
pub async fn list(
operation_config: &crate::OperationConfig,
kind: &str,
direct_peering_type: Option<&str>,
subscription_id: &str,
) -> std::result::Result<models::PeeringLocationListResult, list::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.Peering/peeringLocations",
operation_config.base_path(),
subscription_id
);
let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
url.query_pairs_mut().append_pair("kind", kind);
if let Some(direct_peering_type) = direct_peering_type {
url.query_pairs_mut().append_pair("directPeeringType", direct_peering_type);
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::PeeringLocationListResult =
serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
pub mod peerings {
use super::{models, API_VERSION};
pub async fn get(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
peering_name: &str,
subscription_id: &str,
) -> std::result::Result<models::Peering, get::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Peering/peerings/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
peering_name
);
let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::Peering =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod get {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn create_or_update(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
peering_name: &str,
peering: &models::Peering,
subscription_id: &str,
) -> std::result::Result<create_or_update::Response, create_or_update::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Peering/peerings/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
peering_name
);
let mut url = url::Url::parse(url_str).map_err(create_or_update::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(create_or_update::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(peering).map_err(create_or_update::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(create_or_update::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(create_or_update::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::Peering = serde_json::from_slice(rsp_body)
.map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(create_or_update::Response::Ok200(rsp_value))
}
http::StatusCode::CREATED => {
let rsp_body = rsp.body();
let rsp_value: models::Peering = serde_json::from_slice(rsp_body)
.map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(create_or_update::Response::Created201(rsp_value))
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
Err(create_or_update::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod create_or_update {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Ok200(models::Peering),
Created201(models::Peering),
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn update(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
peering_name: &str,
tags: &models::ResourceTags,
subscription_id: &str,
) -> std::result::Result<models::Peering, update::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Peering/peerings/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
peering_name
);
let mut url = url::Url::parse(url_str).map_err(update::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PATCH);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(update::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(tags).map_err(update::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(update::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(update::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::Peering =
serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?;
Err(update::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod update {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn delete(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
peering_name: &str,
subscription_id: &str,
) -> std::result::Result<delete::Response, delete::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Peering/peerings/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
peering_name
);
let mut url = url::Url::parse(url_str).map_err(delete::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(delete::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(delete::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(delete::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => Ok(delete::Response::Ok200),
http::StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204),
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| delete::Error::DeserializeError(source, rsp_body.clone()))?;
Err(delete::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod delete {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Ok200,
NoContent204,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn list_by_resource_group(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
subscription_id: &str,
) -> std::result::Result<models::PeeringListResult, list_by_resource_group::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Peering/peerings",
operation_config.base_path(),
subscription_id,
resource_group_name
);
let mut url = url::Url::parse(url_str).map_err(list_by_resource_group::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_by_resource_group::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(list_by_resource_group::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_by_resource_group::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::PeeringListResult = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_resource_group::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_resource_group::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_by_resource_group::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list_by_resource_group {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn list_by_subscription(
operation_config: &crate::OperationConfig,
subscription_id: &str,
) -> std::result::Result<models::PeeringListResult, list_by_subscription::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.Peering/peerings",
operation_config.base_path(),
subscription_id
);
let mut url = url::Url::parse(url_str).map_err(list_by_subscription::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_by_subscription::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list_by_subscription::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_by_subscription::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::PeeringListResult = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_subscription::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| list_by_subscription::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_by_subscription::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list_by_subscription {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
pub mod peering_service_locations {
use super::{models, API_VERSION};
pub async fn list(
operation_config: &crate::OperationConfig,
subscription_id: &str,
) -> std::result::Result<models::PeeringServiceLocationListResult, list::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.Peering/peeringServiceLocations",
operation_config.base_path(),
subscription_id
);
let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::PeeringServiceLocationListResult =
serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
pub mod prefixes {
use super::{models, API_VERSION};
pub async fn get(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
peering_service_name: &str,
prefix_name: &str,
expand: Option<&str>,
subscription_id: &str,
) -> std::result::Result<models::PeeringServicePrefix, get::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Peering/peeringServices/{}/prefixes/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
peering_service_name,
prefix_name
);
let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
if let Some(expand) = expand {
url.query_pairs_mut().append_pair("$expand", expand);
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::PeeringServicePrefix =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod get {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn create_or_update(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
peering_service_name: &str,
prefix_name: &str,
peering_service_prefix: &models::PeeringServicePrefix,
subscription_id: &str,
) -> std::result::Result<create_or_update::Response, create_or_update::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Peering/peeringServices/{}/prefixes/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
peering_service_name,
prefix_name
);
let mut url = url::Url::parse(url_str).map_err(create_or_update::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(create_or_update::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(peering_service_prefix).map_err(create_or_update::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(create_or_update::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(create_or_update::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::PeeringServicePrefix = serde_json::from_slice(rsp_body)
.map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(create_or_update::Response::Ok200(rsp_value))
}
http::StatusCode::CREATED => {
let rsp_body = rsp.body();
let rsp_value: models::PeeringServicePrefix = serde_json::from_slice(rsp_body)
.map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(create_or_update::Response::Created201(rsp_value))
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body)
.map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
Err(create_or_update::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod create_or_update {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Ok200(models::PeeringServicePrefix),
Created201(models::PeeringServicePrefix),
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn delete(
operation_config: &crate::OperationConfig,
resource_group_name: &str,
peering_service_name: &str,
prefix_name: &str,
subscription_id: &str,
) -> std::result::Result<delete::Response, delete::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Peering/peeringServices/{}/prefixes/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
peering_service_name,
prefix_name
);
let mut url = url::Url::parse(url_str).map_err(delete::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(delete::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(delete::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(delete::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => Ok(delete::Response::Ok200),
http::StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204),
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| delete::Error::DeserializeError(source, rsp_body.clone()))?;
Err(delete::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod delete {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Ok200,
NoContent204,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// Lists the prefixes under the given peering service (GET on the
/// `peeringServices/{name}/prefixes` endpoint). The optional `expand` value is
/// forwarded as the `$expand` query parameter. Non-200 responses are decoded
/// into `models::ErrorResponse` and returned as `Error::DefaultResponse`.
pub async fn list_by_peering_service(
    operation_config: &crate::OperationConfig,
    resource_group_name: &str,
    peering_service_name: &str,
    expand: Option<&str>,
    subscription_id: &str,
) -> std::result::Result<models::PeeringServicePrefixListResult, list_by_peering_service::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!(
        "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Peering/peeringServices/{}/prefixes",
        operation_config.base_path(),
        subscription_id,
        resource_group_name,
        peering_service_name
    );
    let mut url = url::Url::parse(url_str).map_err(list_by_peering_service::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::GET);
    // Attach a bearer token only when the operation config carries a credential.
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(list_by_peering_service::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
    if let Some(expand) = expand {
        url.query_pairs_mut().append_pair("$expand", expand);
    }
    let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder
        .body(req_body)
        .map_err(list_by_peering_service::Error::BuildRequestError)?;
    let rsp = http_client
        .execute_request(req)
        .await
        .map_err(list_by_peering_service::Error::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::OK => {
            let rsp_body = rsp.body();
            let rsp_value: models::PeeringServicePrefixListResult = serde_json::from_slice(rsp_body)
                .map_err(|source| list_by_peering_service::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(rsp_value)
        }
        // Any other status follows the service's default error contract.
        status_code => {
            let rsp_body = rsp.body();
            let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body)
                .map_err(|source| list_by_peering_service::Error::DeserializeError(source, rsp_body.clone()))?;
            Err(list_by_peering_service::Error::DefaultResponse {
                status_code,
                value: rsp_value,
            })
        }
    }
}
pub mod list_by_peering_service {
    use super::{models, API_VERSION};
    /// Errors produced by the `list_by_peering_service` operation.
    #[derive(Debug, thiserror::Error)]
    pub enum Error {
        /// The service replied with a status not covered by a typed response arm.
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::ErrorResponse,
        },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
}
/// Operations on the subscription-wide `peeringServiceProviders` collection.
pub mod peering_service_providers {
    use super::{models, API_VERSION};
    /// Lists all peering service providers available to the subscription
    /// (GET on `providers/Microsoft.Peering/peeringServiceProviders`).
    pub async fn list(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
    ) -> std::result::Result<models::PeeringServiceProviderListResult, list::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/providers/Microsoft.Peering/peeringServiceProviders",
            operation_config.base_path(),
            subscription_id
        );
        let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::GET);
        // Attach a bearer token only when the operation config carries a credential.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(list::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::PeeringServiceProviderListResult =
                    serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            // Any other status follows the service's default error contract.
            status_code => {
                let rsp_body = rsp.body();
                let rsp_value: models::ErrorResponse =
                    serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
                Err(list::Error::DefaultResponse {
                    status_code,
                    value: rsp_value,
                })
            }
        }
    }
    pub mod list {
        use super::{models, API_VERSION};
        /// Errors produced by the `list` operation.
        #[derive(Debug, thiserror::Error)]
        pub enum Error {
            /// The service replied with a status not covered by a typed response arm.
            #[error("HTTP status code {}", status_code)]
            DefaultResponse {
                status_code: http::StatusCode,
                value: models::ErrorResponse,
            },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
}
pub mod peering_services {
use super::{models, API_VERSION};
/// Gets an existing peering service by name under the given subscription and
/// resource group (GET on the `peeringServices/{name}` endpoint).
pub async fn get(
    operation_config: &crate::OperationConfig,
    resource_group_name: &str,
    peering_service_name: &str,
    subscription_id: &str,
) -> std::result::Result<models::PeeringService, get::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!(
        "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Peering/peeringServices/{}",
        operation_config.base_path(),
        subscription_id,
        resource_group_name,
        peering_service_name
    );
    let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::GET);
    // Attach a bearer token only when the operation config carries a credential.
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(get::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
    let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
    let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::OK => {
            let rsp_body = rsp.body();
            let rsp_value: models::PeeringService =
                serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(rsp_value)
        }
        // Any other status follows the service's default error contract.
        status_code => {
            let rsp_body = rsp.body();
            let rsp_value: models::ErrorResponse =
                serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
            Err(get::Error::DefaultResponse {
                status_code,
                value: rsp_value,
            })
        }
    }
}
pub mod get {
    use super::{models, API_VERSION};
    /// Errors produced by the `get` operation.
    #[derive(Debug, thiserror::Error)]
    pub enum Error {
        /// The service replied with a status not covered by a typed response arm.
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::ErrorResponse,
        },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
/// Creates a new peering service or updates an existing one (PUT on the
/// `peeringServices/{name}` endpoint with `peering_service` serialized as the
/// JSON body). 200 and 201 both carry the resulting `models::PeeringService`.
pub async fn create_or_update(
    operation_config: &crate::OperationConfig,
    resource_group_name: &str,
    peering_service_name: &str,
    peering_service: &models::PeeringService,
    subscription_id: &str,
) -> std::result::Result<create_or_update::Response, create_or_update::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!(
        "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Peering/peeringServices/{}",
        operation_config.base_path(),
        subscription_id,
        resource_group_name,
        peering_service_name
    );
    let mut url = url::Url::parse(url_str).map_err(create_or_update::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::PUT);
    // Attach a bearer token only when the operation config carries a credential.
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(create_or_update::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
    req_builder = req_builder.header("content-type", "application/json");
    let req_body = azure_core::to_json(peering_service).map_err(create_or_update::Error::SerializeError)?;
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder.body(req_body).map_err(create_or_update::Error::BuildRequestError)?;
    let rsp = http_client
        .execute_request(req)
        .await
        .map_err(create_or_update::Error::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::OK => {
            let rsp_body = rsp.body();
            let rsp_value: models::PeeringService = serde_json::from_slice(rsp_body)
                .map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(create_or_update::Response::Ok200(rsp_value))
        }
        http::StatusCode::CREATED => {
            let rsp_body = rsp.body();
            let rsp_value: models::PeeringService = serde_json::from_slice(rsp_body)
                .map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(create_or_update::Response::Created201(rsp_value))
        }
        // Any other status follows the service's default error contract.
        status_code => {
            let rsp_body = rsp.body();
            let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body)
                .map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
            Err(create_or_update::Error::DefaultResponse {
                status_code,
                value: rsp_value,
            })
        }
    }
}
pub mod create_or_update {
    use super::{models, API_VERSION};
    /// Successful outcomes of the `create_or_update` operation; both variants
    /// carry the resulting resource.
    #[derive(Debug)]
    pub enum Response {
        Ok200(models::PeeringService),
        Created201(models::PeeringService),
    }
    /// Errors produced by the `create_or_update` operation.
    #[derive(Debug, thiserror::Error)]
    pub enum Error {
        /// The service replied with a status not covered by a typed response arm.
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::ErrorResponse,
        },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
/// Updates the tags of an existing peering service (PATCH on the
/// `peeringServices/{name}` endpoint with `tags` serialized as the JSON body).
pub async fn update(
    operation_config: &crate::OperationConfig,
    resource_group_name: &str,
    peering_service_name: &str,
    tags: &models::ResourceTags,
    subscription_id: &str,
) -> std::result::Result<models::PeeringService, update::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!(
        "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Peering/peeringServices/{}",
        operation_config.base_path(),
        subscription_id,
        resource_group_name,
        peering_service_name
    );
    let mut url = url::Url::parse(url_str).map_err(update::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::PATCH);
    // Attach a bearer token only when the operation config carries a credential.
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(update::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
    req_builder = req_builder.header("content-type", "application/json");
    let req_body = azure_core::to_json(tags).map_err(update::Error::SerializeError)?;
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder.body(req_body).map_err(update::Error::BuildRequestError)?;
    let rsp = http_client.execute_request(req).await.map_err(update::Error::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::OK => {
            let rsp_body = rsp.body();
            let rsp_value: models::PeeringService =
                serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(rsp_value)
        }
        // Any other status follows the service's default error contract.
        status_code => {
            let rsp_body = rsp.body();
            let rsp_value: models::ErrorResponse =
                serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?;
            Err(update::Error::DefaultResponse {
                status_code,
                value: rsp_value,
            })
        }
    }
}
pub mod update {
    use super::{models, API_VERSION};
    /// Errors produced by the `update` operation.
    #[derive(Debug, thiserror::Error)]
    pub enum Error {
        /// The service replied with a status not covered by a typed response arm.
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::ErrorResponse,
        },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
/// Deletes an existing peering service (DELETE on the `peeringServices/{name}`
/// endpoint). Both 200 and 204 are treated as success with no body.
pub async fn delete(
    operation_config: &crate::OperationConfig,
    resource_group_name: &str,
    peering_service_name: &str,
    subscription_id: &str,
) -> std::result::Result<delete::Response, delete::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!(
        "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Peering/peeringServices/{}",
        operation_config.base_path(),
        subscription_id,
        resource_group_name,
        peering_service_name
    );
    let mut url = url::Url::parse(url_str).map_err(delete::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::DELETE);
    // Attach a bearer token only when the operation config carries a credential.
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(delete::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
    let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder.body(req_body).map_err(delete::Error::BuildRequestError)?;
    let rsp = http_client.execute_request(req).await.map_err(delete::Error::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::OK => Ok(delete::Response::Ok200),
        http::StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204),
        // Any other status follows the service's default error contract.
        status_code => {
            let rsp_body = rsp.body();
            let rsp_value: models::ErrorResponse =
                serde_json::from_slice(rsp_body).map_err(|source| delete::Error::DeserializeError(source, rsp_body.clone()))?;
            Err(delete::Error::DefaultResponse {
                status_code,
                value: rsp_value,
            })
        }
    }
}
pub mod delete {
    use super::{models, API_VERSION};
    /// Successful outcomes of the `delete` operation (the service returns no body).
    #[derive(Debug)]
    pub enum Response {
        Ok200,
        NoContent204,
    }
    /// Errors produced by the `delete` operation.
    #[derive(Debug, thiserror::Error)]
    pub enum Error {
        /// The service replied with a status not covered by a typed response arm.
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::ErrorResponse,
        },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
/// Lists all peering services in the given resource group (GET on the
/// resource-group-scoped `peeringServices` collection).
pub async fn list_by_resource_group(
    operation_config: &crate::OperationConfig,
    resource_group_name: &str,
    subscription_id: &str,
) -> std::result::Result<models::PeeringServiceListResult, list_by_resource_group::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!(
        "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Peering/peeringServices",
        operation_config.base_path(),
        subscription_id,
        resource_group_name
    );
    let mut url = url::Url::parse(url_str).map_err(list_by_resource_group::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::GET);
    // Attach a bearer token only when the operation config carries a credential.
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(list_by_resource_group::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
    let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder
        .body(req_body)
        .map_err(list_by_resource_group::Error::BuildRequestError)?;
    let rsp = http_client
        .execute_request(req)
        .await
        .map_err(list_by_resource_group::Error::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::OK => {
            let rsp_body = rsp.body();
            let rsp_value: models::PeeringServiceListResult = serde_json::from_slice(rsp_body)
                .map_err(|source| list_by_resource_group::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(rsp_value)
        }
        // Any other status follows the service's default error contract.
        status_code => {
            let rsp_body = rsp.body();
            let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body)
                .map_err(|source| list_by_resource_group::Error::DeserializeError(source, rsp_body.clone()))?;
            Err(list_by_resource_group::Error::DefaultResponse {
                status_code,
                value: rsp_value,
            })
        }
    }
}
pub mod list_by_resource_group {
    use super::{models, API_VERSION};
    /// Errors produced by the `list_by_resource_group` operation.
    #[derive(Debug, thiserror::Error)]
    pub enum Error {
        /// The service replied with a status not covered by a typed response arm.
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::ErrorResponse,
        },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
/// Lists all peering services in the subscription (GET on the
/// subscription-scoped `peeringServices` collection).
pub async fn list_by_subscription(
    operation_config: &crate::OperationConfig,
    subscription_id: &str,
) -> std::result::Result<models::PeeringServiceListResult, list_by_subscription::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!(
        "{}/subscriptions/{}/providers/Microsoft.Peering/peeringServices",
        operation_config.base_path(),
        subscription_id
    );
    let mut url = url::Url::parse(url_str).map_err(list_by_subscription::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::GET);
    // Attach a bearer token only when the operation config carries a credential.
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(list_by_subscription::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
    let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder.body(req_body).map_err(list_by_subscription::Error::BuildRequestError)?;
    let rsp = http_client
        .execute_request(req)
        .await
        .map_err(list_by_subscription::Error::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::OK => {
            let rsp_body = rsp.body();
            let rsp_value: models::PeeringServiceListResult = serde_json::from_slice(rsp_body)
                .map_err(|source| list_by_subscription::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(rsp_value)
        }
        // Any other status follows the service's default error contract.
        status_code => {
            let rsp_body = rsp.body();
            let rsp_value: models::ErrorResponse = serde_json::from_slice(rsp_body)
                .map_err(|source| list_by_subscription::Error::DeserializeError(source, rsp_body.clone()))?;
            Err(list_by_subscription::Error::DefaultResponse {
                status_code,
                value: rsp_value,
            })
        }
    }
}
pub mod list_by_subscription {
    use super::{models, API_VERSION};
    /// Errors produced by the `list_by_subscription` operation.
    #[derive(Debug, thiserror::Error)]
    pub enum Error {
        /// The service replied with a status not covered by a typed response arm.
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::ErrorResponse,
        },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
}
|
use amethyst::{assets::PrefabLoader, core::Transform, ecs::prelude::*, prelude::*, renderer::*};
use amethyst_gltf::{GltfPrefab, GltfSceneFormat, GltfSceneOptions};
use components::*;
use core::math::*;
use core::ServerMessageBody;
use {state::MainState, GltfCache, PlayerLookup, ReadConnection};
/// Game state that waits for the init message from the server.
///
/// This is the first state for the game and is used to initialize the `Application`. Its only
/// job is to poll the connection until the `Init` message is received, discarding any other
/// messages. Upon receiving the init message, it creates the components for the local world
/// state, then transitions to the `MainState`.
///
/// Unit struct: all mutable state lives in ECS resources, not on the state itself.
#[derive(Debug)]
pub struct InitState;
impl SimpleState for InitState {
    /// One-time setup: registers client-side resources/components, kicks off
    /// asynchronous loading of the GLTF prefabs ("biped", "revolver",
    /// "bullet"), spawns two point lights, and sets the ambient color.
    fn on_start(&mut self, data: StateData<GameData>) {
        trace!("InitState::on_start()");
        let world = data.world;
        world.add_resource(PlayerLookup::default());
        world.register::<PlayerEntities>();
        world.register::<PlayerPitch>();
        // System data fetched only for the duration of the `exec` call below.
        #[derive(SystemData)]
        struct Data<'a> {
            loader: PrefabLoader<'a, GltfPrefab>,
            gltf_cache: Write<'a, GltfCache>,
        }
        world.exec(|mut data: Data| {
            trace!("Loading biped.gltf");
            let biped_handle = data.loader.load(
                "biped.gltf",
                GltfSceneFormat,
                GltfSceneOptions {
                    generate_tex_coords: (0.1, 0.1),
                    load_animations: true,
                    flip_v_coord: true,
                    scene_index: None,
                },
                (),
            );
            data.gltf_cache.insert("biped".into(), biped_handle);
            trace!("Loading revolver.gltf");
            let revolver_handle = data.loader.load(
                "revolver/revolver-python.gltf",
                GltfSceneFormat,
                GltfSceneOptions {
                    generate_tex_coords: (0.1, 0.1),
                    load_animations: true,
                    flip_v_coord: true,
                    scene_index: None,
                },
                (),
            );
            data.gltf_cache.insert("revolver".into(), revolver_handle);
            trace!("Loading bullet-9mm.gltf");
            let bullet_handle = data.loader.load(
                "revolver/bullet-9mm.gltf",
                GltfSceneFormat,
                GltfSceneOptions {
                    generate_tex_coords: (0.1, 0.1),
                    load_animations: true,
                    flip_v_coord: true,
                    scene_index: None,
                },
                (),
            );
            data.gltf_cache.insert("bullet".into(), bullet_handle);
        });
        // Static scene lighting: one warm and one cool point light.
        world
            .create_entity()
            .with(Transform::from(Vector3::new(6.0, 6.0, -6.0)))
            .with(Light::from(PointLight {
                intensity: 6.0,
                color: [0.8, 0.0, 0.0].into(),
                ..PointLight::default()
            }))
            .build();
        world
            .create_entity()
            .with(Transform::from(Vector3::new(0.0, 4.0, 4.0)))
            .with(Light::from(PointLight {
                intensity: 5.0,
                color: [0.0, 0.3, 0.7].into(),
                ..PointLight::default()
            }))
            .build();
        world.add_resource(AmbientColor(Rgba(0.2, 0.2, 0.2, 0.2)));
    }
    /// Polls the server connection each frame; on `Init`, builds the local
    /// entities for every player and switches to `MainState`. All other
    /// messages received before `Init` are logged and discarded.
    fn update(&mut self, data: &mut StateData<GameData>) -> SimpleTrans {
        trace!("InitState::update()");
        #[derive(SystemData)]
        struct Data<'a> {
            connection: ReadConnection<'a>,
            entities: Entities<'a>,
            updater: Read<'a, LazyUpdate>,
            gltf_cache: Read<'a, GltfCache>,
            player_lookup: Write<'a, PlayerLookup>,
        }
        // Listen for the `Init` message. Once we receive it, we can initialize the local state
        // and then switch to the main game state.
        let trans = data.world.exec(|mut data: Data| {
            for message in data.connection.try_iter() {
                match message.body {
                    ServerMessageBody::Init { id, world } => {
                        trace!("Received init message, id {:#x}", id);
                        // Models must already be cached by `on_start`; a miss here is a bug.
                        let biped = data.gltf_cache.get("biped").expect("No biped model");
                        let revolver = data.gltf_cache.get("revolver").expect("No revolver model");
                        // Initialize the local state for each of the players.
                        for (_, player) in world.players {
                            // The player whose id matches ours is the locally-controlled one.
                            let is_local = player.id == id;
                            ::build_player(
                                &data.updater,
                                &data.entities,
                                &mut data.player_lookup,
                                player,
                                is_local,
                                biped,
                                revolver,
                            );
                        }
                        // Once we've initialized the local state, switch to the main game state
                        // which handles the core logic for the game.
                        return Trans::Switch(Box::new(MainState { id, frame: 0 }));
                    }
                    _ => trace!("Discarding while waiting for `Init`: {:?}", message),
                }
            }
            Trans::None
        });
        trans
    }
}
|
use crate::backend::RequestOptions;
use crate::{
backend::{Backend, BackendInformation, Token},
components::placeholder::Placeholder,
data::{SharedDataBridge, SharedDataOps},
error::error,
page::AppPage,
preferences::Preferences,
};
use anyhow::Error;
use chrono::{DateTime, Utc};
use drogue_cloud_console_common::{EndpointInformation, UserInfo};
use patternfly_yew::*;
use std::{rc::Rc, time::Duration};
use url::Url;
use wasm_bindgen::JsValue;
use yew::{
format::{Json, Nothing},
prelude::*,
services::{
fetch::{Request, *},
timeout::*,
},
utils::window,
};
/// Root component of the console: drives backend discovery, the OAuth
/// code-for-token exchange, token refresh scheduling, and logout.
pub struct Main {
    link: ComponentLink<Self>,
    // OAuth authorization code extracted from the `code` URL query parameter, if present.
    access_code: Option<String>,
    // Currently in-flight fetch (backend info, token exchange, or endpoints).
    task: Option<FetchTask>,
    // Timer that triggers the next access-token refresh.
    refresh_task: Option<TimeoutTask>,
    // Bridge used to share the current token with the rest of the app.
    token_holder: SharedDataBridge<Option<Token>>,
    /// Something failed, we can no longer work.
    app_failure: bool,
    /// We are in the process of authenticating.
    authenticating: bool,
    // Endpoint information fetched after authentication succeeds.
    endpoints: Option<EndpointInformation>,
}
/// Messages driving the `Main` component's authentication state machine.
#[derive(Debug, Clone)]
pub enum Msg {
    /// Trigger fetching the endpoint information
    FetchBackend,
    /// Failed to fetch endpoint information
    FetchBackendFailed,
    /// Trigger an overall application failure
    AppFailure(Toast),
    /// Set the backend information
    Backend(BackendInformation),
    /// Set the endpoint information
    Endpoints(Rc<EndpointInformation>),
    /// Exchange the authentication code for an access token
    GetToken(String),
    /// Share the access token using the data bridge
    ShareAccessToken(Option<Token>),
    /// Set the access token
    SetAccessToken(Option<Token>),
    /// Callback when fetching the token failed
    FetchTokenFailed,
    /// Retry the login flow after a failed attempt (clears the token and re-authenticates)
    RetryLogin,
    /// Send to trigger refreshing the access token
    RefreshToken(Option<String>),
    /// Trigger logout
    Logout,
}
impl Component for Main {
type Message = Msg;
type Properties = ();
fn create(_: Self::Properties, link: ComponentLink<Self>) -> Self {
link.send_message(Msg::FetchBackend);
let location = window().location();
let url = Url::parse(&location.href().unwrap()).unwrap();
log::debug!("href: {:?}", url);
let code = url.query_pairs().find_map(|(k, v)| {
if k == "code" {
Some(v.to_string())
} else {
None
}
});
let error = url.query_pairs().find_map(|(k, v)| {
if k == "error" {
Some(v.to_string())
} else {
None
}
});
log::debug!("Access code: {:?}", code);
log::debug!("Login error: {:?}", error);
if let Some(error) = error {
link.send_message(Msg::AppFailure(Toast {
title: "Failed to log in".into(),
body: html! {<p>{error}</p>},
r#type: Type::Danger,
actions: vec![link.callback(|_| Msg::RetryLogin).into_action("Retry")],
..Default::default()
}));
}
// remove code, state and others from the URL bar
{
let mut url = url;
url.query_pairs_mut().clear();
let url = url.as_str().trim_end_matches('?');
window()
.history()
.unwrap()
.replace_state_with_url(&JsValue::NULL, "Drogue IoT", Some(url))
.ok();
}
let token_holder = SharedDataBridge::from(&link, Msg::SetAccessToken);
Self {
link,
access_code: code,
task: None,
refresh_task: None,
app_failure: false,
authenticating: false,
token_holder,
endpoints: None,
}
}
fn update(&mut self, msg: Self::Message) -> ShouldRender {
log::info!("Message: {:?}", msg);
match msg {
Msg::FetchBackend => {
self.task = Some(
self.fetch_backend()
.expect("Failed to get backend information"),
);
true
}
Msg::Backend(backend) => {
log::info!("Got backend: {:?}", backend);
Backend::set(Some(backend));
self.task = None;
if !self.app_failure {
if let Some(access_code) = self.access_code.take() {
// exchange code for token if we have a code and no app failure
log::info!("Exchange access code for token");
self.authenticating = true;
self.link.send_message(Msg::GetToken(access_code));
} else if let Some(refresh) = Preferences::load()
.ok()
.and_then(|prefs| prefs.refresh_token)
{
log::info!("Re-using existing refresh token");
self.authenticating = true;
// try using existing refresh token
self.link.send_message(Msg::RefreshToken(Some(refresh)))
}
}
true
}
Msg::Endpoints(endpoints) => {
log::info!("Got endpoints: {:?}", endpoints);
self.endpoints =
Some(Rc::try_unwrap(endpoints).unwrap_or_else(|err| (*err).clone()));
self.task = None;
true
}
Msg::FetchBackendFailed => {
error(
"Failed to fetch backend information",
"Could not retrieve information for connecting to the backend.",
);
true
}
Msg::AppFailure(toast) => {
ToastDispatcher::default().toast(toast);
self.app_failure = true;
true
}
Msg::FetchTokenFailed => {
self.authenticating = false;
true
}
Msg::RetryLogin => {
Backend::update_token(None);
if Backend::reauthenticate().is_err() {
error(
"Failed to log in",
"No backed information present. Unable to trigger login.",
);
}
false
}
Msg::GetToken(access_code) => {
// get the access token from the code
// this can only be called once the backend information
if Backend::get().is_some() {
self.task = Some(
self.fetch_token(&access_code)
.expect("Failed to create request"),
);
} else {
self.access_code = Some(access_code);
}
true
}
Msg::ShareAccessToken(token) => {
self.token_holder.set(token);
false
}
Msg::SetAccessToken(Some(token)) => {
log::info!("Token: {:?}", token);
self.task = None;
self.authenticating = false;
Preferences::update_or_default(|mut prefs| {
prefs.refresh_token = token.refresh_token.as_ref().cloned();
prefs.id_token = token.id_token.clone();
prefs.user_info = token.userinfo.as_ref().cloned();
Ok(prefs)
})
.map_err(|err| {
log::warn!("Failed to store preferences: {}", err);
err
})
.ok();
Backend::update_token(Some(token.clone()));
if let Some(timeout) = token.valid_for() {
log::info!("Token expires in {:?}", timeout);
let mut rem = (timeout.as_secs() as i64) - 30;
if rem < 0 {
// ensure we are non-negative
rem = 0;
}
if rem < 30 {
// refresh now
log::debug!("Scheduling refresh now (had {} s remaining)", rem);
self.link
.send_message(Msg::RefreshToken(token.refresh_token.as_ref().cloned()));
} else {
log::debug!("Scheduling refresh in {} seconds", rem);
let refresh_token = token.refresh_token.as_ref().cloned();
self.refresh_task = Some(TimeoutService::spawn(
Duration::from_secs(rem as u64),
self.link.callback_once(move |_| {
log::info!("Token timer expired, refreshing...");
Msg::RefreshToken(refresh_token)
}),
));
}
} else {
log::debug!("Token has no expiration set");
}
// fetch endpoints
if self.endpoints.is_none() {
self.task = Some(self.fetch_endpoints().expect("Failed to fetch endpoints"));
}
// done
true
}
Msg::SetAccessToken(None) => true,
Msg::RefreshToken(refresh_token) => {
log::info!("Refreshing access token");
match refresh_token {
Some(refresh_token) => {
self.task = match self.refresh_token(&refresh_token) {
Ok(task) => Some(task),
Err(_) => {
Backend::reauthenticate().ok();
None
}
}
}
None => {
Backend::reauthenticate().ok();
}
}
true
}
Msg::Logout => {
Preferences::update_or_default(|mut prefs| {
prefs.refresh_token = None;
prefs.id_token = Default::default();
prefs.user_info = None;
Ok(prefs)
})
.ok();
Backend::logout().ok();
false
}
}
}
/// Properties never change for this root component, so a property
/// change never requires a re-render.
fn change(&mut self, _props: Self::Properties) -> ShouldRender {
    false
}
/// Render the application shell: global backdrop/toast viewers plus
/// either the main page (when backend, token and endpoints are ready),
/// a login placeholder, or nothing while still initializing.
fn view(&self) -> Html {
    return html! {
        <>
        <BackdropViewer/>
        <ToastViewer/>
        {
            // Fully ready: hand backend/token/endpoints to the app page.
            if let Some(ready) = self.is_ready() {
                html!{
                    <AppPage
                        backend=ready.0
                        token=ready.1
                        endpoints=ready.2
                        on_logout=self.link.callback(|_|Msg::Logout)
                    />
                }
            } else if self.need_login() {
                html!{ <Placeholder/> }
            } else {
                html!{}
            }
        }
        </>
    };
}
}
impl Main {
    /// Check if the app and backend are ready to show the application.
    ///
    /// Requires backend information, a token and endpoint information to
    /// all be present, and no prior application failure.
    fn is_ready(&self) -> Option<(Backend, Token, EndpointInformation)> {
        match (
            self.app_failure,
            Backend::get(),
            Backend::token(),
            self.endpoints.as_ref().cloned(),
        ) {
            // A recorded application failure always wins.
            (true, ..) => None,
            (false, Some(backend), Some(token), Some(endpoints)) => {
                Some((backend, token, endpoints))
            }
            _ => None,
        }
    }
    /// True when backend info is loaded, no failure occurred, and no
    /// authentication is in flight — i.e. the login screen should show.
    fn need_login(&self) -> bool {
        !self.app_failure && Backend::get().is_some() && !self.authenticating
    }
    /// Fetch the backend configuration from the static endpoint file.
    ///
    /// The resulting callback sends `Msg::Backend` on success and
    /// `Msg::FetchBackendFailed` on any HTTP or parse failure.
    fn fetch_backend(&self) -> Result<FetchTask, anyhow::Error> {
        let req = Request::get("/endpoints/backend.json").body(Nothing)?;
        let opts = FetchOptions {
            // Always re-fetch; this file may change between deployments.
            cache: Some(Cache::NoCache),
            ..Default::default()
        };
        FetchService::fetch_with_options(
            req,
            opts,
            self.link.callback(
                |response: Response<Json<Result<BackendInformation, Error>>>| {
                    log::info!("Backend: {:?}", response);
                    if let (meta, Json(Ok(body))) = response.into_parts() {
                        if meta.status.is_success() {
                            return Msg::Backend(body);
                        }
                    }
                    Msg::FetchBackendFailed
                },
            ),
        )
    }
    /// Query the console API for endpoint information.
    fn fetch_endpoints(&self) -> Result<FetchTask, anyhow::Error> {
        Backend::request_with(
            Method::GET,
            "/api/console/v1alpha1/info",
            Nothing,
            RequestOptions {
                // Avoid triggering a re-authentication loop from here.
                disable_reauth: true,
            },
            self.link.callback(
                |response: Response<Json<Result<EndpointInformation, Error>>>| {
                    let parts = response.into_parts();
                    if let (meta, Json(Ok(body))) = parts {
                        log::info!("Meta: {:?}", meta);
                        if meta.status.is_success() {
                            return Msg::Endpoints(Rc::new(body));
                        }
                    }
                    Msg::FetchBackendFailed
                },
            ),
        )
    }
    /// Exchange a refresh token for a fresh access token via the UI API.
    fn refresh_token(&self, refresh_token: &str) -> Result<FetchTask, anyhow::Error> {
        let mut url = Backend::url("/api/console/v1alpha1/ui/refresh")
            .ok_or_else(|| anyhow::anyhow!("Missing backend information"))?;
        url.query_pairs_mut()
            .append_pair("refresh_token", refresh_token);
        let req = Request::get(url.to_string()).body(Nothing)?;
        let opts = FetchOptions {
            cache: Some(Cache::NoCache),
            ..Default::default()
        };
        FetchService::fetch_with_options(
            req,
            opts,
            self.link.callback(
                |response: Response<Json<Result<serde_json::Value, Error>>>| {
                    log::info!("Response from refreshing token: {:?}", response);
                    // `true`: this is a refresh, keep stored id token/user info.
                    Self::from_response(response, true)
                },
            ),
        )
    }
    /// Exchange an OAuth access code for the initial token set.
    fn fetch_token<S: AsRef<str>>(&self, access_code: S) -> Result<FetchTask, anyhow::Error> {
        let mut url = Backend::url("/api/console/v1alpha1/ui/token")
            .ok_or_else(|| anyhow::anyhow!("Missing backend information"))?;
        url.query_pairs_mut()
            .append_pair("code", access_code.as_ref());
        let req = Request::get(url.to_string()).body(Nothing)?;
        let opts = FetchOptions {
            cache: Some(Cache::NoCache),
            ..Default::default()
        };
        FetchService::fetch_with_options(
            req,
            opts,
            self.link.callback(
                |response: Response<Json<Result<serde_json::Value, Error>>>| {
                    log::info!("Code to token response: {:?}", response);
                    Self::from_response(response, false)
                },
            ),
        )
    }
    /// Convert a token-endpoint JSON response into a message.
    ///
    /// For refresh responses (`is_refresh == true`) the id token and user
    /// info are taken from stored preferences rather than the response
    /// body. Any missing access/id token yields `Msg::FetchTokenFailed`.
    fn from_response(
        response: Response<Json<Result<serde_json::Value, Error>>>,
        is_refresh: bool,
    ) -> Msg {
        if let (meta, Json(Ok(value))) = response.into_parts() {
            if meta.status.is_success() {
                let access_token = value["bearer"]["access_token"]
                    .as_str()
                    .map(|s| s.to_string());
                let refresh_token = value["bearer"]["refresh_token"]
                    .as_str()
                    .map(|s| s.to_string());
                let id_token = value["bearer"]["id_token"].as_str().map(|s| s.to_string());
                let userinfo: Option<UserInfo> =
                    serde_json::from_value(value["userinfo"].clone()).unwrap_or_default();
                // "expires" is RFC 3339; unparsable values become None.
                let expires = match value["expires"].as_str() {
                    Some(expires) => DateTime::parse_from_rfc3339(expires).ok(),
                    None => None,
                }
                .map(|expires| expires.with_timezone(&Utc));
                let token = match (access_token, id_token) {
                    (Some(access_token), Some(id_token)) => {
                        if !is_refresh {
                            Some(Token {
                                access_token,
                                refresh_token,
                                id_token,
                                expires,
                                userinfo,
                            })
                        } else if let Ok(prefs) = Preferences::load() {
                            Some(Token {
                                access_token,
                                refresh_token,
                                id_token: prefs.id_token,
                                expires,
                                userinfo: prefs.user_info,
                            })
                        } else {
                            None
                        }
                    }
                    _ => None,
                };
                log::info!("Token: {:?}", token);
                match token {
                    Some(token) => Msg::ShareAccessToken(Some(token)),
                    None => Msg::FetchTokenFailed,
                }
            } else {
                Msg::FetchTokenFailed
            }
        } else {
            Msg::FetchTokenFailed
        }
    }
}
|
use num_bigint::BigUint;
use num_traits::cast::{FromPrimitive, ToPrimitive};
use orion::aead;
#[allow(dead_code)]
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::io::prelude::*;
use blake2::{Blake2b, Digest};
use zeroize::Zeroize;
// Fixed salt for deriving the state-file encryption key from the master
// password. NOTE(review): fixed (non-random) salts appear deliberate here —
// password derivation must be deterministic across machines — confirm.
const ENCRYPTION_SALT: [u8; 64] = [
    0xe3, 0x1a, 0x0c, 0x9b, 0x6b, 0x01, 0xbe, 0x19, 0xc5, 0x44, 0x7f, 0xb9, 0x2f, 0x79, 0x94, 0x91,
    0xcf, 0xae, 0xb6, 0xda, 0x09, 0x0c, 0x24, 0xf3, 0x0f, 0xab, 0x2b, 0xf2, 0x4a, 0x1c, 0x39, 0xf7,
    0xc1, 0xfc, 0xdc, 0x61, 0xc3, 0xf3, 0x15, 0xcf, 0x64, 0x76, 0x96, 0x25, 0xf9, 0xe6, 0xb1, 0x18,
    0x62, 0xbd, 0x03, 0x6a, 0x67, 0x2d, 0xbb, 0x42, 0x1c, 0xbb, 0xb3, 0x24, 0x83, 0x5f, 0x7e, 0x53,
];
// Fixed salt for hashing the master password itself (kept distinct from the
// encryption-key salt so the two derived values differ).
const MASTER_PASS_SALT: [u8; 64] = [
    0xa1, 0x48, 0x48, 0x5a, 0x76, 0x31, 0xe5, 0x45, 0x65, 0xf4, 0xde, 0xb0, 0xbb, 0x3a, 0x8f, 0xcc,
    0xaa, 0x35, 0xff, 0x87, 0x7c, 0xd5, 0xcd, 0x4c, 0x4a, 0xbb, 0xbe, 0x21, 0x56, 0x5b, 0xe2, 0x7e,
    0x60, 0x70, 0xd6, 0x5c, 0x0e, 0x3a, 0xa6, 0x02, 0xf9, 0xa1, 0xc9, 0x37, 0x88, 0x2a, 0xe0, 0xdc,
    0x06, 0xcc, 0x25, 0xa6, 0x05, 0x8d, 0x75, 0x91, 0xc5, 0xdb, 0x0d, 0x90, 0xdb, 0xf3, 0x05, 0x8f,
];
/// Crate-local result type: boxes any error so `?` can propagate freely.
/// (`dyn` added: bare trait objects are deprecated since the 2018 edition
/// and a hard error in 2021.)
type Result<T> = std::result::Result<T, Box<dyn std::error::Error>>;
/// Deterministic password manager state.
///
/// Account passwords are derived on demand (never stored); only the
/// hashed master password, derived encryption key, optional file path
/// and per-account settings live here.
#[derive(Serialize, Deserialize, Debug, PartialEq)]
pub struct Ranpaman {
    // Argon2 hash of the master password (raw bytes; zeroized on drop).
    master_password: Vec<u8>,
    // Argon2-derived key used to encrypt the serialized state on disk.
    encryption_key: Vec<u8>,
    // Path of the encrypted state file; `None` = in-memory only.
    file_path: Option<String>,
    // (service_name, login) -> per-account generation settings.
    data: HashMap<(String, String), Settings>,
}
/// Per-account password generation settings.
#[derive(Serialize, Deserialize, Debug, PartialEq)]
pub struct Settings {
    // When true, digit and symbol character sets are added to the
    // alphabet (see `generate_character_sets`).
    include_special_characters: bool,
    // Bumping the revision changes the derivation salt, producing a
    // completely new password for the account.
    revision: u32,
    // Length of the generated password, in characters.
    password_length: u32,
}
impl Default for Settings {
    /// Defaults: full character set, first revision, 30-character passwords.
    fn default() -> Settings {
        let include_special_characters = true;
        let revision = 0;
        let password_length = 30;
        Settings {
            include_special_characters,
            revision,
            password_length,
        }
    }
}
impl Drop for Ranpaman {
    /// Wipe key material from memory when the manager is dropped.
    fn drop(&mut self) {
        self.master_password.zeroize();
        self.encryption_key.zeroize();
    }
}
impl Ranpaman {
    /// Create a new manager from a master password.
    ///
    /// The plaintext password is stretched with Argon2 twice (different
    /// salts) into an authentication hash and an encryption key, then
    /// zeroized so the plaintext does not linger in memory.
    pub fn new(mut master_password: String, file_path: Option<String>) -> Ranpaman {
        let config = argon2::Config::default();
        let pw = argon2::hash_raw(master_password.as_bytes(), &MASTER_PASS_SALT, &config).unwrap();
        let key = argon2::hash_raw(master_password.as_bytes(), &ENCRYPTION_SALT, &config).unwrap();
        master_password.zeroize();
        Ranpaman {
            master_password: pw,
            encryption_key: key,
            file_path,
            data: HashMap::new(),
        }
    }
    /// Register a new account under the `(service_name, login)` key.
    ///
    /// # Errors
    /// Fails if the login or service name is empty, the requested
    /// password length is below 4, or the account already exists.
    /// (These were previously silent TODO no-ops.)
    pub fn add_account(
        &mut self,
        login: String,
        service_name: String,
        settings: Settings,
    ) -> Result<()> {
        if service_name.is_empty() || login.is_empty() || settings.password_length < 4 {
            return Err(
                "invalid account: login and service must be non-empty, password length >= 4"
                    .into(),
            );
        }
        let key = (service_name, login);
        if self.data.contains_key(&key) {
            return Err("an account for this service and login already exists".into());
        }
        self.data.insert(key, settings);
        Ok(())
    }
    /// Derive the password for a stored account.
    ///
    /// The salt binds the output to the login, service name and revision,
    /// so bumping the revision yields a new password.
    ///
    /// # Errors
    /// Fails if no matching account exists or encoding fails.
    pub fn get_password(&self, login: String, service_name: String) -> Result<String> {
        match self.data.get(&(service_name.clone(), login.clone())) {
            Some(settings) => {
                let salt: &[u8] = &[
                    login.as_bytes(),
                    service_name.as_bytes(),
                    &settings.revision.to_le_bytes(),
                ]
                .concat();
                let argon_config = argon2::Config::default();
                let hash = argon2::hash_raw(&self.master_password, salt, &argon_config).unwrap();
                let char_sets = generate_character_sets(settings);
                encode_password(&hash, char_sets, settings.password_length as usize)
            }
            None => Err("no account stored for this service and login".into()),
        }
    }
    /// Move (or remove) the backing file.
    ///
    /// `None` deletes the current file; `Some(path)` writes the encrypted
    /// state to the new path and removes the old file.
    pub fn change_file_path(&mut self, new_path: Option<String>) -> Result<()> {
        match new_path {
            None => {
                if let Some(old_path) = &self.file_path {
                    std::fs::remove_file(old_path)?;
                    self.file_path = None;
                }
            }
            Some(new_path) => {
                let mut new_file = std::fs::File::create(&new_path)?;
                if let Some(old_path) = &self.file_path {
                    std::fs::remove_file(old_path)?;
                }
                // Update the path first so the serialized state records it.
                self.file_path = Some(new_path);
                // `write_all` instead of `write`: a short write must not
                // silently truncate the vault.
                new_file.write_all(&self.encrypted_state())?;
            }
        }
        Ok(())
    }
    /// Serialize and encrypt the full manager state (shared by
    /// `change_file_path` and `write_to_file`).
    fn encrypted_state(&self) -> Vec<u8> {
        let encoded_self = bincode::serialize(&self).unwrap();
        aead::seal(
            &aead::SecretKey::from_slice(&self.encryption_key).unwrap(),
            &encoded_self,
        )
        .unwrap()
    }
    /// Write the encrypted state to the configured file path.
    ///
    /// # Errors
    /// Fails if no file path is set or the write fails.
    pub fn write_to_file(&self) -> Result<()> {
        std::fs::write(
            self.file_path.as_ref().ok_or("No file path specified")?,
            self.encrypted_state(),
        )?;
        Ok(())
    }
    /// Load and decrypt a manager from `path` using `master_password`.
    ///
    /// # Errors
    /// Fails on I/O errors, on decryption failure (wrong password or
    /// corrupted file — previously a panic), or on deserialization errors.
    pub fn read_from_file(mut master_password: String, path: &str) -> Result<Ranpaman> {
        let read = std::fs::read(path)?;
        let config = argon2::Config::default();
        let key = argon2::hash_raw(master_password.as_bytes(), &ENCRYPTION_SALT, &config).unwrap();
        master_password.zeroize();
        let decrypted = aead::open(&aead::SecretKey::from_slice(&key).unwrap(), &read)
            .map_err(|_| "failed to decrypt: wrong master password or corrupted file")?;
        Ok(bincode::deserialize(&decrypted)?)
    }
    /// Path of the backing file, if any.
    pub fn get_file_path(&self) -> Option<&String> {
        self.file_path.as_ref()
    }
}
/// Build the character alphabet for password encoding: uppercase and
/// lowercase letters always, plus digits and symbols when the account's
/// settings ask for special characters.
fn generate_character_sets(settings: &Settings) -> Vec<Vec<char>> {
    let uppercase: Vec<char> = (b'A'..=b'Z').map(char::from).collect();
    let lowercase: Vec<char> = (b'a'..=b'z').map(char::from).collect();
    let mut char_sets = vec![uppercase, lowercase];
    if settings.include_special_characters {
        // Digits '1'..='9' (no '0'), matching the historical alphabet —
        // changing it would break every previously derived password.
        char_sets.push(('1'..='9').collect());
        char_sets.push(vec!['%', '&', '#', '$', '+', '-', '@']);
    }
    char_sets
}
/// Map raw hash bytes onto a password drawn from `char_sets`.
///
/// Treats `raw_password` as a little-endian big integer and repeatedly
/// takes the remainder modulo the total alphabet size to pick the next
/// character. If the result does not use at least one character from
/// every set, the raw bytes are re-hashed (Blake2b) and encoding is
/// retried, so the output always covers all requested character classes.
///
/// # Errors
/// Fails if any character set is empty (or none are given), or if the
/// input does not carry enough entropy for the requested length.
/// (Both checks were previously silent TODO no-ops; an empty alphabet
/// would have panicked on the modulo below.)
fn encode_password(
    raw_password: &[u8],
    char_sets: Vec<Vec<char>>,
    length: usize,
) -> Result<String> {
    if char_sets.is_empty() || char_sets.iter().any(|set| set.is_empty()) {
        return Err("character sets must be present and non-empty".into());
    }
    let mut entropy = BigUint::from_bytes_le(raw_password);
    let mut char_set_use_flags: Vec<bool> = vec![false; char_sets.len()];
    let set_length: usize = char_sets.iter().map(|set| set.len()).sum();
    let mut encoded_password = String::new();
    while encoded_password.len() < length {
        // Once the entropy pool drops below the alphabet size the next
        // "random" pick would be biased/degenerate — refuse instead.
        if entropy < BigUint::from_usize(set_length).unwrap() {
            return Err("not enough entropy to encode a password of this length".into());
        }
        let new_char: usize = (entropy.clone() % set_length).to_usize().unwrap();
        entropy /= set_length;
        // Locate which set the flat index falls into.
        let mut collective_length = 0;
        for (index, set) in char_sets.iter().enumerate() {
            if new_char < set.len() + collective_length {
                encoded_password.push(set[new_char - collective_length]);
                char_set_use_flags[index] = true;
                break;
            }
            collective_length += set.len();
        }
    }
    if char_set_use_flags.into_iter().all(|flag| flag) {
        Ok(encoded_password)
    } else {
        // Not every character class was used: derive fresh bytes via an
        // extra hashing round and try again (recursively).
        let mut hasher = Blake2b::new();
        hasher.input(raw_password);
        encode_password(&hasher.result(), char_sets, length)
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // Golden test: the derived password must stay stable across releases,
    // otherwise users lose access to their accounts.
    // NOTE(review): `site` is passed as the `login` argument and `mail`
    // as `service_name` — looks swapped, but get_password uses the same
    // order so the round trip is consistent; confirm intent.
    #[test]
    fn service_password_generation() {
        let mut ranpaman = Ranpaman::new("masterpass".to_string(), None);
        let site = String::from("somesite.com");
        let mail = String::from("someone@somemail.com");
        let settings = Settings::default();
        ranpaman
            .add_account(site.clone(), mail.clone(), settings)
            .unwrap();
        let password = ranpaman.get_password(site, mail).unwrap();
        assert_eq!("#DnLScQHt4zu%QDLqP$7VD535UjExb", password);
    }
    // Golden test: the Argon2-derived hash/key bytes must not change,
    // or existing vault files become unreadable.
    #[test]
    fn key_generation() {
        let ranpaman = Ranpaman::new("masterpass".to_string(), None);
        assert_eq!(
            ranpaman.master_password,
            [
                223, 108, 222, 141, 127, 89, 120, 143, 166, 127, 41, 255, 155, 5, 5, 195, 198, 186,
                182, 18, 209, 221, 182, 64, 164, 34, 27, 230, 196, 48, 187, 237
            ]
        );
        assert_eq!(
            ranpaman.encryption_key,
            [
                110, 249, 117, 224, 82, 86, 66, 21, 42, 235, 243, 204, 137, 226, 46, 12, 116, 161,
                243, 48, 201, 170, 187, 179, 80, 147, 37, 111, 124, 108, 191, 182
            ]
        );
    }
    // Round trip: write encrypted state, read it back, compare equal.
    #[test]
    fn read_write() {
        let path = "read_write_test_file";
        let ranpaman = Ranpaman::new("masterpass".to_string(), Some(path.to_string()));
        ranpaman.write_to_file().unwrap();
        let decoded = Ranpaman::read_from_file("masterpass".to_string(), path).unwrap();
        std::fs::remove_file(path).unwrap();
        assert_eq!(ranpaman, decoded);
    }
    // Moving the vault to a new path and back must preserve the state.
    #[test]
    fn change_file_path() {
        let path = "change_file_path_test_file";
        let ranpaman = Ranpaman::new("masterpass".to_string(), Some(path.to_string()));
        ranpaman.write_to_file().unwrap();
        let mut decoded = Ranpaman::read_from_file("masterpass".to_string(), path).unwrap();
        let new_path = "change_file_path_other_test_file";
        decoded
            .change_file_path(Some(new_path.to_string()))
            .unwrap();
        let mut decoded = Ranpaman::read_from_file("masterpass".to_string(), new_path).unwrap();
        decoded.change_file_path(Some(path.to_string())).unwrap();
        std::fs::remove_file(path).unwrap();
        assert_eq!(ranpaman, decoded);
    }
    // The accessor must report the path the manager was created with.
    #[test]
    fn get_file_path() {
        let path = "get_file_path_test_file";
        let ranpaman = Ranpaman::new("masterpass".to_string(), Some(path.to_string()));
        assert_eq!(ranpaman.get_file_path(), Some(&path.to_string()));
    }
}
|
//! ๐ฆ BATS! ๐ฆ
#![doc(
html_logo_url = "https://raw.githubusercontent.com/tarcieri/bats/batcave/img/cutebat.png",
html_root_url = "https://docs.rs/bats/0.10.31"
)]
#![warn(missing_docs, rust_2018_idioms)]
use crossterm::{cursor, terminal};
use gumdrop::Options;
use rand::{thread_rng, Rng};
use std::{thread::sleep, time::Duration};
// NOTE(review): the char literals below appear encoding-mangled in this
// copy of the file; the original source used emoji glyphs (bat, crab,
// pumpkin, ghost, ...) — confirm the file's UTF-8 encoding before editing.
/// Bat!
pub const BAT: char = '๐ฆ';
/// Crab!
pub const CRAB: char = '๐ฆ';
/// Halloween things
pub const HALLOWEEN_THINGS: &[char] = &['๐ง', '๐ฆ', '๐', '๐ป', '๐', '๐ธ', '๐ฎ', '๐'];
/// Bats! A spooky bat printer
///
/// Command line options parsed by `gumdrop`; see `Bats::run` for how
/// they are interpreted.
#[derive(Debug, Default, Options)]
pub struct Bats {
    /// Help
    #[options(help = "print help message")]
    help: bool,
    /// Character to print (mutually exclusive with `--crab`; the
    /// conflict is checked at runtime in `thing_to_draw`)
    #[options(short = "c", long = "char", help = "character to print")]
    char: Option<char>,
    /// Draw crabs
    #[options(long = "crab", help = "draw crabs")]
    crab: bool,
    /// Speed
    #[options(
        short = "s",
        long = "speed",
        help = "speed factor (default 15, max 255)"
    )]
    pub speed: Option<u8>,
    /// Enable Halloween mode
    #[options(long = "halloween", help = "enable halloween mode")]
    pub halloween: bool,
}
impl Bats {
    /// Run the program: show a banner, clear the screen, then draw the
    /// selected character forever (this loop never exits).
    pub fn run(&self) {
        println!("๐ฆ BATS! ๐ฆ");
        sleep(Duration::from_millis(250));
        terminal().clear(terminal::ClearType::All).unwrap();
        let cursor = cursor();
        cursor.hide().unwrap();
        cursor.goto(0, 0).unwrap();
        // Resolve what to draw once, up front; halloween mode overrides it.
        let thing = self.thing_to_draw();
        let is_halloween = self.is_it_halloween();
        loop {
            if is_halloween {
                self.draw_halloween();
            } else {
                self.draw(thing);
            }
        }
    }
    /// Draw a random halloweeny-thing
    pub fn draw_halloween(&self) {
        self.draw(HALLOWEEN_THINGS[thread_rng().gen_range(0, HALLOWEEN_THINGS.len())]);
    }
    /// Animate `thing` flying from the right edge to a random column on
    /// the current row, then advance (or wrap) the drawing row.
    pub fn draw(&self, thing: char) {
        let mut rng = thread_rng();
        let terminal = terminal();
        let (term_width, term_height) = terminal.size().unwrap();
        let cursor = cursor();
        let y_position = cursor.pos().unwrap().1;
        // NOTE(review): assumes the terminal is at least 2 columns wide;
        // a narrower terminal would underflow here.
        let start_pos = term_width - 2;
        let end_pos = rng.gen_range(0, start_pos);
        // Longer flights start with a larger delay; it shrinks each frame
        // so the character eases toward its destination.
        let mut delay = u64::from(10 + (start_pos - end_pos) * 2);
        let delay_scale = rng.gen_range(self.speed_factor(), self.speed_factor() * 2);
        for pos in (end_pos..start_pos).rev() {
            cursor.goto(pos, y_position).unwrap();
            terminal.write(thing).unwrap();
            sleep(Duration::from_millis(delay / delay_scale));
            delay -= 1;
            if pos != end_pos {
                // Erase the character before drawing the next frame.
                cursor.goto(pos, y_position).unwrap();
                terminal.write(" ").unwrap();
            }
        }
        if y_position < term_height - 1 {
            cursor.goto(0, y_position + 1).unwrap();
        } else {
            // Bottom of the screen reached: clear and start from the top.
            terminal.clear(terminal::ClearType::All).unwrap();
            cursor.goto(0, 0).unwrap();
        }
        sleep(Duration::from_millis(256 / self.speed_factor()));
    }
    /// Character to draw; panics if both `--char` and `--crab` are given.
    fn thing_to_draw(&self) -> char {
        if self.crab {
            if self.char.is_none() {
                CRAB
            } else {
                panic!("both --char and --crab options passed");
            }
        } else {
            self.char.unwrap_or(BAT)
        }
    }
    /// Get the current speed factor.
    ///
    /// Clamped to at least 1: a user-supplied `--speed 0` would otherwise
    /// panic in `draw` via `gen_range(0, 0)` and `256 / 0`.
    fn speed_factor(&self) -> u64 {
        u64::from(self.speed.unwrap_or(15)).max(1)
    }
    /// Is it halloween? True on Oct 31 or when `--halloween` is set.
    fn is_it_halloween(&self) -> bool {
        if self.halloween {
            return true;
        }
        use chrono::Datelike;
        let today = chrono::Local::today();
        today.month() == 10 && today.day() == 31
    }
}
|
use crate::emoji;
use crate::user::User;
/// Print which email address the current user is logged in with.
pub fn whoami(user: &User) {
    let data = &user.data;
    let email = &data.email;
    println!(
        "{} You are logged with the email '{}'.",
        emoji::WAVING,
        email
    );
}
|
// svd2rust-generated register accessor; code kept verbatim so it stays
// diffable against regenerated output.
#[doc = "Reader of register CLK_CAL_CNT2"]
pub type R = crate::R<u32, super::CLK_CAL_CNT2>;
#[doc = "Reader of field `CAL_COUNTER2`"]
pub type CAL_COUNTER2_R = crate::R<u32, u32>;
impl R {
    #[doc = "Bits 0:23 - Up-counter clocked on fast DDFT output #1 (see TST_DDFT_FAST_CTL). When CLK_CAL_CNT1.CAL_COUNTER_DONE==1, the counter is stopped and can be read by SW. Do not read this value unless CAL_COUNTER_DONE==1. The expected final value is related to the ratio of clock frequencies used for the two counters and the value loaded into counter 1: CLK_CAL_CNT2.COUNTER=(F_cnt2/F_cnt1)*(CLK_CAL_CNT1.COUNTER)"]
    #[inline(always)]
    pub fn cal_counter2(&self) -> CAL_COUNTER2_R {
        // Only the low 24 bits hold the counter value.
        CAL_COUNTER2_R::new((self.bits & 0x00ff_ffff) as u32)
    }
}
|
#![no_std]
#![allow(non_snake_case)]
#![allow(unused)]
#![allow(non_camel_case_types)]
// GEN CUT HERE
include!(concat!(env!("OUT_DIR"), "/src/lib_inner.rs"));
|
// svd2rust-generated register module for the DSI wrapper interrupt and
// status register (WISR); code kept verbatim for regeneration diffs.
#[doc = "Register `WISR` reader"]
pub type R = crate::R<WISR_SPEC>;
#[doc = "Field `TEIF` reader - TEIF"]
pub type TEIF_R = crate::BitReader;
#[doc = "Field `ERIF` reader - ERIF"]
pub type ERIF_R = crate::BitReader;
#[doc = "Field `BUSY` reader - BUSY"]
pub type BUSY_R = crate::BitReader;
#[doc = "Field `PLLLS` reader - PLLLS"]
pub type PLLLS_R = crate::BitReader;
#[doc = "Field `PLLLIF` reader - PLLLIF"]
pub type PLLLIF_R = crate::BitReader;
#[doc = "Field `PLLUIF` reader - PLLUIF"]
pub type PLLUIF_R = crate::BitReader;
#[doc = "Field `RRS` reader - RRS"]
pub type RRS_R = crate::BitReader;
#[doc = "Field `RRIF` reader - RRIF"]
pub type RRIF_R = crate::BitReader;
impl R {
    #[doc = "Bit 0 - TEIF"]
    #[inline(always)]
    pub fn teif(&self) -> TEIF_R {
        TEIF_R::new((self.bits & 1) != 0)
    }
    #[doc = "Bit 1 - ERIF"]
    #[inline(always)]
    pub fn erif(&self) -> ERIF_R {
        ERIF_R::new(((self.bits >> 1) & 1) != 0)
    }
    #[doc = "Bit 2 - BUSY"]
    #[inline(always)]
    pub fn busy(&self) -> BUSY_R {
        BUSY_R::new(((self.bits >> 2) & 1) != 0)
    }
    #[doc = "Bit 8 - PLLLS"]
    #[inline(always)]
    pub fn pllls(&self) -> PLLLS_R {
        PLLLS_R::new(((self.bits >> 8) & 1) != 0)
    }
    #[doc = "Bit 9 - PLLLIF"]
    #[inline(always)]
    pub fn plllif(&self) -> PLLLIF_R {
        PLLLIF_R::new(((self.bits >> 9) & 1) != 0)
    }
    #[doc = "Bit 10 - PLLUIF"]
    #[inline(always)]
    pub fn plluif(&self) -> PLLUIF_R {
        PLLUIF_R::new(((self.bits >> 10) & 1) != 0)
    }
    #[doc = "Bit 12 - RRS"]
    #[inline(always)]
    pub fn rrs(&self) -> RRS_R {
        RRS_R::new(((self.bits >> 12) & 1) != 0)
    }
    #[doc = "Bit 13 - RRIF"]
    #[inline(always)]
    pub fn rrif(&self) -> RRIF_R {
        RRIF_R::new(((self.bits >> 13) & 1) != 0)
    }
}
#[doc = "DSI wrapper interrupt and status register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`wisr::R`](R). See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct WISR_SPEC;
impl crate::RegisterSpec for WISR_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`wisr::R`](R) reader structure"]
impl crate::Readable for WISR_SPEC {}
#[doc = "`reset()` method sets WISR to value 0"]
impl crate::Resettable for WISR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
use sudo_test::{Command, Env, TextFile};
use crate::{
visudo::{CHMOD_EXEC, DEFAULT_EDITOR, EDITOR_TRUE, ETC_SUDOERS, LOGS_PATH, TMP_SUDOERS},
Result, SUDOERS_ALL_ALL_NOPASSWD, SUDOERS_ROOT_ALL, USERNAME,
};
// Snapshot helper for these tests: normalizes the random mkdtemp suffix
// in temp-file paths and stores snapshots under snapshots/visudo/flag_file.
macro_rules! assert_snapshot {
    ($($tt:tt)*) => {
        insta::with_settings!({
            filters => vec![(r"sudoers-[a-zA-Z0-9]{6}", "[mkdtemp]")],
            prepend_module_to_snapshot => false,
            snapshot_path => "../snapshots/visudo/flag_file",
        }, {
            insta::assert_snapshot!($($tt)*)
        });
    };
}
// `visudo -f` on a nonexistent path must create the file owned by
// root:root with -rw-r----- permissions.
#[test]
fn creates_sudoers_file_with_default_ownership_and_perms_if_it_doesnt_exist() -> Result<()> {
    let env = Env("")
        .file(DEFAULT_EDITOR, TextFile(EDITOR_TRUE).chmod(CHMOD_EXEC))
        .build()?;
    let file_path = TMP_SUDOERS;
    Command::new("visudo")
        .args(["-f", file_path])
        .output(&env)?
        .assert_success()?;
    let ls_output = Command::new("ls")
        .args(["-l", file_path])
        .output(&env)?
        .stdout()?;
    assert!(ls_output.starts_with("-rw-r----- 1 root root"));
    Ok(())
}
// The fake editor rewrites the file; visudo must persist the edit when
// the result parses without syntax errors.
#[test]
fn saves_file_if_no_syntax_errors() -> Result<()> {
    let expected = SUDOERS_ALL_ALL_NOPASSWD;
    let unexpected = SUDOERS_ROOT_ALL;
    let file_path = TMP_SUDOERS;
    let env = Env("")
        .file(file_path, unexpected)
        .file(
            DEFAULT_EDITOR,
            TextFile(format!(
                r#"#!/bin/sh
echo '{expected}' > $2"#
            ))
            .chmod(CHMOD_EXEC),
        )
        .build()?;
    Command::new("visudo")
        .args(["-f", file_path])
        .output(&env)?
        .assert_success()?;
    let actual = Command::new("cat").arg(file_path).output(&env)?.stdout()?;
    assert_eq!(expected, actual);
    Ok(())
}
// The sudoers path may also be given as a positional argument instead
// of via -f / --file.
#[test]
fn positional_argument() -> Result<()> {
    let expected = SUDOERS_ALL_ALL_NOPASSWD;
    let unexpected = SUDOERS_ROOT_ALL;
    let file_path = TMP_SUDOERS;
    let env = Env("")
        .file(file_path, unexpected)
        .file(
            DEFAULT_EDITOR,
            TextFile(format!(
                r#"#!/bin/sh
echo '{expected}' > $2"#
            ))
            .chmod(CHMOD_EXEC),
        )
        .build()?;
    Command::new("visudo")
        .arg(file_path)
        .output(&env)?
        .assert_success()?;
    let actual = Command::new("cat").arg(file_path).output(&env)?.stdout()?;
    assert_eq!(expected, actual);
    Ok(())
}
// When both -f and a positional path are given, only the -f target may
// be edited; the positional file must stay untouched.
// NOTE(review): uses literal "/tmp/sudoers" instead of TMP_SUDOERS like
// the sibling tests — presumably the same path; confirm the constant.
#[test]
fn flag_has_precedence_over_positional_argument() -> Result<()> {
    let expected = SUDOERS_ALL_ALL_NOPASSWD;
    let original = SUDOERS_ROOT_ALL;
    let file_path = "/tmp/sudoers";
    let file_path2 = "/tmp/sudoers2";
    let env = Env("")
        .file(file_path, original)
        .file(file_path2, original)
        .file(
            DEFAULT_EDITOR,
            TextFile(format!(
                r#"#!/bin/sh
echo '{expected}' > $2"#
            ))
            .chmod(CHMOD_EXEC),
        )
        .build()?;
    Command::new("visudo")
        .args(["-f", file_path])
        .arg(file_path2)
        .output(&env)?
        .assert_success()?;
    let changed = Command::new("cat").arg(file_path).output(&env)?.stdout()?;
    assert_eq!(expected, changed);
    let unchanged = Command::new("cat").arg(file_path2).output(&env)?.stdout()?;
    assert_eq!(original, unchanged);
    Ok(())
}
// Editing an alternate file with --file must never touch /etc/sudoers
// (seeded here via `Env(expected)`).
#[test]
fn etc_sudoers_is_not_modified() -> Result<()> {
    let expected = SUDOERS_ALL_ALL_NOPASSWD;
    let unexpected = SUDOERS_ROOT_ALL;
    let env = Env(expected)
        .file(
            DEFAULT_EDITOR,
            TextFile(format!(
                "#!/bin/sh
echo '{unexpected}' > $2"
            ))
            .chmod(CHMOD_EXEC),
        )
        .build()?;
    Command::new("visudo")
        .args(["--file", TMP_SUDOERS])
        .output(&env)?
        .assert_success()?;
    let actual = Command::new("cat")
        .arg(ETC_SUDOERS)
        .output(&env)?
        .stdout()?;
    assert_eq!(expected, actual);
    Ok(())
}
// The editor must be invoked on a temporary copy of the target file,
// not the file itself; the fake editor logs its arguments for us.
#[test]
fn passes_temporary_file_to_editor() -> Result<()> {
    let env = Env("")
        .file(
            DEFAULT_EDITOR,
            TextFile(format!(
                r#"#!/bin/sh
echo "$@" > {LOGS_PATH}"#
            ))
            .chmod(CHMOD_EXEC),
        )
        .build()?;
    let file_path = TMP_SUDOERS;
    Command::new("visudo")
        .args(["--file", file_path])
        .output(&env)?
        .assert_success()?;
    let args = Command::new("cat").arg(LOGS_PATH).output(&env)?.stdout()?;
    if sudo_test::is_original_sudo() {
        assert_eq!(format!("-- {file_path}.tmp"), args);
    } else {
        // sudo-rs uses a mkdtemp-style name; compared via snapshot.
        assert_snapshot!(args);
    }
    Ok(())
}
// A non-root user may create a sudoers file with -f; it must be owned
// by that user and their primary group.
// NOTE(review): uses chmod("111") while sibling tests use CHMOD_EXEC —
// possibly intentional (exec-only editor), confirm.
#[test]
fn regular_user_can_create_file() -> Result<()> {
    let env = Env("")
        .file(DEFAULT_EDITOR, TextFile(EDITOR_TRUE).chmod("111"))
        .user(USERNAME)
        .build()?;
    let file_path = TMP_SUDOERS;
    Command::new("visudo")
        .args(["-f", file_path])
        .as_user(USERNAME)
        .output(&env)?
        .assert_success()?;
    let ls_output = Command::new("ls")
        .args(["-l", file_path])
        .output(&env)?
        .stdout()?;
    assert!(ls_output.starts_with(&format!("-rw-r----- 1 {USERNAME} users")));
    Ok(())
}
// A non-root user may edit a sudoers file they own (and can write).
#[test]
fn regular_user_can_update_a_file_they_own() -> Result<()> {
    let expected = SUDOERS_ALL_ALL_NOPASSWD;
    let unexpected = SUDOERS_ROOT_ALL;
    let file_path = TMP_SUDOERS;
    let env = Env("")
        .file(file_path, TextFile(unexpected).chown(USERNAME).chmod("666"))
        .file(
            DEFAULT_EDITOR,
            TextFile(format!(
                r#"#!/bin/sh
echo '{expected}' > $2"#
            ))
            .chmod("777"),
        )
        .user(USERNAME)
        .build()?;
    Command::new("visudo")
        .args(["-f", file_path])
        .as_user(USERNAME)
        .output(&env)?
        .assert_success()?;
    let sudoers = Command::new("cat").arg(file_path).output(&env)?.stdout()?;
    assert_eq!(expected, sudoers);
    Ok(())
}
// A non-root user must NOT be able to edit a sudoers file they do not
// own, even when it is world-writable; visudo should fail with exit 1.
// Currently ignored pending gh657.
#[test]
#[ignore = "gh657"]
fn regular_user_cannot_update_a_file_they_dont_own() -> Result<()> {
    let expected = SUDOERS_ALL_ALL_NOPASSWD;
    let unexpected = SUDOERS_ROOT_ALL;
    let file_path = TMP_SUDOERS;
    let env = Env("")
        .file(file_path, TextFile(unexpected).chmod("666"))
        .file(
            DEFAULT_EDITOR,
            TextFile(format!(
                r#"#!/bin/sh
echo '{expected}' > $2"#
            ))
            .chmod("777"),
        )
        .user(USERNAME)
        .build()?;
    let output = Command::new("visudo")
        .args(["-f", file_path])
        .as_user(USERNAME)
        .output(&env)?;
    assert!(!output.status().success());
    assert_eq!(Some(1), output.status().code());
    assert_contains!(
        output.stderr(),
        "visudo: unable to set (uid, gid) of /tmp/sudoers.tmp"
    );
    Ok(())
}
|
// ANSI escape sequences for terminal colors and cursor control.
//
// Fixed: `"\u001b"` is Java/JavaScript escape syntax and does not compile
// in Rust — Rust requires `\u{1b}`. Also dropped the redundant `'static`
// lifetime (implied on const references).
//
// Foreground colors (`_F`).
pub const ANSI_R_F: &str = "\u{1b}[31m";
pub const ANSI_B_F: &str = "\u{1b}[34m";
pub const ANSI_G_F: &str = "\u{1b}[32m";
pub const ANSI_Y_F: &str = "\u{1b}[33m";
pub const ANSI_M_F: &str = "\u{1b}[35m";
pub const ANSI_C_F: &str = "\u{1b}[36m";
pub const ANSI_W_F: &str = "\u{1b}[37m";
pub const ANSI_D_F: &str = "\u{1b}[39m";
// Background colors (`_B`).
pub const ANSI_R_B: &str = "\u{1b}[41m";
pub const ANSI_B_B: &str = "\u{1b}[44m";
pub const ANSI_G_B: &str = "\u{1b}[42m";
pub const ANSI_Y_B: &str = "\u{1b}[43m";
pub const ANSI_M_B: &str = "\u{1b}[45m";
pub const ANSI_C_B: &str = "\u{1b}[46m";
pub const ANSI_W_B: &str = "\u{1b}[47m";
pub const ANSI_D_B: &str = "\u{1b}[49m";
// Reset all attributes.
pub const ANSI_D: &str = "\u{1b}[0m";
// Save / restore cursor position (DEC sequences).
pub const ANSI_SAVE: &str = "\u{1b}7";
pub const ANSI_RESTORE: &str = "\u{1b}8";
// Clear the entire screen.
pub const ANSI_PAGE: &str = "\u{1b}[2J";
// Format template for the ANSI cursor-position sequence (CSI row;col H).
// Fixed: `\u001b` is not valid Rust escape syntax; must be `\u{1b}`.
macro_rules! jump_fmt {
    () => ("\u{1b}[{};{}H")
}
/// Build the ANSI escape sequence that moves the cursor to `row`, `col`
/// (1-based, per the terminal convention).
pub fn jump_str(row: u16, col: u16) -> String {
    format!(jump_fmt!(), row, col)
}
|
use crate::data::Symbol;
// use crate::cli::Listable;
// Built-in watch list of ticker symbols (uppercase) used by
// `get_watch_list` to filter incoming symbols.
const DEFAULT: [&str; 10] = [
    "SPY",
    "TSLA",
    "DIS",
    "AMD",
    "NVDA",
    "AAPL",
    "MSFT",
    "FB",
    "GOOG",
    "AMZN",
];
pub fn get_watch_list(s: &[Symbol]) -> Vec<Symbol> {
s.to_vec()
.into_iter()
.filter(|s| DEFAULT.iter().any(|x| *x == s.symbol.to_uppercase()))
.collect()
} |
use nb::block;
use embedded_hal::digital::v2::OutputPin;
use stm32f1xx_hal::{pac, prelude::*, timer::Timer};
/// Drive the three push-pull outputs PC13–PC15 high then low in
/// sequence, paced by a 1 Hz SysTick countdown timer. Never returns.
///
/// Takes exclusive ownership of the core and device peripherals
/// (`Peripherals::take`), so it must be called at most once or it panics.
pub fn blink() -> ! {
    let cp = cortex_m::Peripherals::take().unwrap();
    let dp = pac::Peripherals::take().unwrap();
    let mut flash = dp.FLASH.constrain();
    let mut rcc = dp.RCC.constrain();
    let clocks = rcc.cfgr.freeze(&mut flash.acr);
    let mut gpioc = dp.GPIOC.split(&mut rcc.apb2);
    // 1 Hz timer paces every state change ("timber" is presumably a typo
    // for "timer"; left as-is in this comment-only pass).
    let mut timber = Timer::syst(cp.SYST, &clocks).start_count_down(1.hz());
    let mut leds = [
        gpioc.pc13.into_push_pull_output(&mut gpioc.crh).downgrade(),
        gpioc.pc14.into_push_pull_output(&mut gpioc.crh).downgrade(),
        gpioc.pc15.into_push_pull_output(&mut gpioc.crh).downgrade(),
    ];
    loop {
        block!(timber.wait()).unwrap();
        // Drive each pin high, one timer tick apart.
        for led in leds.iter_mut() {
            block!(timber.wait()).unwrap();
            led.set_high().unwrap();
        }
        // block!(timber.wait()).unwrap();
        // Then drive each pin low again the same way.
        for led in leds.iter_mut() {
            block!(timber.wait()).unwrap();
            led.set_low().unwrap();
        }
    }
}
extern crate metric;
use std::fmt::Debug;
use metric::temperature::*;
/// will determine whether a given temperature is in danger of
/// reaching the freezing point of water
/// will determine whether a given temperature is in danger of
/// reaching the freezing point of water
///
/// Any unit comparable against `Celsius` is accepted; "in danger" means
/// strictly below 5 °C.
pub fn in_danger_of_freezing<T>(temp: &T) -> bool
where
    T: PartialOrd<Celsius>,
{
    let threshold = Celsius(5.0);
    *temp < threshold
}
/// Report on stdout whether `temp` is in danger of freezing.
pub fn print_danger<T>(temp: T)
where
    T: PartialOrd<Celsius> + Debug,
{
    // Insert " not" into the sentence when there is no freezing danger.
    let danger = if in_danger_of_freezing(&temp) { "" } else { " not" };
    println!("{:?} is{} in danger of freezing!", temp, danger);
}
/// Demo: report freezing danger for temperatures in three units.
fn main() {
    print_danger(Fahrenheit(34.));
    print_danger(Celsius(3.));
    print_danger(Kelvin(293.));
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.