text stringlengths 8 4.13M |
|---|
use std::any::Any;
use std::marker::PhantomData;
use specs::*;
use dispatch::sysinfo::*;
use dispatch::syswrapper::*;
/// Abstracts over `SystemBuilder<T>` for differently-typed systems so a
/// heterogeneous set of builders can each register with a `specs`
/// `DispatcherBuilder`.
pub trait AbstractBuilder<'a> {
    /// Consumes the builder, adds its system to `disp`, and returns the
    /// dispatcher builder for further chaining.
    fn build<'b>(self, disp: DispatcherBuilder<'a, 'b>) -> DispatcherBuilder<'a, 'b>;
}
/// Thread-local counterpart of `AbstractBuilder`: registers a system that
/// must run on the dispatcher's own thread (no `Send` bound required).
pub trait AbstractThreadLocalBuilder<'b> {
    /// Consumes the builder and adds its system as a thread-local system.
    fn build_thread_local<'a>(self, disp: DispatcherBuilder<'a, 'b>) -> DispatcherBuilder<'a, 'b>;
}
/// Deferred constructor for a system of type `T`.
///
/// Holds the system's constructor arguments type-erased as `Box<Any>`;
/// they are later handed to `T::new_args` when the system is built.
pub struct SystemBuilder<T> {
    // Type-erased arguments passed through to `T::new_args`.
    args: Box<Any>,
    // Pins the system type without storing a `T` value.
    marker: PhantomData<T>,
}
impl<T: SystemInfo> SystemBuilder<T> {
pub fn new<U: Any>(args: U) -> Self {
Self {
args: Box::new(args),
marker: PhantomData {},
}
}
}
impl<'a, T> AbstractBuilder<'a> for SystemBuilder<T>
where
    T: for<'c> System<'c> + Send + SystemInfo + 'a,
    T::Dependencies: SystemDeps,
    for<'c> <T as System<'c>>::SystemData: SystemData<'c>,
{
    /// Constructs the system from the stored arguments and registers it
    /// under its declared name with its declared dependency list.
    fn build<'b>(self, disp: DispatcherBuilder<'a, 'b>) -> DispatcherBuilder<'a, 'b> {
        let system = SystemWrapper(T::new_args(self.args));
        let deps = T::Dependencies::dependencies();
        disp.with(system, T::name(), &deps)
    }
}
impl<'b, T> AbstractThreadLocalBuilder<'b> for SystemBuilder<T>
where
    T: for<'c> System<'c> + SystemInfo + 'b,
{
    /// Constructs the system from the stored arguments and registers it
    /// to run on the dispatcher's thread (no `SystemWrapper` needed).
    fn build_thread_local<'a>(self, disp: DispatcherBuilder<'a, 'b>) -> DispatcherBuilder<'a, 'b> {
        let system = T::new_args(self.args);
        disp.with_thread_local(system)
    }
}
|
extern crate clap;
mod intcode_machine;
mod util;
use intcode_machine::{run_all, Machine, State};
use std::collections::HashMap;
use std::io::{stdin, BufRead};
use util::{PartID, part_id_from_cli};
/// Screen state reconstructed from the Intcode arcade machine's output
/// stream of (x, y, tile-id) triples.
struct Frame {
    /// Tile id at each (x, y) screen coordinate.
    map: HashMap<(i64, i64), i64>,
    /// Largest x seen so far; -1 until the first tile arrives.
    max_x: i64,
    /// Largest y seen so far; -1 until the first tile arrives.
    max_y: i64,
    /// Last score reported (via a triple with x == -1); -1 until set.
    score: i64,
    /// Most recent x position of the ball (tile id 4); -1 until seen.
    ball_x: i64,
    /// Most recent x position of the paddle (tile id 3); -1 until seen.
    bar_x: i64,
}
impl Frame {
    /// Creates an empty frame; every scalar field starts at the
    /// sentinel value -1 until real data arrives from the machine.
    pub fn new() -> Frame {
        let map = HashMap::new();
        Frame {
            map,
            max_x: -1,
            max_y: -1,
            score: -1,
            ball_x: -1,
            bar_x: -1,
        }
    }
}
/// Drains all pending machine output into `frame`, updating the tile
/// map, screen bounds, score, and the cached ball/paddle x positions.
fn apply_change(machine: &mut Machine, frame: &mut Frame) {
    while machine.has_output() {
        // Output arrives in fixed triples: x, then y, then tile id.
        // The pop order below must not change.
        let x = machine.pop_output();
        let y = machine.pop_output();
        let tile = machine.pop_output();
        // An x of -1 marks a score update rather than a tile; the third
        // value is then the score.
        if x == -1 {
            frame.score = tile;
            continue;
        }
        // Remember paddle (3) and ball (4) positions for the autoplayer.
        if tile == 3 {
            frame.bar_x = x;
        }
        if tile == 4 {
            frame.ball_x = x;
        }
        frame.map.insert((x, y), tile);
        // Grow the known screen bounds as tiles appear.
        frame.max_x = frame.max_x.max(x);
        frame.max_y = frame.max_y.max(y);
    }
}
/// Draws the current frame to stdout as ASCII art (two characters per
/// tile) followed by the current score.
///
/// Panics if the map contains a tile id outside 0..=4.
fn render(frame: &Frame) {
    for y in 0..=frame.max_y {
        for x in 0..=frame.max_x {
            // Positions never reported by the machine default to 0 (empty).
            let &tile = frame.map.get(&(x, y)).unwrap_or(&0);
            let glyph = match tile {
                0 => " ",
                1 => "WW", // wall
                2 => "**", // block
                3 => "__", // paddle
                4 => "()", // ball
                // Include the offending value so a bad frame is debuggable.
                _ => panic!("unexpected tile id {}", tile),
            };
            print!("{}", glyph);
        }
        // `println!()` is the idiomatic empty-line form (clippy:
        // println_empty_string).
        println!();
    }
    println!("Score = {}", frame.score);
}
/// Reads a comma-separated Intcode program from stdin and builds a
/// `Machine` from it.
///
/// Panics if stdin cannot be read or a field fails to parse.
fn load_machine() -> Machine {
    let program = stdin()
        .lock()
        // Split the raw byte stream on commas; `b','` is the idiomatic
        // byte literal (instead of `',' as u8`).
        .split(b',')
        // Decode each chunk as UTF-8 and parse it as a number; any
        // failure collapses to None and panics below with context.
        // NOTE(review): a trailing newline in the final chunk would fail
        // to parse — confirm the input is newline-free.
        .map(|chunk| String::from_utf8(chunk.ok()?).ok()?.parse().ok())
        .map(|result| result.expect("Failed to get next input"))
        .collect();
    Machine::new(&program)
}
/// Computes the joystick input that moves the paddle toward the ball.
///
/// Returns -1, 0, or 1 (tilt left / neutral / tilt right), which is the
/// value range the Intcode arcade's joystick protocol expects; the raw
/// distance `ball_x - bar_x` could exceed that range early in a game.
fn autoplay(frame: &Frame) -> i64 {
    (frame.ball_x - frame.bar_x).signum()
}
/// Entry point. Part one runs the program once and counts block tiles;
/// part two sets memory address 0 to 2 (per the puzzle, this starts the
/// game) and plays it to completion with the autoplayer, printing the
/// final score.
fn main() {
    let mut machine = load_machine();
    let mut frame = Frame::new();
    match part_id_from_cli() {
        PartID::One => {
            // Run with no joystick input, then count block tiles (id 2).
            run_all(&mut machine, yield_iter![]);
            apply_change(&mut machine, &mut frame);
            let result = frame.map.iter().filter(|&(_, v)| *v == 2).count();
            println!("{}", result);
        }
        PartID::Two => {
            // Address 0 := 2 switches the machine into playable mode.
            machine.memset(0, 2);
            // Initial run consumes no input; it pauses when input is needed.
            let mut state = run_all(&mut machine, yield_iter![]);
            apply_change(&mut machine, &mut frame);
            // Feed one joystick sample per resume until the program halts,
            // folding each burst of output back into the frame.
            while state != State::Halted {
                let joy_stick = autoplay(&frame);
                state = run_all(&mut machine, yield_iter![joy_stick, ]);
                apply_change(&mut machine, &mut frame);
            }
            println!("{}", frame.score);
        }
    }
}
|
#[doc = "Register `CFGR1` reader"]
pub type R = crate::R<CFGR1_SPEC>;
#[doc = "Register `CFGR1` writer"]
pub type W = crate::W<CFGR1_SPEC>;
#[doc = "Field `DMAEN` reader - Direct memory access enable"]
pub type DMAEN_R = crate::BitReader<DMAEN_A>;
#[doc = "Direct memory access enable\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum DMAEN_A {
#[doc = "0: DMA mode disabled"]
Disabled = 0,
#[doc = "1: DMA mode enabled"]
Enabled = 1,
}
impl From<DMAEN_A> for bool {
#[inline(always)]
fn from(variant: DMAEN_A) -> Self {
variant as u8 != 0
}
}
impl DMAEN_R {
#[doc = "Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> DMAEN_A {
match self.bits {
false => DMAEN_A::Disabled,
true => DMAEN_A::Enabled,
}
}
#[doc = "DMA mode disabled"]
#[inline(always)]
pub fn is_disabled(&self) -> bool {
*self == DMAEN_A::Disabled
}
#[doc = "DMA mode enabled"]
#[inline(always)]
pub fn is_enabled(&self) -> bool {
*self == DMAEN_A::Enabled
}
}
#[doc = "Field `DMAEN` writer - Direct memory access enable"]
pub type DMAEN_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, DMAEN_A>;
impl<'a, REG, const O: u8> DMAEN_W<'a, REG, O>
where
REG: crate::Writable + crate::RegisterSpec,
{
#[doc = "DMA mode disabled"]
#[inline(always)]
pub fn disabled(self) -> &'a mut crate::W<REG> {
self.variant(DMAEN_A::Disabled)
}
#[doc = "DMA mode enabled"]
#[inline(always)]
pub fn enabled(self) -> &'a mut crate::W<REG> {
self.variant(DMAEN_A::Enabled)
}
}
#[doc = "Field `DMACFG` reader - Direct memory access configuration"]
pub type DMACFG_R = crate::BitReader<DMACFG_A>;
#[doc = "Direct memory access configuration\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum DMACFG_A {
    #[doc = "0: DMA one shot mode"]
    OneShot = 0,
    #[doc = "1: DMA circular mode"]
    Circular = 1,
}
impl From<DMACFG_A> for bool {
    #[inline(always)]
    fn from(variant: DMACFG_A) -> Self {
        variant as u8 != 0
    }
}
impl DMACFG_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> DMACFG_A {
        match self.bits {
            false => DMACFG_A::OneShot,
            true => DMACFG_A::Circular,
        }
    }
    #[doc = "DMA one shot mode"]
    #[inline(always)]
    pub fn is_one_shot(&self) -> bool {
        *self == DMACFG_A::OneShot
    }
    #[doc = "DMA circular mode"]
    #[inline(always)]
    pub fn is_circular(&self) -> bool {
        *self == DMACFG_A::Circular
    }
}
#[doc = "Field `DMACFG` writer - Direct memory access configuration"]
pub type DMACFG_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, DMACFG_A>;
impl<'a, REG, const O: u8> DMACFG_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "DMA one shot mode"]
    #[inline(always)]
    pub fn one_shot(self) -> &'a mut crate::W<REG> {
        self.variant(DMACFG_A::OneShot)
    }
    #[doc = "DMA circular mode"]
    #[inline(always)]
    pub fn circular(self) -> &'a mut crate::W<REG> {
        self.variant(DMACFG_A::Circular)
    }
}
#[doc = "Field `SCANDIR` reader - Scan sequence direction"]
pub type SCANDIR_R = crate::BitReader<SCANDIR_A>;
#[doc = "Scan sequence direction\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum SCANDIR_A {
#[doc = "0: Upward scan (from CHSEL0 to CHSEL18)"]
Upward = 0,
#[doc = "1: Backward scan (from CHSEL18 to CHSEL0)"]
Backward = 1,
}
impl From<SCANDIR_A> for bool {
#[inline(always)]
fn from(variant: SCANDIR_A) -> Self {
variant as u8 != 0
}
}
impl SCANDIR_R {
#[doc = "Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> SCANDIR_A {
match self.bits {
false => SCANDIR_A::Upward,
true => SCANDIR_A::Backward,
}
}
#[doc = "Upward scan (from CHSEL0 to CHSEL18)"]
#[inline(always)]
pub fn is_upward(&self) -> bool {
*self == SCANDIR_A::Upward
}
#[doc = "Backward scan (from CHSEL18 to CHSEL0)"]
#[inline(always)]
pub fn is_backward(&self) -> bool {
*self == SCANDIR_A::Backward
}
}
#[doc = "Field `SCANDIR` writer - Scan sequence direction"]
pub type SCANDIR_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, SCANDIR_A>;
impl<'a, REG, const O: u8> SCANDIR_W<'a, REG, O>
where
REG: crate::Writable + crate::RegisterSpec,
{
#[doc = "Upward scan (from CHSEL0 to CHSEL18)"]
#[inline(always)]
pub fn upward(self) -> &'a mut crate::W<REG> {
self.variant(SCANDIR_A::Upward)
}
#[doc = "Backward scan (from CHSEL18 to CHSEL0)"]
#[inline(always)]
pub fn backward(self) -> &'a mut crate::W<REG> {
self.variant(SCANDIR_A::Backward)
}
}
#[doc = "Field `RES` reader - Data resolution"]
pub type RES_R = crate::FieldReader<RES_A>;
#[doc = "Data resolution\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
#[repr(u8)]
pub enum RES_A {
#[doc = "0: 12-bit (14 ADCCLK cycles)"]
TwelveBit = 0,
#[doc = "1: 10-bit (13 ADCCLK cycles)"]
TenBit = 1,
#[doc = "2: 8-bit (11 ADCCLK cycles)"]
EightBit = 2,
#[doc = "3: 6-bit (9 ADCCLK cycles)"]
SixBit = 3,
}
impl From<RES_A> for u8 {
#[inline(always)]
fn from(variant: RES_A) -> Self {
variant as _
}
}
impl crate::FieldSpec for RES_A {
type Ux = u8;
}
impl RES_R {
#[doc = "Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> RES_A {
match self.bits {
0 => RES_A::TwelveBit,
1 => RES_A::TenBit,
2 => RES_A::EightBit,
3 => RES_A::SixBit,
_ => unreachable!(),
}
}
#[doc = "12-bit (14 ADCCLK cycles)"]
#[inline(always)]
pub fn is_twelve_bit(&self) -> bool {
*self == RES_A::TwelveBit
}
#[doc = "10-bit (13 ADCCLK cycles)"]
#[inline(always)]
pub fn is_ten_bit(&self) -> bool {
*self == RES_A::TenBit
}
#[doc = "8-bit (11 ADCCLK cycles)"]
#[inline(always)]
pub fn is_eight_bit(&self) -> bool {
*self == RES_A::EightBit
}
#[doc = "6-bit (9 ADCCLK cycles)"]
#[inline(always)]
pub fn is_six_bit(&self) -> bool {
*self == RES_A::SixBit
}
}
#[doc = "Field `RES` writer - Data resolution"]
pub type RES_W<'a, REG, const O: u8> = crate::FieldWriterSafe<'a, REG, 2, O, RES_A>;
impl<'a, REG, const O: u8> RES_W<'a, REG, O>
where
REG: crate::Writable + crate::RegisterSpec,
REG::Ux: From<u8>,
{
#[doc = "12-bit (14 ADCCLK cycles)"]
#[inline(always)]
pub fn twelve_bit(self) -> &'a mut crate::W<REG> {
self.variant(RES_A::TwelveBit)
}
#[doc = "10-bit (13 ADCCLK cycles)"]
#[inline(always)]
pub fn ten_bit(self) -> &'a mut crate::W<REG> {
self.variant(RES_A::TenBit)
}
#[doc = "8-bit (11 ADCCLK cycles)"]
#[inline(always)]
pub fn eight_bit(self) -> &'a mut crate::W<REG> {
self.variant(RES_A::EightBit)
}
#[doc = "6-bit (9 ADCCLK cycles)"]
#[inline(always)]
pub fn six_bit(self) -> &'a mut crate::W<REG> {
self.variant(RES_A::SixBit)
}
}
#[doc = "Field `ALIGN` reader - Data alignment"]
pub type ALIGN_R = crate::BitReader<ALIGN_A>;
#[doc = "Data alignment\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum ALIGN_A {
#[doc = "0: Right alignment"]
Right = 0,
#[doc = "1: Left alignment"]
Left = 1,
}
impl From<ALIGN_A> for bool {
#[inline(always)]
fn from(variant: ALIGN_A) -> Self {
variant as u8 != 0
}
}
impl ALIGN_R {
#[doc = "Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> ALIGN_A {
match self.bits {
false => ALIGN_A::Right,
true => ALIGN_A::Left,
}
}
#[doc = "Right alignment"]
#[inline(always)]
pub fn is_right(&self) -> bool {
*self == ALIGN_A::Right
}
#[doc = "Left alignment"]
#[inline(always)]
pub fn is_left(&self) -> bool {
*self == ALIGN_A::Left
}
}
#[doc = "Field `ALIGN` writer - Data alignment"]
pub type ALIGN_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, ALIGN_A>;
impl<'a, REG, const O: u8> ALIGN_W<'a, REG, O>
where
REG: crate::Writable + crate::RegisterSpec,
{
#[doc = "Right alignment"]
#[inline(always)]
pub fn right(self) -> &'a mut crate::W<REG> {
self.variant(ALIGN_A::Right)
}
#[doc = "Left alignment"]
#[inline(always)]
pub fn left(self) -> &'a mut crate::W<REG> {
self.variant(ALIGN_A::Left)
}
}
#[doc = "Field `EXTSEL` reader - External trigger selection"]
pub type EXTSEL_R = crate::FieldReader<EXTSEL_A>;
#[doc = "External trigger selection\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
#[repr(u8)]
pub enum EXTSEL_A {
#[doc = "0: Timer 1 TRGO Event"]
Tim1Trgo = 0,
#[doc = "1: Timer 1 CC4 event"]
Tim1Cc4 = 1,
#[doc = "3: Timer 3 TRGO event"]
Tim3Trgo = 3,
#[doc = "4: Timer 15 TRGO event"]
Tim15Trgo = 4,
}
impl From<EXTSEL_A> for u8 {
#[inline(always)]
fn from(variant: EXTSEL_A) -> Self {
variant as _
}
}
impl crate::FieldSpec for EXTSEL_A {
type Ux = u8;
}
impl EXTSEL_R {
#[doc = "Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> Option<EXTSEL_A> {
match self.bits {
0 => Some(EXTSEL_A::Tim1Trgo),
1 => Some(EXTSEL_A::Tim1Cc4),
3 => Some(EXTSEL_A::Tim3Trgo),
4 => Some(EXTSEL_A::Tim15Trgo),
_ => None,
}
}
#[doc = "Timer 1 TRGO Event"]
#[inline(always)]
pub fn is_tim1_trgo(&self) -> bool {
*self == EXTSEL_A::Tim1Trgo
}
#[doc = "Timer 1 CC4 event"]
#[inline(always)]
pub fn is_tim1_cc4(&self) -> bool {
*self == EXTSEL_A::Tim1Cc4
}
#[doc = "Timer 3 TRGO event"]
#[inline(always)]
pub fn is_tim3_trgo(&self) -> bool {
*self == EXTSEL_A::Tim3Trgo
}
#[doc = "Timer 15 TRGO event"]
#[inline(always)]
pub fn is_tim15_trgo(&self) -> bool {
*self == EXTSEL_A::Tim15Trgo
}
}
#[doc = "Field `EXTSEL` writer - External trigger selection"]
pub type EXTSEL_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 3, O, EXTSEL_A>;
impl<'a, REG, const O: u8> EXTSEL_W<'a, REG, O>
where
REG: crate::Writable + crate::RegisterSpec,
REG::Ux: From<u8>,
{
#[doc = "Timer 1 TRGO Event"]
#[inline(always)]
pub fn tim1_trgo(self) -> &'a mut crate::W<REG> {
self.variant(EXTSEL_A::Tim1Trgo)
}
#[doc = "Timer 1 CC4 event"]
#[inline(always)]
pub fn tim1_cc4(self) -> &'a mut crate::W<REG> {
self.variant(EXTSEL_A::Tim1Cc4)
}
#[doc = "Timer 3 TRGO event"]
#[inline(always)]
pub fn tim3_trgo(self) -> &'a mut crate::W<REG> {
self.variant(EXTSEL_A::Tim3Trgo)
}
#[doc = "Timer 15 TRGO event"]
#[inline(always)]
pub fn tim15_trgo(self) -> &'a mut crate::W<REG> {
self.variant(EXTSEL_A::Tim15Trgo)
}
}
#[doc = "Field `EXTEN` reader - External trigger enable and polarity selection"]
pub type EXTEN_R = crate::FieldReader<EXTEN_A>;
#[doc = "External trigger enable and polarity selection\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
#[repr(u8)]
pub enum EXTEN_A {
#[doc = "0: Trigger detection disabled"]
Disabled = 0,
#[doc = "1: Trigger detection on the rising edge"]
RisingEdge = 1,
#[doc = "2: Trigger detection on the falling edge"]
FallingEdge = 2,
#[doc = "3: Trigger detection on both the rising and falling edges"]
BothEdges = 3,
}
impl From<EXTEN_A> for u8 {
#[inline(always)]
fn from(variant: EXTEN_A) -> Self {
variant as _
}
}
impl crate::FieldSpec for EXTEN_A {
type Ux = u8;
}
impl EXTEN_R {
#[doc = "Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> EXTEN_A {
match self.bits {
0 => EXTEN_A::Disabled,
1 => EXTEN_A::RisingEdge,
2 => EXTEN_A::FallingEdge,
3 => EXTEN_A::BothEdges,
_ => unreachable!(),
}
}
#[doc = "Trigger detection disabled"]
#[inline(always)]
pub fn is_disabled(&self) -> bool {
*self == EXTEN_A::Disabled
}
#[doc = "Trigger detection on the rising edge"]
#[inline(always)]
pub fn is_rising_edge(&self) -> bool {
*self == EXTEN_A::RisingEdge
}
#[doc = "Trigger detection on the falling edge"]
#[inline(always)]
pub fn is_falling_edge(&self) -> bool {
*self == EXTEN_A::FallingEdge
}
#[doc = "Trigger detection on both the rising and falling edges"]
#[inline(always)]
pub fn is_both_edges(&self) -> bool {
*self == EXTEN_A::BothEdges
}
}
#[doc = "Field `EXTEN` writer - External trigger enable and polarity selection"]
pub type EXTEN_W<'a, REG, const O: u8> = crate::FieldWriterSafe<'a, REG, 2, O, EXTEN_A>;
impl<'a, REG, const O: u8> EXTEN_W<'a, REG, O>
where
REG: crate::Writable + crate::RegisterSpec,
REG::Ux: From<u8>,
{
#[doc = "Trigger detection disabled"]
#[inline(always)]
pub fn disabled(self) -> &'a mut crate::W<REG> {
self.variant(EXTEN_A::Disabled)
}
#[doc = "Trigger detection on the rising edge"]
#[inline(always)]
pub fn rising_edge(self) -> &'a mut crate::W<REG> {
self.variant(EXTEN_A::RisingEdge)
}
#[doc = "Trigger detection on the falling edge"]
#[inline(always)]
pub fn falling_edge(self) -> &'a mut crate::W<REG> {
self.variant(EXTEN_A::FallingEdge)
}
#[doc = "Trigger detection on both the rising and falling edges"]
#[inline(always)]
pub fn both_edges(self) -> &'a mut crate::W<REG> {
self.variant(EXTEN_A::BothEdges)
}
}
#[doc = "Field `OVRMOD` reader - Overrun management mode"]
pub type OVRMOD_R = crate::BitReader<OVRMOD_A>;
#[doc = "Overrun management mode\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum OVRMOD_A {
#[doc = "0: ADC_DR register is preserved with the old data when an overrun is detected"]
Preserved = 0,
#[doc = "1: ADC_DR register is overwritten with the last conversion result when an overrun is detected"]
Overwritten = 1,
}
impl From<OVRMOD_A> for bool {
#[inline(always)]
fn from(variant: OVRMOD_A) -> Self {
variant as u8 != 0
}
}
impl OVRMOD_R {
#[doc = "Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> OVRMOD_A {
match self.bits {
false => OVRMOD_A::Preserved,
true => OVRMOD_A::Overwritten,
}
}
#[doc = "ADC_DR register is preserved with the old data when an overrun is detected"]
#[inline(always)]
pub fn is_preserved(&self) -> bool {
*self == OVRMOD_A::Preserved
}
#[doc = "ADC_DR register is overwritten with the last conversion result when an overrun is detected"]
#[inline(always)]
pub fn is_overwritten(&self) -> bool {
*self == OVRMOD_A::Overwritten
}
}
#[doc = "Field `OVRMOD` writer - Overrun management mode"]
pub type OVRMOD_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, OVRMOD_A>;
impl<'a, REG, const O: u8> OVRMOD_W<'a, REG, O>
where
REG: crate::Writable + crate::RegisterSpec,
{
#[doc = "ADC_DR register is preserved with the old data when an overrun is detected"]
#[inline(always)]
pub fn preserved(self) -> &'a mut crate::W<REG> {
self.variant(OVRMOD_A::Preserved)
}
#[doc = "ADC_DR register is overwritten with the last conversion result when an overrun is detected"]
#[inline(always)]
pub fn overwritten(self) -> &'a mut crate::W<REG> {
self.variant(OVRMOD_A::Overwritten)
}
}
#[doc = "Field `CONT` reader - Single / continuous conversion mode"]
pub type CONT_R = crate::BitReader<CONT_A>;
#[doc = "Single / continuous conversion mode\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum CONT_A {
#[doc = "0: Single conversion mode"]
Single = 0,
#[doc = "1: Continuous conversion mode"]
Continuous = 1,
}
impl From<CONT_A> for bool {
#[inline(always)]
fn from(variant: CONT_A) -> Self {
variant as u8 != 0
}
}
impl CONT_R {
#[doc = "Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> CONT_A {
match self.bits {
false => CONT_A::Single,
true => CONT_A::Continuous,
}
}
#[doc = "Single conversion mode"]
#[inline(always)]
pub fn is_single(&self) -> bool {
*self == CONT_A::Single
}
#[doc = "Continuous conversion mode"]
#[inline(always)]
pub fn is_continuous(&self) -> bool {
*self == CONT_A::Continuous
}
}
#[doc = "Field `CONT` writer - Single / continuous conversion mode"]
pub type CONT_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, CONT_A>;
impl<'a, REG, const O: u8> CONT_W<'a, REG, O>
where
REG: crate::Writable + crate::RegisterSpec,
{
#[doc = "Single conversion mode"]
#[inline(always)]
pub fn single(self) -> &'a mut crate::W<REG> {
self.variant(CONT_A::Single)
}
#[doc = "Continuous conversion mode"]
#[inline(always)]
pub fn continuous(self) -> &'a mut crate::W<REG> {
self.variant(CONT_A::Continuous)
}
}
#[doc = "Field `WAIT` reader - Wait conversion mode"]
pub type WAIT_R = crate::BitReader<WAIT_A>;
#[doc = "Wait conversion mode\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum WAIT_A {
#[doc = "0: Wait conversion mode off"]
Disabled = 0,
#[doc = "1: Wait conversion mode on"]
Enabled = 1,
}
impl From<WAIT_A> for bool {
#[inline(always)]
fn from(variant: WAIT_A) -> Self {
variant as u8 != 0
}
}
impl WAIT_R {
#[doc = "Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> WAIT_A {
match self.bits {
false => WAIT_A::Disabled,
true => WAIT_A::Enabled,
}
}
#[doc = "Wait conversion mode off"]
#[inline(always)]
pub fn is_disabled(&self) -> bool {
*self == WAIT_A::Disabled
}
#[doc = "Wait conversion mode on"]
#[inline(always)]
pub fn is_enabled(&self) -> bool {
*self == WAIT_A::Enabled
}
}
#[doc = "Field `WAIT` writer - Wait conversion mode"]
pub type WAIT_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, WAIT_A>;
impl<'a, REG, const O: u8> WAIT_W<'a, REG, O>
where
REG: crate::Writable + crate::RegisterSpec,
{
#[doc = "Wait conversion mode off"]
#[inline(always)]
pub fn disabled(self) -> &'a mut crate::W<REG> {
self.variant(WAIT_A::Disabled)
}
#[doc = "Wait conversion mode on"]
#[inline(always)]
pub fn enabled(self) -> &'a mut crate::W<REG> {
self.variant(WAIT_A::Enabled)
}
}
#[doc = "Field `AUTOFF` reader - Auto-off mode"]
pub type AUTOFF_R = crate::BitReader<AUTOFF_A>;
#[doc = "Auto-off mode\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum AUTOFF_A {
#[doc = "0: Auto-off mode disabled"]
Disabled = 0,
#[doc = "1: Auto-off mode enabled"]
Enabled = 1,
}
impl From<AUTOFF_A> for bool {
#[inline(always)]
fn from(variant: AUTOFF_A) -> Self {
variant as u8 != 0
}
}
impl AUTOFF_R {
#[doc = "Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> AUTOFF_A {
match self.bits {
false => AUTOFF_A::Disabled,
true => AUTOFF_A::Enabled,
}
}
#[doc = "Auto-off mode disabled"]
#[inline(always)]
pub fn is_disabled(&self) -> bool {
*self == AUTOFF_A::Disabled
}
#[doc = "Auto-off mode enabled"]
#[inline(always)]
pub fn is_enabled(&self) -> bool {
*self == AUTOFF_A::Enabled
}
}
#[doc = "Field `AUTOFF` writer - Auto-off mode"]
pub type AUTOFF_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, AUTOFF_A>;
impl<'a, REG, const O: u8> AUTOFF_W<'a, REG, O>
where
REG: crate::Writable + crate::RegisterSpec,
{
#[doc = "Auto-off mode disabled"]
#[inline(always)]
pub fn disabled(self) -> &'a mut crate::W<REG> {
self.variant(AUTOFF_A::Disabled)
}
#[doc = "Auto-off mode enabled"]
#[inline(always)]
pub fn enabled(self) -> &'a mut crate::W<REG> {
self.variant(AUTOFF_A::Enabled)
}
}
#[doc = "Field `DISCEN` reader - Discontinuous mode"]
pub type DISCEN_R = crate::BitReader<DISCEN_A>;
#[doc = "Discontinuous mode\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum DISCEN_A {
#[doc = "0: Discontinuous mode on regular channels disabled"]
Disabled = 0,
#[doc = "1: Discontinuous mode on regular channels enabled"]
Enabled = 1,
}
impl From<DISCEN_A> for bool {
#[inline(always)]
fn from(variant: DISCEN_A) -> Self {
variant as u8 != 0
}
}
impl DISCEN_R {
#[doc = "Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> DISCEN_A {
match self.bits {
false => DISCEN_A::Disabled,
true => DISCEN_A::Enabled,
}
}
#[doc = "Discontinuous mode on regular channels disabled"]
#[inline(always)]
pub fn is_disabled(&self) -> bool {
*self == DISCEN_A::Disabled
}
#[doc = "Discontinuous mode on regular channels enabled"]
#[inline(always)]
pub fn is_enabled(&self) -> bool {
*self == DISCEN_A::Enabled
}
}
#[doc = "Field `DISCEN` writer - Discontinuous mode"]
pub type DISCEN_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, DISCEN_A>;
impl<'a, REG, const O: u8> DISCEN_W<'a, REG, O>
where
REG: crate::Writable + crate::RegisterSpec,
{
#[doc = "Discontinuous mode on regular channels disabled"]
#[inline(always)]
pub fn disabled(self) -> &'a mut crate::W<REG> {
self.variant(DISCEN_A::Disabled)
}
#[doc = "Discontinuous mode on regular channels enabled"]
#[inline(always)]
pub fn enabled(self) -> &'a mut crate::W<REG> {
self.variant(DISCEN_A::Enabled)
}
}
#[doc = "Field `AWDSGL` reader - Enable the watchdog on a single channel or on all channels"]
pub type AWDSGL_R = crate::BitReader<AWDSGL_A>;
#[doc = "Enable the watchdog on a single channel or on all channels\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum AWDSGL_A {
#[doc = "0: Analog watchdog enabled on all channels"]
AllChannels = 0,
#[doc = "1: Analog watchdog enabled on a single channel"]
SingleChannel = 1,
}
impl From<AWDSGL_A> for bool {
#[inline(always)]
fn from(variant: AWDSGL_A) -> Self {
variant as u8 != 0
}
}
impl AWDSGL_R {
#[doc = "Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> AWDSGL_A {
match self.bits {
false => AWDSGL_A::AllChannels,
true => AWDSGL_A::SingleChannel,
}
}
#[doc = "Analog watchdog enabled on all channels"]
#[inline(always)]
pub fn is_all_channels(&self) -> bool {
*self == AWDSGL_A::AllChannels
}
#[doc = "Analog watchdog enabled on a single channel"]
#[inline(always)]
pub fn is_single_channel(&self) -> bool {
*self == AWDSGL_A::SingleChannel
}
}
#[doc = "Field `AWDSGL` writer - Enable the watchdog on a single channel or on all channels"]
pub type AWDSGL_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, AWDSGL_A>;
impl<'a, REG, const O: u8> AWDSGL_W<'a, REG, O>
where
REG: crate::Writable + crate::RegisterSpec,
{
#[doc = "Analog watchdog enabled on all channels"]
#[inline(always)]
pub fn all_channels(self) -> &'a mut crate::W<REG> {
self.variant(AWDSGL_A::AllChannels)
}
#[doc = "Analog watchdog enabled on a single channel"]
#[inline(always)]
pub fn single_channel(self) -> &'a mut crate::W<REG> {
self.variant(AWDSGL_A::SingleChannel)
}
}
#[doc = "Field `AWDEN` reader - Analog watchdog enable"]
pub type AWDEN_R = crate::BitReader<AWDEN_A>;
#[doc = "Analog watchdog enable\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum AWDEN_A {
#[doc = "0: Analog watchdog disabled on regular channels"]
Disabled = 0,
#[doc = "1: Analog watchdog enabled on regular channels"]
Enabled = 1,
}
impl From<AWDEN_A> for bool {
#[inline(always)]
fn from(variant: AWDEN_A) -> Self {
variant as u8 != 0
}
}
impl AWDEN_R {
#[doc = "Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> AWDEN_A {
match self.bits {
false => AWDEN_A::Disabled,
true => AWDEN_A::Enabled,
}
}
#[doc = "Analog watchdog disabled on regular channels"]
#[inline(always)]
pub fn is_disabled(&self) -> bool {
*self == AWDEN_A::Disabled
}
#[doc = "Analog watchdog enabled on regular channels"]
#[inline(always)]
pub fn is_enabled(&self) -> bool {
*self == AWDEN_A::Enabled
}
}
#[doc = "Field `AWDEN` writer - Analog watchdog enable"]
pub type AWDEN_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, AWDEN_A>;
impl<'a, REG, const O: u8> AWDEN_W<'a, REG, O>
where
REG: crate::Writable + crate::RegisterSpec,
{
#[doc = "Analog watchdog disabled on regular channels"]
#[inline(always)]
pub fn disabled(self) -> &'a mut crate::W<REG> {
self.variant(AWDEN_A::Disabled)
}
#[doc = "Analog watchdog enabled on regular channels"]
#[inline(always)]
pub fn enabled(self) -> &'a mut crate::W<REG> {
self.variant(AWDEN_A::Enabled)
}
}
#[doc = "Field `AWDCH` reader - Analog watchdog channel selection"]
pub type AWDCH_R = crate::FieldReader;
#[doc = "Field `AWDCH` writer - Analog watchdog channel selection"]
pub type AWDCH_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 5, O>;
// Field accessors for reading CFGR1. Each method extracts its field's
// bits at the documented offset and wraps them in a typed reader proxy.
impl R {
    #[doc = "Bit 0 - Direct memory access enable"]
    #[inline(always)]
    pub fn dmaen(&self) -> DMAEN_R {
        DMAEN_R::new((self.bits & 1) != 0)
    }
    #[doc = "Bit 1 - Direct memory access configuration"]
    #[inline(always)]
    pub fn dmacfg(&self) -> DMACFG_R {
        DMACFG_R::new(((self.bits >> 1) & 1) != 0)
    }
    #[doc = "Bit 2 - Scan sequence direction"]
    #[inline(always)]
    pub fn scandir(&self) -> SCANDIR_R {
        SCANDIR_R::new(((self.bits >> 2) & 1) != 0)
    }
    #[doc = "Bits 3:4 - Data resolution"]
    #[inline(always)]
    pub fn res(&self) -> RES_R {
        RES_R::new(((self.bits >> 3) & 3) as u8)
    }
    #[doc = "Bit 5 - Data alignment"]
    #[inline(always)]
    pub fn align(&self) -> ALIGN_R {
        ALIGN_R::new(((self.bits >> 5) & 1) != 0)
    }
    #[doc = "Bits 6:8 - External trigger selection"]
    #[inline(always)]
    pub fn extsel(&self) -> EXTSEL_R {
        EXTSEL_R::new(((self.bits >> 6) & 7) as u8)
    }
    #[doc = "Bits 10:11 - External trigger enable and polarity selection"]
    #[inline(always)]
    pub fn exten(&self) -> EXTEN_R {
        EXTEN_R::new(((self.bits >> 10) & 3) as u8)
    }
    #[doc = "Bit 12 - Overrun management mode"]
    #[inline(always)]
    pub fn ovrmod(&self) -> OVRMOD_R {
        OVRMOD_R::new(((self.bits >> 12) & 1) != 0)
    }
    #[doc = "Bit 13 - Single / continuous conversion mode"]
    #[inline(always)]
    pub fn cont(&self) -> CONT_R {
        CONT_R::new(((self.bits >> 13) & 1) != 0)
    }
    #[doc = "Bit 14 - Wait conversion mode"]
    #[inline(always)]
    pub fn wait(&self) -> WAIT_R {
        WAIT_R::new(((self.bits >> 14) & 1) != 0)
    }
    #[doc = "Bit 15 - Auto-off mode"]
    #[inline(always)]
    pub fn autoff(&self) -> AUTOFF_R {
        AUTOFF_R::new(((self.bits >> 15) & 1) != 0)
    }
    #[doc = "Bit 16 - Discontinuous mode"]
    #[inline(always)]
    pub fn discen(&self) -> DISCEN_R {
        DISCEN_R::new(((self.bits >> 16) & 1) != 0)
    }
    #[doc = "Bit 22 - Enable the watchdog on a single channel or on all channels"]
    #[inline(always)]
    pub fn awdsgl(&self) -> AWDSGL_R {
        AWDSGL_R::new(((self.bits >> 22) & 1) != 0)
    }
    #[doc = "Bit 23 - Analog watchdog enable"]
    #[inline(always)]
    pub fn awden(&self) -> AWDEN_R {
        AWDEN_R::new(((self.bits >> 23) & 1) != 0)
    }
    #[doc = "Bits 26:30 - Analog watchdog channel selection"]
    #[inline(always)]
    pub fn awdch(&self) -> AWDCH_R {
        AWDCH_R::new(((self.bits >> 26) & 0x1f) as u8)
    }
}
// Field accessors for writing CFGR1. Each method returns a typed writer
// proxy positioned at the field's bit offset.
impl W {
    #[doc = "Bit 0 - Direct memory access enable"]
    #[inline(always)]
    #[must_use]
    pub fn dmaen(&mut self) -> DMAEN_W<CFGR1_SPEC, 0> {
        DMAEN_W::new(self)
    }
    #[doc = "Bit 1 - Direct memory access configuration"]
    #[inline(always)]
    #[must_use]
    pub fn dmacfg(&mut self) -> DMACFG_W<CFGR1_SPEC, 1> {
        DMACFG_W::new(self)
    }
    #[doc = "Bit 2 - Scan sequence direction"]
    #[inline(always)]
    #[must_use]
    pub fn scandir(&mut self) -> SCANDIR_W<CFGR1_SPEC, 2> {
        SCANDIR_W::new(self)
    }
    #[doc = "Bits 3:4 - Data resolution"]
    #[inline(always)]
    #[must_use]
    pub fn res(&mut self) -> RES_W<CFGR1_SPEC, 3> {
        RES_W::new(self)
    }
    #[doc = "Bit 5 - Data alignment"]
    #[inline(always)]
    #[must_use]
    pub fn align(&mut self) -> ALIGN_W<CFGR1_SPEC, 5> {
        ALIGN_W::new(self)
    }
    #[doc = "Bits 6:8 - External trigger selection"]
    #[inline(always)]
    #[must_use]
    pub fn extsel(&mut self) -> EXTSEL_W<CFGR1_SPEC, 6> {
        EXTSEL_W::new(self)
    }
    #[doc = "Bits 10:11 - External trigger enable and polarity selection"]
    #[inline(always)]
    #[must_use]
    pub fn exten(&mut self) -> EXTEN_W<CFGR1_SPEC, 10> {
        EXTEN_W::new(self)
    }
    #[doc = "Bit 12 - Overrun management mode"]
    #[inline(always)]
    #[must_use]
    pub fn ovrmod(&mut self) -> OVRMOD_W<CFGR1_SPEC, 12> {
        OVRMOD_W::new(self)
    }
    #[doc = "Bit 13 - Single / continuous conversion mode"]
    #[inline(always)]
    #[must_use]
    pub fn cont(&mut self) -> CONT_W<CFGR1_SPEC, 13> {
        CONT_W::new(self)
    }
    #[doc = "Bit 14 - Wait conversion mode"]
    #[inline(always)]
    #[must_use]
    pub fn wait(&mut self) -> WAIT_W<CFGR1_SPEC, 14> {
        WAIT_W::new(self)
    }
    #[doc = "Bit 15 - Auto-off mode"]
    #[inline(always)]
    #[must_use]
    pub fn autoff(&mut self) -> AUTOFF_W<CFGR1_SPEC, 15> {
        AUTOFF_W::new(self)
    }
    #[doc = "Bit 16 - Discontinuous mode"]
    #[inline(always)]
    #[must_use]
    pub fn discen(&mut self) -> DISCEN_W<CFGR1_SPEC, 16> {
        DISCEN_W::new(self)
    }
    #[doc = "Bit 22 - Enable the watchdog on a single channel or on all channels"]
    #[inline(always)]
    #[must_use]
    pub fn awdsgl(&mut self) -> AWDSGL_W<CFGR1_SPEC, 22> {
        AWDSGL_W::new(self)
    }
    #[doc = "Bit 23 - Analog watchdog enable"]
    #[inline(always)]
    #[must_use]
    pub fn awden(&mut self) -> AWDEN_W<CFGR1_SPEC, 23> {
        AWDEN_W::new(self)
    }
    #[doc = "Bits 26:30 - Analog watchdog channel selection"]
    #[inline(always)]
    #[must_use]
    pub fn awdch(&mut self) -> AWDCH_W<CFGR1_SPEC, 26> {
        AWDCH_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "configuration register 1\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`cfgr1::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`cfgr1::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct CFGR1_SPEC;
impl crate::RegisterSpec for CFGR1_SPEC {
type Ux = u32;
}
#[doc = "`read()` method returns [`cfgr1::R`](R) reader structure"]
impl crate::Readable for CFGR1_SPEC {}
#[doc = "`write(|w| ..)` method takes [`cfgr1::W`](W) writer structure"]
impl crate::Writable for CFGR1_SPEC {
const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets CFGR1 to value 0"]
impl crate::Resettable for CFGR1_SPEC {
const RESET_VALUE: Self::Ux = 0;
}
|
use super::types;
use std::collections::HashMap;
/// Declare the module-level variable for a PIR declaration.
///
/// The variable is given the declaration's name and typed as the
/// unsized-closure representation of the declaration's function type.
pub fn compile_declaration(
    module_builder: &fmm::build::ModuleBuilder,
    declaration: &pir::ir::Declaration,
    types: &HashMap<String, pir::types::RecordBody>,
) {
    let closure_type = types::compile_unsized_closure(declaration.type_(), types);
    module_builder.declare_variable(declaration.name(), closure_type);
}
|
use actix_web::{middleware, web, App, error,
Error, HttpRequest, HttpServer, HttpResponse};
use std::thread;
use bytes::Bytes;
use futures::{future::ok, Future, Stream};
use futures::unsync::mpsc;
/// Synchronous handler for `/`: logs the request, kicks off the
/// detached background counter thread, and returns a static greeting.
fn index(req: HttpRequest) -> &'static str {
    println!("REQ: {:?}", req);
    no_loop();
    "Index Hello world!"
}
/// Spawn a detached background thread that prints the numbers 1..=10,
/// sleeping two seconds before each print. The handle is dropped, so
/// the thread outlives the caller and is never joined.
fn no_loop(){
    thread::spawn(move || {
        for count in 1..=10 {
            thread::sleep(std::time::Duration::from_secs(2));
            println!("{:?}", count);
        }
    });
}
/// async handler
///
/// Logs the request, starts the background counter, and resolves
/// immediately with an HTML greeting built from the `name` path segment.
fn index_async(req: HttpRequest) -> impl Future<Item = HttpResponse, Error = Error> {
    println!("{:?}", req);
    no_loop();
    ok(HttpResponse::Ok()
        .content_type("text/html")
        // `name` comes from the route pattern `/async/{name}`;
        // NOTE(review): the unwrap panics if the route ever changes — confirm.
        .body(format!("Hello {}!", req.match_info().get("name").unwrap())))
}
/// async body
///
/// Streams the greeting back over an unbounded channel after blocking
/// the worker for ~3 seconds of "synchronous processing".
fn index_async_body(path: web::Path<String>) -> HttpResponse {
    let text = format!("Hello {}!", *path);
    // Unbounded channel feeding the streaming response body.
    let (tx, rx_body) = mpsc::unbounded();
    let _ = tx.unbounded_send(Bytes::from(text.as_bytes()));
    // Synchronous processing (demo): prints 1..=3 with one-second sleeps.
    // NOTE(review): thread::sleep here stalls the actix worker thread —
    // confirm this blocking behavior is intentional for the demo.
    let mut x = 1;
    while x <= 3 {
        thread::sleep(std::time::Duration::from_secs(1));
        println!("{:?}", x);
        x += 1;
    }
    HttpResponse::Ok()
        .streaming(rx_body.map_err(|_| error::ErrorBadRequest("bad request")))
}
/// Boot the HTTP server on 127.0.0.1:8080 with request logging enabled
/// and the demo routes registered. Blocks until the server shuts down.
fn main() -> std::io::Result<()> {
    // Force request logging on regardless of the caller's environment.
    std::env::set_var("RUST_LOG", "actix_web=info");
    env_logger::init();
    HttpServer::new(|| {
        App::new()
            // enable logger
            .wrap(middleware::Logger::default())
            .service(web::resource("/index.html").to(|| "Hello world!"))
            .service(web::resource("/").to(index))
            // async handler
            .service(
                web::resource("/async/{name}").route(web::get().to_async(index_async)),
            )
            // async handler returning a streamed body
            .service(
                web::resource("/async-body/{name}")
                    .route(web::get().to(index_async_body)),
            )
    })
    .bind("127.0.0.1:8080")?
    .run()
}
extern crate april;
extern crate image;
use april::*;
use std::env::args;
/// AprilTag detection demo: load the image named on the command line,
/// convert it to grayscale, and print every detected tag.
fn main() {
    println!("STARTING");
    // Register every tag family we want to recognize before detecting.
    let mut detector = Detector::create();
    detector.add_family(TagFamilyType::Tag16h5);
    detector.add_family(TagFamilyType::Tag25h7);
    detector.add_family(TagFamilyType::Tag25h9);
    detector.add_family(TagFamilyType::Tag36h10);
    detector.add_family(TagFamilyType::Tag36h11);
    println!("READY");
    // NOTE(review): nth(2) is the *second* CLI argument (nth(0) is the
    // program name) — confirm the extra leading argument is intended.
    let filename = args()
        .nth(2)
        .expect("usage: <prog> <arg> <image-path> (image path missing)");
    println!("OPENING {}", filename);
    let img = image::open(&filename)
        .unwrap_or_else(|e| panic!("failed to open image {}: {}", filename, e));
    println!("LOADED");
    // Detection operates on an 8-bit grayscale image.
    let mut img: Image = image::imageops::grayscale(&img).into();
    println!("CONVERTED");
    let detections = detector.detect(&mut img);
    println!("DETECTED");
    println!("FOUND {} TAGS", detections.len());
    for i in 0..detections.len() {
        let detect = detections.get(i);
        println!("{}\t{}\t{}\t{:?}", detect.id(), detect.goodness(), detect.margin(), detect.p());
    }
}
|
use byteorder::{ReadBytesExt, WriteBytesExt, BigEndian};
use csv::StringRecord;
use std::io::Cursor;
use std::ops::Index;
// TODO change Schema to ColumnTypes?
// TODO remove indexes, all info can be inferred from schema
// once Text is allocated as fixed length
use {DataType, Schema};
use error::*;
#[derive(Debug, Clone, PartialEq)]
pub struct Tuple{
    pub data: Vec<u8>, // every field's encoded bytes, concatenated back-to-back
    pub indexes: Vec<usize>, // pointers to start of data for a field
}
impl Tuple {
    /// Build a tuple from already-encoded field values.
    ///
    /// `indexes[i]` ends up holding the byte offset of field `i` inside
    /// `data`. Offsets are running sums of field lengths, so the final sum
    /// points one past the last field and is popped before storing.
    // simple init for testing purposes
    pub fn new(data: Vec<Vec<u8>>) -> Self {
        let mut buf = Vec::new();
        let mut offset = 0;
        let mut indexes = vec![offset]; // TODO fix for empty data case
        for field in data.iter() {
            offset += field.len();
            indexes.push(offset);
            buf.extend_from_slice(field);
        }
        let _ = indexes.pop(); // drop the offset past the end of the last field
        Tuple {
            data: buf,
            indexes: indexes,
        }
    }
    /// Render the tuple as `", "`-joined field values, decoding each field
    /// with the matching column type from `schema`.
    ///
    /// Now borrows the tuple (`&self`) per the `to_*` naming convention
    /// instead of consuming it; method-call sites are unaffected.
    ///
    /// # Panics
    /// If the schema's column count differs from the tuple's field count.
    pub fn to_string(&self, schema: &Schema) -> Result<String> {
        assert_eq!(schema.column_types.len(), self.indexes.len());
        let fields = (0..self.indexes.len()).map(|i| {
            display_with_type(&self[i], &schema.column_types[i])
        }).collect::<Result<Vec<_>>>()?;
        Ok(fields.join(", "))
    }
}
/// Decode one encoded field (`data`) according to `data_type` and render
/// it as a display string.
///
/// Numeric types are stored big-endian. `Text` fields are fixed-width and
/// NUL-padded, so trailing `'\0'` bytes are stripped for display.
///
/// # Errors
/// Returns an error when the bytes are too short for the numeric type or
/// are not valid UTF-8 for `Text`.
pub fn display_with_type(data: &[u8], data_type: &DataType) -> Result<String> {
    match *data_type {
        DataType::SmallInt => {
            // Decode a big-endian u16 and stringify it directly; the
            // intermediate String + push_str dance was unnecessary.
            let mut rdr = Cursor::new(data);
            Ok(rdr.read_u16::<BigEndian>()?.to_string())
        },
        DataType::Integer => {
            let mut rdr = Cursor::new(data);
            Ok(rdr.read_u32::<BigEndian>()?.to_string())
        },
        DataType::Float => {
            let mut rdr = Cursor::new(data);
            Ok(rdr.read_f32::<BigEndian>()?.to_string())
        },
        DataType::Text(_) => {
            let s = String::from_utf8(data.to_vec())
                .chain_err(|| "Error converting back to Utf8 for display")?;
            Ok(s.trim_right_matches('\0').to_owned())
        },
    }
}
/// Encode the string `s` into its binary representation for `data_type`.
///
/// Numerics are written big-endian; `Text(n)` is NUL-padded to exactly
/// `n` bytes.
///
/// # Errors
/// Returns an error when the string does not parse as the target numeric
/// type, or when a text value is longer than the column allocation
/// (previously this panicked on subtraction underflow).
pub fn string_to_binary(s: &str, data_type: &DataType) -> Result<Vec<u8>> {
    match *data_type {
        DataType::SmallInt => {
            //TODO support other radix
            let integer = s.parse::<u16>()?;
            let mut buf = Vec::new();
            buf.write_u16::<BigEndian>(integer)?;
            Ok(buf)
        },
        DataType::Integer => {
            //TODO support other radix
            let integer = s.parse::<u32>()?;
            let mut buf = Vec::new();
            buf.write_u32::<BigEndian>(integer)?;
            Ok(buf)
        },
        DataType::Float => {
            let float = s.parse::<f32>()?;
            let mut buf = Vec::new();
            buf.write_f32::<BigEndian>(float)?;
            Ok(buf)
        },
        DataType::Text(x) => {
            // Pad with 0 (NUL) bytes out to the fixed text allocation.
            let mut bytes = s.as_bytes().to_vec();
            if bytes.len() > x {
                return Err("text value longer than column allocation".into());
            }
            bytes.resize(x, 0);
            Ok(bytes)
        },
    }
}
impl Index<usize> for Tuple {
    type Output = [u8];
    /// Borrow the raw bytes of field `index`: from its start offset up to
    /// the next field's start, or to the end of `data` for the last field.
    fn index(&self, index: usize) -> &Self::Output {
        let start = self.indexes[index];
        match self.indexes.get(index + 1) {
            Some(&end) => &self.data[start..end],
            None => &self.data[start..],
        }
    }
}
impl Tuple {
    /// Build a tuple from a CSV record, encoding each column according to
    /// the matching column type in `schema`.
    pub fn from_stringrecord(record: StringRecord, schema: &Schema) -> Result<Self> {
        let mut data = Vec::new();
        let mut indexes = Vec::with_capacity(record.len());
        for col_idx in 0..record.len() {
            // Record where this field's bytes will begin.
            indexes.push(data.len());
            // Encode the raw text per the column's declared type.
            let encoded = string_to_binary(
                &record[col_idx],
                &schema.column_types[col_idx]
            )?;
            data.extend(encoded);
        }
        Ok(Tuple { data, indexes })
    }
}
// More final version of the type conversions.
// The code above will be refactored to use the traits below correctly.
/// Decode one encoded tuple field into a typed value.
pub trait FromTupleField {
    // Fails when the byte slice has the wrong length or content for `Self`.
    fn from_tuple_field(field: &[u8]) -> Result<Self>
        where Self: Sized;
}
impl FromTupleField for u16 {
    // SmallInt: exactly two big-endian bytes.
    fn from_tuple_field(field: &[u8]) -> Result<u16> {
        if field.len() != 2 {
            return Err("data has wrong number of bytes".into());
        }
        let mut rdr = Cursor::new(field);
        rdr.read_u16::<BigEndian>()
            .chain_err(|| "Error converting field")
    }
}
impl FromTupleField for u32 {
    // Integer: exactly four big-endian bytes.
    fn from_tuple_field(field: &[u8]) -> Result<u32> {
        if field.len() != 4 {
            return Err("data has wrong number of bytes".into());
        }
        let mut rdr = Cursor::new(field);
        rdr.read_u32::<BigEndian>()
            .chain_err(|| "Error converting field")
    }
}
impl FromTupleField for f32 {
    // Float: exactly four big-endian bytes.
    fn from_tuple_field(field: &[u8]) -> Result<f32> {
        if field.len() != 4 {
            return Err("data has wrong number of bytes".into());
        }
        let mut rdr = Cursor::new(field);
        rdr.read_f32::<BigEndian>()
            .chain_err(|| "Error converting field")
    }
}
impl FromTupleField for String {
    // Text: any length, but must be valid UTF-8. Note: trailing NUL
    // padding is NOT stripped here (unlike display_with_type).
    fn from_tuple_field(field: &[u8]) -> Result<String> {
        String::from_utf8(field.to_vec())
            .chain_err(|| "Error converting field")
    }
}
/// Free-function convenience wrapper over [`FromTupleField`].
pub fn field_parse<T: FromTupleField>(field: &[u8]) -> Result<T> {
    FromTupleField::from_tuple_field(field)
}
impl Tuple {
    /// Decode column `col` of this tuple into a typed value.
    pub fn get_parse<T>(&self, col: usize) -> Result<T>
        where T: FromTupleField
    {
        field_parse::<T>(&self[col])
    }
}
// Into (opposite of From) TupleRecord
// Should never fail, so panic on error
pub trait ToTupleField {
    // Consume the value and return its on-disk byte encoding.
    fn to_tuple_field(self) -> Vec<u8>;
}
impl ToTupleField for u16 {
    // Two big-endian bytes; writing into a Vec cannot fail in practice.
    fn to_tuple_field(self) -> Vec<u8> {
        let mut buf = Vec::new();
        buf.write_u16::<BigEndian>(self).expect("Bad Convert");
        buf
    }
}
impl ToTupleField for u32 {
    // Four big-endian bytes.
    fn to_tuple_field(self) -> Vec<u8> {
        let mut buf = Vec::new();
        buf.write_u32::<BigEndian>(self).expect("Bad Convert");
        buf
    }
}
impl ToTupleField for f32 {
    // Four big-endian bytes (IEEE-754 bit pattern).
    fn to_tuple_field(self) -> Vec<u8> {
        let mut buf = Vec::new();
        buf.write_f32::<BigEndian>(self).expect("Bad Convert");
        buf
    }
}
impl ToTupleField for String {
    // Raw UTF-8 bytes; NUL padding to the column width is NOT applied here.
    fn to_tuple_field(self) -> Vec<u8> {
        self.into_bytes()
    }
}
// Tuple Append
// TODO test
impl Tuple {
    /// Consume `self`, move all of `other`'s bytes onto the end, and
    /// return the combined tuple. `other.data` is drained; its `indexes`
    /// are read but left in place.
    pub fn append(mut self, other: &mut Tuple) -> Tuple {
        // Offsets from `other` must be shifted past the bytes we already hold.
        let offset = self.data.len();
        self.indexes.extend(other.indexes.iter().map(|&i| i + offset));
        self.data.append(&mut other.data);
        self
    }
}
// TODO test text alloc
#[cfg(test)]
mod tests {
    use super::*;
    // Two three-field tuples: text, 2-byte int, text.
    fn make_tuples() -> Vec<Tuple> {
        let t0 = Tuple::new(
            vec![
                b"one".to_vec(),
                vec![0u8, 2],
                b"three".to_vec(),
            ]
        );
        let t1 = Tuple::new(
            vec![
                b"four".to_vec(),
                vec![0u8, 66],
                b"six".to_vec(),
            ]
        );
        vec![t0, t1]
    }
    #[test]
    fn test_append() {
        let tuples = make_tuples();
        println!("{:?}", tuples[0]);
        println!("{:?}", tuples[1]);
        // Concatenated bytes of both tuples; the second tuple's offsets
        // (0, 4, 6) are shifted by the 10 bytes of the first.
        let expected = Tuple {
            data: vec![
                111,110,101,0,2,116,104,114,101,101,
                102,111,117,114,0,66,115,105,120,
            ],
            indexes: vec![
                0,3,5,10,14,16,
            ],
        };
        let t0 = tuples[0].clone();
        let mut t1 = tuples[1].clone();
        assert_eq!(expected, t0.append(&mut t1));
    }
}
|
/*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under both the MIT license found in the
* LICENSE-MIT file in the root directory of this source tree and the Apache
* License, Version 2.0 found in the LICENSE-APACHE file in the root directory
* of this source tree.
*/
#![cfg_attr(feature = "str_pattern_extensions", feature(pattern))]
#![cfg_attr(feature = "str_pattern_extensions", feature(associated_type_bounds))]
//! A collection of well-tested primitives that have been useful. Most modules stand alone.
pub mod any;
pub mod cast;
pub mod cell;
pub mod cmp;
pub mod coerce;
pub mod dupe;
pub(crate) mod ext;
pub mod file;
pub mod hash;
pub mod phantom;
pub mod prelude;
pub mod types;
pub mod variants;
#[cfg(test)]
mod test;
/// Causes Rust to exit the process when any panic occurs.
#[deprecated(
    since = "0.4.2",
    note = "Compiling with -Cpanic=abort provides similar behavior and smaller binaries"
)]
pub fn terminate_on_panic() {
    // Chain onto whatever hook was installed before, so its reporting
    // (e.g. the default backtrace printer) still runs, then hard-exit
    // with a failure status.
    let previous_hook = std::panic::take_hook();
    std::panic::set_hook(Box::new(move |info| {
        previous_hook(info);
        std::process::exit(1);
    }));
}
|
use serde::Deserialize;
use common::result::Result;
use crate::domain::role::{RoleId, RoleRepository};
use crate::domain::user::{UserId, UserRepository};
/// Request payload carrying the id of the role to assign.
#[derive(Deserialize)]
pub struct ChangeRoleCommand {
    pub role_id: String,
}
/// Use case: assign a new role to a user, authorized by an admin user.
pub struct ChangeRole<'a> {
    role_repo: &'a dyn RoleRepository,
    user_repo: &'a dyn UserRepository,
}
impl<'a> ChangeRole<'a> {
    /// Wire the use case up with its repositories.
    pub fn new(role_repo: &'a dyn RoleRepository, user_repo: &'a dyn UserRepository) -> Self {
        ChangeRole { role_repo, user_repo }
    }
    /// Assign the role named in `cmd` to user `user_id`, acting as the
    /// admin identified by `auth_id`. Authorization itself happens inside
    /// `User::change_role`.
    pub async fn exec(
        &self,
        auth_id: String,
        user_id: String,
        cmd: ChangeRoleCommand,
    ) -> Result<()> {
        let admin_id = UserId::new(auth_id)?;
        let admin = self.user_repo.find_by_id(&admin_id).await?;
        let target_id = UserId::new(user_id)?;
        let mut user = self.user_repo.find_by_id(&target_id).await?;
        let role_id = RoleId::new(cmd.role_id)?;
        let role = self.role_repo.find_by_id(&role_id).await?;
        user.change_role(role, &admin)?;
        self.user_repo.save(&mut user).await?;
        Ok(())
    }
}
|
use sha2::{Digest, Sha512Trunc256};
/// Byte length of a SHA-512/256 digest.
pub const SIZE256: usize = 32;
// Convenience alias for the truncated SHA-512 hasher from the sha2 crate.
pub type SHA512_256 = Sha512Trunc256;
impl super::Hash for SHA512_256 {
    // Digest length in bytes, taken from the underlying Digest impl.
    fn size() -> usize {
        SHA512_256::output_size()
    }
    fn block_size() -> usize {
        super::BLOCK_SIZE
    }
    // Fully-qualified call picks Digest::reset explicitly.
    fn reset(&mut self) {
        Digest::reset(self)
    }
    // `sum` must not consume the running state, so finalize a clone.
    fn sum(&mut self) -> Vec<u8> {
        self.clone().result().as_slice().to_vec()
    }
}
/// Construct a fresh SHA-512/256 hasher.
pub fn new512_256() -> SHA512_256 {
    Digest::new()
}
/// One-shot SHA-512/256 of `b`, returned as a fixed 32-byte array.
pub fn sum512_256(b: &[u8]) -> [u8; SIZE256] {
    let d = Sha512Trunc256::digest(b);
    let mut out = [0u8; SIZE256];
    out.copy_from_slice(d.as_slice());
    out
}
|
/// Print "positive" when the hard-coded sample value is greater than zero.
fn main() {
    let n = 4;
    let message = if n > 0 { Some("positive") } else { None };
    if let Some(text) = message {
        println!("{}", text);
    }
}
|
use self::Filter::*;
use bindgen::Builder;
use make_cmd::make;
use std::env::var;
use std::fs::copy;
use std::path::{Path, PathBuf};
use std::process::{Command, ExitStatus};
// Which bindgen whitelist bucket a symbol belongs to.
enum Filter {
    Function,
    Type,
    Var,
}
// Symbols from git's headers that bindings are generated for, grouped by
// the git subsystem they come from.
const WHITELIST: &[(Filter, &str)] = &[
    //
    // cache
    (Var, "startup_info"),
    (Function, "validate_cache_entries"),
    (Function, "setup_git_directory_gently"),
    //
    // repository
    (Var, "the_repository"),
    (Function, "initialize_the_repository"),
    (Function, "repo_init"),
    //
    // revisions
    (Type, "rev_info"),
    (Function, "add_head_to_pending"),
    (Function, "repo_init_revisions"),
    //
    // version
    (Var, "git_version_string"),
];
/// Build script: vendors and builds git's static libraries, links them,
/// and generates whitelisted bindgen bindings into OUT_DIR/lib.rs.
fn main() -> Result<(), Box<dyn std::error::Error>> {
    let root = Path::new(env!("CARGO_MANIFEST_DIR"));
    let header = root.join("src").join("git-sys.h");
    let lib = root.join("lib");
    let out = PathBuf::from(var("OUT_DIR").expect("OUT_DIR env var not found"));
    // update the git submodule to the latest
    Command::new("git")
        .current_dir(&root)
        .arg("submodule")
        .arg("update")
        .arg("--init")
        .status()?
        .success_or_panic();
    // create the configure tool
    make()
        .current_dir(&lib)
        .arg("configure")
        .status()?
        .success_or_panic();
    // cache the autoconf configuration generated
    let cache = out.join("configure.cache");
    // run the configuration with our parameters
    // (OpenSSL and curl are disabled to keep the build self-contained)
    Command::new("./configure")
        .current_dir(&lib)
        .arg(format!("--cache-file={}", cache.display()))
        .arg("NO_OPENSSL=1")
        .arg("NO_CURL=1")
        .status()?
        .success_or_panic();
    // run the actual build
    make()
        .current_dir(&lib)
        .arg(format!("-j{}", num_cpus::get()))
        .status()?
        .success_or_panic();
    // copy over the generated static libraries to our output directory
    copy(lib.join("libgit.a"), out.join("libgit.a"))?;
    copy(lib.join("vcs-svn").join("lib.a"), out.join("libvcssvn.a"))?;
    copy(lib.join("xdiff").join("lib.a"), out.join("libxdiff.a"))?;
    // link the libraries that we just built
    println!("cargo:rustc-link-search=native={}", out.display());
    println!("cargo:rustc-link-lib=static=git");
    println!("cargo:rustc-link-lib=static=vcssvn");
    println!("cargo:rustc-link-lib=static=xdiff");
    // and link the system dependencies
    println!("cargo:rustc-link-lib=z");
    // apply our whitelist filters to a [`Builder`](bindgen::Builder)
    let mut builder = Builder::default();
    for (filter, name) in WHITELIST {
        builder = match filter {
            Function => builder.whitelist_function(name),
            Type => builder.whitelist_type(name),
            Var => builder.whitelist_var(name),
        }
    }
    // generate bindings for our header
    // NOTE(review): "-I/{}" produces a doubled leading slash since
    // `lib` is already absolute — harmless on Unix, but likely a typo
    // for "-I{}"; confirm before changing.
    let bindings = builder
        .header(header.display().to_string())
        .clang_arg(format!("-I/{}", lib.display()))
        .rustfmt_bindings(true)
        .generate()
        .expect("unable to generate bindings");
    // write out the generated bindings
    bindings.write_to_file(out.join("lib.rs"))?;
    Ok(())
}
/// Helper trait to panic when a command doesn't return a success code
trait SuccessOrPanic {
    // Consumes the status; panics unless it represents success.
    fn success_or_panic(self);
}
impl SuccessOrPanic for ExitStatus {
    /// Panic if our [`ExitStatus`] isn't successful; the panic message
    /// carries the status for diagnostics.
    fn success_or_panic(self) {
        assert!(
            self.success(),
            "ran into error code {} while building",
            self
        );
    }
}
|
use std::collections::HashMap;
use std::path::Path;
use std::fs::File;
use std::io::Read;
use toml;
/// TOML-backed map of factoid name -> factoid text.
#[derive(Serialize, Deserialize, Debug)]
pub struct Factoids {
    pub factoids: HashMap<String, String>
}
impl Factoids {
    /// Load factoids from the TOML file at `src`.
    ///
    /// # Panics
    /// Panics with a descriptive message (including the path) if the file
    /// cannot be opened, read, or parsed — previously these were bare
    /// `unwrap()`s with no context.
    pub fn load(src: &Path) -> Factoids {
        let mut file = File::open(src)
            .unwrap_or_else(|e| panic!("failed to open factoids file {}: {}", src.display(), e));
        let mut contents = String::new();
        file.read_to_string(&mut contents)
            .unwrap_or_else(|e| panic!("failed to read factoids file {}: {}", src.display(), e));
        toml::from_str(&contents)
            .unwrap_or_else(|e| panic!("invalid TOML in {}: {}", src.display(), e))
    }
}
|
#[doc = "Register `MACMIIAR` reader"]
pub type R = crate::R<MACMIIAR_SPEC>;
#[doc = "Register `MACMIIAR` writer"]
pub type W = crate::W<MACMIIAR_SPEC>;
#[doc = "Field `MB` reader - MII busy"]
pub type MB_R = crate::BitReader<MB_A>;
#[doc = "MII busy\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum MB_A {
    #[doc = "1: This bit is set to 1 by the application to indicate that a read or write access is in progress"]
    Busy = 1,
}
// Only the set state has a named variant, so the reader's `variant`
// returns Option (None for the cleared bit).
impl From<MB_A> for bool {
    #[inline(always)]
    fn from(variant: MB_A) -> Self {
        variant as u8 != 0
    }
}
impl MB_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> Option<MB_A> {
        match self.bits {
            true => Some(MB_A::Busy),
            _ => None,
        }
    }
    #[doc = "This bit is set to 1 by the application to indicate that a read or write access is in progress"]
    #[inline(always)]
    pub fn is_busy(&self) -> bool {
        *self == MB_A::Busy
    }
}
#[doc = "Field `MB` writer - MII busy"]
pub type MB_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, MB_A>;
impl<'a, REG, const O: u8> MB_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "This bit is set to 1 by the application to indicate that a read or write access is in progress"]
    #[inline(always)]
    pub fn busy(self) -> &'a mut crate::W<REG> {
        self.variant(MB_A::Busy)
    }
}
#[doc = "Field `MW` reader - MII write"]
pub type MW_R = crate::BitReader<MW_A>;
#[doc = "MII write\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum MW_A {
    #[doc = "0: Read operation"]
    Read = 0,
    #[doc = "1: Write operation"]
    Write = 1,
}
impl From<MW_A> for bool {
    #[inline(always)]
    fn from(variant: MW_A) -> Self {
        variant as u8 != 0
    }
}
impl MW_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    // Both bit states are enumerated, so no Option is needed here.
    pub fn variant(&self) -> MW_A {
        match self.bits {
            false => MW_A::Read,
            true => MW_A::Write,
        }
    }
    #[doc = "Read operation"]
    #[inline(always)]
    pub fn is_read(&self) -> bool {
        *self == MW_A::Read
    }
    #[doc = "Write operation"]
    #[inline(always)]
    pub fn is_write(&self) -> bool {
        *self == MW_A::Write
    }
}
#[doc = "Field `MW` writer - MII write"]
pub type MW_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, MW_A>;
impl<'a, REG, const O: u8> MW_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "Read operation"]
    #[inline(always)]
    pub fn read(self) -> &'a mut crate::W<REG> {
        self.variant(MW_A::Read)
    }
    #[doc = "Write operation"]
    #[inline(always)]
    pub fn write(self) -> &'a mut crate::W<REG> {
        self.variant(MW_A::Write)
    }
}
#[doc = "Field `CR` reader - Clock range"]
pub type CR_R = crate::FieldReader<CR_A>;
#[doc = "Clock range\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
#[repr(u8)]
pub enum CR_A {
    #[doc = "0: 60-100MHz HCLK/42"]
    Cr60100 = 0,
    #[doc = "1: 100-150 MHz HCLK/62"]
    Cr100150 = 1,
    #[doc = "2: 20-35MHz HCLK/16"]
    Cr2035 = 2,
    #[doc = "3: 35-60MHz HCLK/16"]
    Cr3560 = 3,
    #[doc = "4: 150-168MHz HCLK/102"]
    Cr150168 = 4,
}
impl From<CR_A> for u8 {
    #[inline(always)]
    fn from(variant: CR_A) -> Self {
        variant as _
    }
}
impl crate::FieldSpec for CR_A {
    type Ux = u8;
}
impl CR_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    // Field values 5..=7 are unassigned, so `variant` yields None for them.
    pub fn variant(&self) -> Option<CR_A> {
        match self.bits {
            0 => Some(CR_A::Cr60100),
            1 => Some(CR_A::Cr100150),
            2 => Some(CR_A::Cr2035),
            3 => Some(CR_A::Cr3560),
            4 => Some(CR_A::Cr150168),
            _ => None,
        }
    }
    #[doc = "60-100MHz HCLK/42"]
    #[inline(always)]
    pub fn is_cr_60_100(&self) -> bool {
        *self == CR_A::Cr60100
    }
    #[doc = "100-150 MHz HCLK/62"]
    #[inline(always)]
    pub fn is_cr_100_150(&self) -> bool {
        *self == CR_A::Cr100150
    }
    #[doc = "20-35MHz HCLK/16"]
    #[inline(always)]
    pub fn is_cr_20_35(&self) -> bool {
        *self == CR_A::Cr2035
    }
    #[doc = "35-60MHz HCLK/16"]
    #[inline(always)]
    pub fn is_cr_35_60(&self) -> bool {
        *self == CR_A::Cr3560
    }
    #[doc = "150-168MHz HCLK/102"]
    #[inline(always)]
    pub fn is_cr_150_168(&self) -> bool {
        *self == CR_A::Cr150168
    }
}
#[doc = "Field `CR` writer - Clock range"]
pub type CR_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 3, O, CR_A>;
impl<'a, REG, const O: u8> CR_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
    REG::Ux: From<u8>,
{
    #[doc = "60-100MHz HCLK/42"]
    #[inline(always)]
    pub fn cr_60_100(self) -> &'a mut crate::W<REG> {
        self.variant(CR_A::Cr60100)
    }
    #[doc = "100-150 MHz HCLK/62"]
    #[inline(always)]
    pub fn cr_100_150(self) -> &'a mut crate::W<REG> {
        self.variant(CR_A::Cr100150)
    }
    #[doc = "20-35MHz HCLK/16"]
    #[inline(always)]
    pub fn cr_20_35(self) -> &'a mut crate::W<REG> {
        self.variant(CR_A::Cr2035)
    }
    #[doc = "35-60MHz HCLK/16"]
    #[inline(always)]
    pub fn cr_35_60(self) -> &'a mut crate::W<REG> {
        self.variant(CR_A::Cr3560)
    }
    #[doc = "150-168MHz HCLK/102"]
    #[inline(always)]
    pub fn cr_150_168(self) -> &'a mut crate::W<REG> {
        self.variant(CR_A::Cr150168)
    }
}
#[doc = "Field `MR` reader - MII register - select the desired MII register in the PHY device"]
pub type MR_R = crate::FieldReader;
#[doc = "Field `MR` writer - MII register - select the desired MII register in the PHY device"]
// FieldWriterSafe: every 5-bit value is acceptable, so the setter is safe.
pub type MR_W<'a, REG, const O: u8> = crate::FieldWriterSafe<'a, REG, 5, O>;
#[doc = "Field `PA` reader - PHY address - select which of possible 32 PHYs is being accessed"]
pub type PA_R = crate::FieldReader;
#[doc = "Field `PA` writer - PHY address - select which of possible 32 PHYs is being accessed"]
pub type PA_W<'a, REG, const O: u8> = crate::FieldWriterSafe<'a, REG, 5, O>;
impl R {
    #[doc = "Bit 0 - MII busy"]
    #[inline(always)]
    pub fn mb(&self) -> MB_R {
        MB_R::new((self.bits & 1) != 0)
    }
    #[doc = "Bit 1 - MII write"]
    #[inline(always)]
    pub fn mw(&self) -> MW_R {
        MW_R::new(((self.bits >> 1) & 1) != 0)
    }
    #[doc = "Bits 2:4 - Clock range"]
    #[inline(always)]
    pub fn cr(&self) -> CR_R {
        CR_R::new(((self.bits >> 2) & 7) as u8)
    }
    // Note: bit 5 has no accessor here (not exposed by this register API).
    #[doc = "Bits 6:10 - MII register - select the desired MII register in the PHY device"]
    #[inline(always)]
    pub fn mr(&self) -> MR_R {
        MR_R::new(((self.bits >> 6) & 0x1f) as u8)
    }
    #[doc = "Bits 11:15 - PHY address - select which of possible 32 PHYs is being accessed"]
    #[inline(always)]
    pub fn pa(&self) -> PA_R {
        PA_R::new(((self.bits >> 11) & 0x1f) as u8)
    }
}
impl W {
    #[doc = "Bit 0 - MII busy"]
    #[inline(always)]
    #[must_use]
    pub fn mb(&mut self) -> MB_W<MACMIIAR_SPEC, 0> {
        MB_W::new(self)
    }
    #[doc = "Bit 1 - MII write"]
    #[inline(always)]
    #[must_use]
    pub fn mw(&mut self) -> MW_W<MACMIIAR_SPEC, 1> {
        MW_W::new(self)
    }
    #[doc = "Bits 2:4 - Clock range"]
    #[inline(always)]
    #[must_use]
    pub fn cr(&mut self) -> CR_W<MACMIIAR_SPEC, 2> {
        CR_W::new(self)
    }
    #[doc = "Bits 6:10 - MII register - select the desired MII register in the PHY device"]
    #[inline(always)]
    #[must_use]
    pub fn mr(&mut self) -> MR_W<MACMIIAR_SPEC, 6> {
        MR_W::new(self)
    }
    #[doc = "Bits 11:15 - PHY address - select which of possible 32 PHYs is being accessed"]
    #[inline(always)]
    #[must_use]
    pub fn pa(&mut self) -> PA_W<MACMIIAR_SPEC, 11> {
        PA_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    // Unsafe: bypasses per-field writers; caller guarantees field validity.
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "Ethernet MAC MII address register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`macmiiar::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`macmiiar::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct MACMIIAR_SPEC;
// Marker type tying the MACMIIAR register to its 32-bit backing storage.
impl crate::RegisterSpec for MACMIIAR_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`macmiiar::R`](R) reader structure"]
impl crate::Readable for MACMIIAR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`macmiiar::W`](W) writer structure"]
impl crate::Writable for MACMIIAR_SPEC {
    // Empty bitmaps: no write-0/write-1-sensitive fields during modify.
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets MACMIIAR to value 0"]
impl crate::Resettable for MACMIIAR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
// Copyright 2017-2018 Parity Technologies
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Serialization.
use core::fmt;
use core::{
convert::TryFrom,
iter::FromIterator,
marker::PhantomData,
mem,
mem::{
MaybeUninit,
},
ops::{Deref, Range, RangeInclusive},
time::Duration,
};
use core::num::{
NonZeroI8,
NonZeroI16,
NonZeroI32,
NonZeroI64,
NonZeroI128,
NonZeroU8,
NonZeroU16,
NonZeroU32,
NonZeroU64,
NonZeroU128,
};
use byte_slice_cast::{AsByteSlice, AsMutByteSlice, ToMutByteSlice};
#[cfg(target_has_atomic = "ptr")]
use crate::alloc::sync::Arc;
use crate::alloc::{
boxed::Box,
borrow::{Cow, ToOwned},
collections::{
BTreeMap, BTreeSet, VecDeque, LinkedList, BinaryHeap
},
rc::Rc,
string::String,
vec::Vec,
};
use crate::compact::Compact;
use crate::DecodeFinished;
use crate::encode_like::EncodeLike;
use crate::Error;
// Decoding preallocation is capped at this many bytes to bound what a
// malicious/garbage length prefix can make us allocate up front.
pub(crate) const MAX_PREALLOCATION: usize = 4 * 1024;
// Nanoseconds per second; presumably used by the Duration codec — confirm.
const A_BILLION: u32 = 1_000_000_000;
/// Trait that allows reading of data into a slice.
pub trait Input {
    /// Should return the remaining length of the input data. If no information about the input
    /// length is available, `None` should be returned.
    ///
    /// The length is used to constrain the preallocation while decoding. Returning a garbage
    /// length can open the doors for a denial of service attack to your application.
    /// Otherwise, returning `None` can decrease the performance of your application.
    fn remaining_len(&mut self) -> Result<Option<usize>, Error>;
    /// Read the exact number of bytes required to fill the given buffer.
    ///
    /// Note that this function is similar to `std::io::Read::read_exact` and not
    /// `std::io::Read::read`.
    fn read(&mut self, into: &mut [u8]) -> Result<(), Error>;
    /// Read a single byte from the input.
    fn read_byte(&mut self) -> Result<u8, Error> {
        let mut buf = [0u8];
        self.read(&mut buf[..])?;
        Ok(buf[0])
    }
    /// Descend into nested reference when decoding.
    /// This is called when decoding a new reference-based instance,
    /// such as `Vec` or `Box`. Currently all such types are
    /// allocated on the heap.
    fn descend_ref(&mut self) -> Result<(), Error> {
        Ok(())
    }
    /// Ascend to previous structure level when decoding.
    /// This is called when decoding reference-based type is finished.
    fn ascend_ref(&mut self) {}
    /// !INTERNAL USE ONLY!
    ///
    /// Decodes a `bytes::Bytes`.
    #[cfg(feature = "bytes")]
    #[doc(hidden)]
    fn scale_internal_decode_bytes(&mut self) -> Result<bytes::Bytes, Error> where Self: Sized {
        Vec::<u8>::decode(self).map(bytes::Bytes::from)
    }
}
impl<'a> Input for &'a [u8] {
    /// A slice always knows exactly how many bytes remain.
    fn remaining_len(&mut self) -> Result<Option<usize>, Error> {
        Ok(Some(self.len()))
    }
    /// Copy `into.len()` bytes off the front of the slice and advance it
    /// past the consumed prefix.
    fn read(&mut self, into: &mut [u8]) -> Result<(), Error> {
        if into.len() > self.len() {
            return Err("Not enough data to fill buffer".into());
        }
        let (head, rest) = self.split_at(into.len());
        into.copy_from_slice(head);
        *self = rest;
        Ok(())
    }
}
#[cfg(feature = "std")]
impl From<std::io::Error> for Error {
    // Lossy conversion: only the ErrorKind survives, mapped to a static
    // description string (static strs keep Error allocation-free).
    fn from(err: std::io::Error) -> Self {
        use std::io::ErrorKind::*;
        match err.kind() {
            NotFound => "io error: NotFound".into(),
            PermissionDenied => "io error: PermissionDenied".into(),
            ConnectionRefused => "io error: ConnectionRefused".into(),
            ConnectionReset => "io error: ConnectionReset".into(),
            ConnectionAborted => "io error: ConnectionAborted".into(),
            NotConnected => "io error: NotConnected".into(),
            AddrInUse => "io error: AddrInUse".into(),
            AddrNotAvailable => "io error: AddrNotAvailable".into(),
            BrokenPipe => "io error: BrokenPipe".into(),
            AlreadyExists => "io error: AlreadyExists".into(),
            WouldBlock => "io error: WouldBlock".into(),
            InvalidInput => "io error: InvalidInput".into(),
            InvalidData => "io error: InvalidData".into(),
            TimedOut => "io error: TimedOut".into(),
            WriteZero => "io error: WriteZero".into(),
            Interrupted => "io error: Interrupted".into(),
            Other => "io error: Other".into(),
            UnexpectedEof => "io error: UnexpectedEof".into(),
            _ => "io error: Unknown".into(),
        }
    }
}
/// Wrapper that implements Input for any `Read` type.
#[cfg(feature = "std")]
pub struct IoReader<R: std::io::Read>(pub R);
#[cfg(feature = "std")]
impl<R: std::io::Read> Input for IoReader<R> {
    // Arbitrary readers cannot report a remaining length.
    fn remaining_len(&mut self) -> Result<Option<usize>, Error> {
        Ok(None)
    }
    fn read(&mut self, into: &mut [u8]) -> Result<(), Error> {
        self.0.read_exact(into).map_err(Into::into)
    }
}
/// Trait that allows writing of data.
pub trait Output {
    /// Write to the output.
    fn write(&mut self, bytes: &[u8]);
    /// Write a single byte to the output.
    fn push_byte(&mut self, byte: u8) {
        self.write(&[byte]);
    }
}
// In no-std builds `Vec<u8>` gets its own impl; in std builds it is
// covered by the blanket `io::Write` impl below instead.
#[cfg(not(feature = "std"))]
impl Output for Vec<u8> {
    fn write(&mut self, bytes: &[u8]) {
        self.extend_from_slice(bytes)
    }
}
#[cfg(feature = "std")]
impl<W: std::io::Write> Output for W {
    fn write(&mut self, bytes: &[u8]) {
        (self as &mut dyn std::io::Write).write_all(bytes).expect("Codec outputs are infallible");
    }
}
/// !INTERNAL USE ONLY!
///
/// This enum provides type information to optimize encoding/decoding by doing fake specialization.
#[doc(hidden)]
#[non_exhaustive]
pub enum TypeInfo {
    /// Default value of [`Encode::TYPE_INFO`] to not require implementors to set this value in the trait.
    Unknown,
    // One variant per primitive numeric type whose codec can be fast-pathed.
    U8,
    I8,
    U16,
    I16,
    U32,
    I32,
    U64,
    I64,
    U128,
    I128,
    F32,
    F64,
}
/// Trait that allows zero-copy write of value-references to slices in LE format.
///
/// Implementations should override `using_encoded` for value types and `encode_to` and `size_hint` for allocating types.
/// Wrapper types should override all methods.
// NOTE: the default `encode_to` and `using_encoded` are mutually recursive
// (via `encode`), so an implementor must override at least one of them.
pub trait Encode {
    // !INTERNAL USE ONLY!
    // This const helps SCALE to optimize the encoding/decoding by doing fake specialization.
    #[doc(hidden)]
    const TYPE_INFO: TypeInfo = TypeInfo::Unknown;
    /// If possible give a hint of expected size of the encoding.
    ///
    /// This method is used inside default implementation of `encode`
    /// to avoid re-allocations.
    fn size_hint(&self) -> usize {
        0
    }
    /// Convert self to a slice and append it to the destination.
    fn encode_to<T: Output + ?Sized>(&self, dest: &mut T) {
        self.using_encoded(|buf| dest.write(buf));
    }
    /// Convert self to an owned vector.
    fn encode(&self) -> Vec<u8> {
        let mut r = Vec::with_capacity(self.size_hint());
        self.encode_to(&mut r);
        r
    }
    /// Convert self to a slice and then invoke the given closure with it.
    fn using_encoded<R, F: FnOnce(&[u8]) -> R>(&self, f: F) -> R {
        f(&self.encode())
    }
    /// Calculates the encoded size.
    ///
    /// Should be used when the encoded data isn't required.
    ///
    /// # Note
    ///
    /// This works by using a special [`Output`] that only tracks the size. So, there are no allocations inside the
    /// output. However, this can not prevent allocations that some types are doing inside their own encoding.
    fn encoded_size(&self) -> usize {
        let mut size_tracker = SizeTracker { written: 0 };
        self.encode_to(&mut size_tracker);
        size_tracker.written
    }
}
// Implements `Output` and only keeps track of the number of written bytes
struct SizeTracker {
    written: usize,
}
impl Output for SizeTracker {
    fn write(&mut self, bytes: &[u8]) {
        self.written += bytes.len();
    }
    // Overridden so single-byte pushes skip building a temporary slice.
    fn push_byte(&mut self, _byte: u8) {
        self.written += 1;
    }
}
/// Trait that allows the length of a collection to be read, without having
/// to read and decode the entire elements.
pub trait DecodeLength {
    /// Return the number of elements in `self_encoded`.
    fn len(self_encoded: &[u8]) -> Result<usize, Error>;
}
/// Trait that allows zero-copy read of value-references from slices in LE format.
pub trait Decode: Sized {
    // !INTERNAL USE ONLY!
    // This const helps SCALE to optimize the encoding/decoding by doing fake specialization.
    #[doc(hidden)]
    const TYPE_INFO: TypeInfo = TypeInfo::Unknown;
    /// Attempt to deserialise the value from input.
    fn decode<I: Input>(input: &mut I) -> Result<Self, Error>;
    /// Attempt to deserialize the value from input into a pre-allocated piece of memory.
    ///
    /// The default implementation will just call [`Decode::decode`].
    ///
    /// # Safety
    ///
    /// If this function returns `Ok` then `dst` **must** be properly initialized.
    ///
    /// This is enforced by requiring the implementation to return a [`DecodeFinished`]
    /// which can only be created by calling [`DecodeFinished::assert_decoding_finished`] which is `unsafe`.
    fn decode_into<I: Input>(input: &mut I, dst: &mut MaybeUninit<Self>) -> Result<DecodeFinished, Error> {
        let value = Self::decode(input)?;
        dst.write(value);
        // SAFETY: We've written the decoded value to `dst` so calling this is safe.
        unsafe { Ok(DecodeFinished::assert_decoding_finished()) }
    }
    /// Attempt to skip the encoded value from input.
    ///
    /// The default implementation of this function is just calling [`Decode::decode`].
    /// When possible, an implementation should provide a specialized implementation.
    fn skip<I: Input>(input: &mut I) -> Result<(), Error> {
        Self::decode(input).map(|_| ())
    }
    /// Returns the fixed encoded size of the type.
    ///
    /// If it returns `Some(size)` then all possible values of this
    /// type have the given size (in bytes) when encoded.
    ///
    /// NOTE: A type with a fixed encoded size may return `None`.
    fn encoded_fixed_size() -> Option<usize> {
        None
    }
}
/// Trait that allows zero-copy read/write of value-references to/from slices in LE format.
pub trait Codec: Decode + Encode {}
// Blanket impl: anything both decodable and encodable is a `Codec`.
impl<S: Decode + Encode> Codec for S {}
/// Trait that bound `EncodeLike` along with `Encode`. Useful for generics being used in functions
/// with `EncodeLike` parameters.
pub trait FullEncode: Encode + EncodeLike {}
impl<S: Encode + EncodeLike> FullEncode for S {}
/// Trait that bound `EncodeLike` along with `Codec`. Useful for generics being used in functions
/// with `EncodeLike` parameters.
pub trait FullCodec: Decode + FullEncode {}
impl<S: Decode + FullEncode> FullCodec for S {}
/// A marker trait for types that wrap other encodable type.
///
/// Such types should not carry any additional information
/// that would require to be encoded, because the encoding
/// is assumed to be the same as the wrapped type.
///
/// The wrapped type that is referred to is the [`Deref::Target`].
///
/// The blanket `impl Encode` below forwards every method through `**self`.
pub trait WrapperTypeEncode: Deref {}
// `Box<T>` encodes exactly like `T`, and either may stand in for the other.
impl<T: ?Sized> WrapperTypeEncode for Box<T> {}
impl<T: ?Sized + Encode> EncodeLike for Box<T> {}
impl<T: Encode> EncodeLike<T> for Box<T> {}
impl<T: Encode> EncodeLike<Box<T>> for T {}
// Shared and mutable references encode like the referent (one and two levels deep).
impl<T: ?Sized> WrapperTypeEncode for &T {}
impl<T: ?Sized + Encode> EncodeLike for &T {}
impl<T: Encode> EncodeLike<T> for &T {}
impl<T: Encode> EncodeLike<&T> for T {}
impl<T: Encode> EncodeLike<T> for &&T {}
impl<T: Encode> EncodeLike<&&T> for T {}
impl<T: ?Sized> WrapperTypeEncode for &mut T {}
impl<T: ?Sized + Encode> EncodeLike for &mut T {}
impl<T: Encode> EncodeLike<T> for &mut T {}
impl<T: Encode> EncodeLike<&mut T> for T {}
// `Cow` encodes like the borrowed form of its target.
impl<'a, T: ToOwned + ?Sized> WrapperTypeEncode for Cow<'a, T> {}
impl<'a, T: ToOwned + Encode + ?Sized> EncodeLike for Cow<'a, T> {}
impl<'a, T: ToOwned + Encode> EncodeLike<T> for Cow<'a, T> {}
impl<'a, T: ToOwned + Encode> EncodeLike<Cow<'a, T>> for T {}
// `Rc<T>` encodes like `T`.
impl<T: ?Sized> WrapperTypeEncode for Rc<T> {}
impl<T: ?Sized + Encode> EncodeLike for Rc<T> {}
impl<T: Encode> EncodeLike<T> for Rc<T> {}
impl<T: Encode> EncodeLike<Rc<T>> for T {}
// `String` encodes like `str` (its `Deref` target), so it is interchangeable with `&str`.
impl WrapperTypeEncode for String {}
impl EncodeLike for String {}
impl EncodeLike<&str> for String {}
impl EncodeLike<String> for &str {}
// `Arc` needs pointer-sized atomics, so these impls are gated on the target's support.
#[cfg(target_has_atomic = "ptr")]
mod atomic_ptr_targets {
    use super::*;
    // `Arc<T>` encodes like `T`, mirroring the `Rc`/`Box` impls above.
    impl<T: ?Sized> WrapperTypeEncode for Arc<T> {}
    impl<T: ?Sized + Encode> EncodeLike for Arc<T> {}
    impl<T: Encode> EncodeLike<T> for Arc<T> {}
    impl<T: Encode> EncodeLike<Arc<T>> for T {}
}
// Optional integration with the `bytes` crate: `Bytes` encodes like `[u8]`.
#[cfg(feature = "bytes")]
mod feature_wrapper_bytes {
    use super::*;
    use bytes::Bytes;
    // `Bytes` derefs to `[u8]`, so it shares the byte-slice encoding and is
    // interchangeable with `&[u8]` / `Vec<u8>` for encoding purposes.
    impl WrapperTypeEncode for Bytes {}
    impl EncodeLike for Bytes {}
    impl EncodeLike<&[u8]> for Bytes {}
    impl EncodeLike<Vec<u8>> for Bytes {}
    impl EncodeLike<Bytes> for &[u8] {}
    impl EncodeLike<Bytes> for Vec<u8> {}
}
// A cursor over a `bytes::Bytes` buffer used by `decode_from_bytes`.
// Tracking the position manually (instead of `split_to`) avoids the
// `Bytes` internal-representation change that `split_to` can trigger.
#[cfg(feature = "bytes")]
struct BytesCursor {
    bytes: bytes::Bytes,
    // Read offset into `bytes`; invariant: `position <= bytes.len()`.
    position: usize
}
#[cfg(feature = "bytes")]
impl Input for BytesCursor {
    fn remaining_len(&mut self) -> Result<Option<usize>, Error> {
        // Cannot underflow: `position` never exceeds `bytes.len()` (see `read`).
        Ok(Some(self.bytes.len() - self.position))
    }
    fn read(&mut self, into: &mut [u8]) -> Result<(), Error> {
        if into.len() > self.bytes.len() - self.position {
            return Err("Not enough data to fill buffer".into())
        }
        into.copy_from_slice(&self.bytes[self.position..self.position + into.len()]);
        self.position += into.len();
        Ok(())
    }
    fn scale_internal_decode_bytes(&mut self) -> Result<bytes::Bytes, Error> {
        // Length prefix is a compact-encoded u32, as for any byte collection.
        let length = <Compact<u32>>::decode(self)?.0 as usize;
        // Drop the already-consumed prefix so `split_to` below starts at the payload.
        bytes::Buf::advance(&mut self.bytes, self.position);
        self.position = 0;
        if length > self.bytes.len() {
            return Err("Not enough data to fill buffer".into());
        }
        // Zero-copy: hand out a ref-counted view of the payload.
        Ok(self.bytes.split_to(length))
    }
}
/// Decodes a given `T` from `Bytes`.
///
/// Prefer this over `T::decode(&mut &bytes[..])` when `T` (or a field of it)
/// is itself `Bytes`, as it enables the zero-copy path in `scale_internal_decode_bytes`.
#[cfg(feature = "bytes")]
pub fn decode_from_bytes<T>(bytes: bytes::Bytes) -> Result<T, Error> where T: Decode {
    // We could just use implement `Input` for `Bytes` and use `Bytes::split_to`
    // to move the cursor, however doing it this way allows us to prevent an
    // unnecessary allocation when the `T` which is being deserialized doesn't
    // take advantage of the fact that it's being deserialized from `Bytes`.
    //
    // `Bytes` can be cheaply created from a `Vec<u8>`. It is both zero-copy
    // *and* zero-allocation. However once you `.clone()` it or call `split_to()`
    // an extra one-time allocation is triggered where the `Bytes` changes it's internal
    // representation from essentially being a `Box<[u8]>` into being an `Arc<Box<[u8]>>`.
    //
    // If the `T` is `Bytes` or is a structure which contains `Bytes` in it then
    // we don't really care, because this allocation will have to be made anyway.
    //
    // However, if `T` doesn't contain any `Bytes` then this extra allocation is
    // technically unnecessary, and we can avoid it by tracking the position ourselves
    // and treating the underlying `Bytes` as a fancy `&[u8]`.
    let mut input = BytesCursor {
        bytes,
        position: 0
    };
    T::decode(&mut input)
}
#[cfg(feature = "bytes")]
impl Decode for bytes::Bytes {
    fn decode<I: Input>(input: &mut I) -> Result<Self, Error> {
        // Delegates to the input so a `BytesCursor` can serve a zero-copy view;
        // other inputs fall back to the default (allocating) implementation.
        input.scale_internal_decode_bytes()
    }
}
// Blanket impl: every `WrapperTypeEncode` type encodes exactly like its
// `Deref` target, forwarding all four `Encode` methods through `**self`.
impl<T, X> Encode for X where
    T: Encode + ?Sized,
    X: WrapperTypeEncode<Target = T>,
{
    fn size_hint(&self) -> usize {
        (**self).size_hint()
    }
    fn using_encoded<R, F: FnOnce(&[u8]) -> R>(&self, f: F) -> R {
        (**self).using_encoded(f)
    }
    fn encode(&self) -> Vec<u8> {
        (**self).encode()
    }
    fn encode_to<W: Output + ?Sized>(&self, dest: &mut W) {
        (**self).encode_to(dest)
    }
}
/// A marker trait for types that can be created solely from other decodable types.
///
/// The decoding of such type is assumed to be the same as the wrapped type.
pub trait WrapperTypeDecode: Sized {
    /// A wrapped type.
    type Wrapped: Into<Self>;
    // !INTERNAL USE ONLY!
    // This is a used to specialize `decode` for the wrapped type.
    #[doc(hidden)]
    #[inline]
    fn decode_wrapped<I: Input>(input: &mut I) -> Result<Self, Error> where Self::Wrapped: Decode {
        // Track recursion depth on the input to guard against unbounded nesting.
        input.descend_ref()?;
        let result = Ok(Self::Wrapped::decode(input)?.into());
        input.ascend_ref();
        result
    }
}
impl<T> WrapperTypeDecode for Box<T> {
    type Wrapped = T;
    // Overridden to decode directly into the heap allocation via `decode_into`,
    // avoiding a stack copy of `T` for large types.
    fn decode_wrapped<I: Input>(input: &mut I) -> Result<Self, Error> where Self::Wrapped: Decode {
        input.descend_ref()?;
        // Placement new is not yet stable, but we can just manually allocate a chunk of memory
        // and convert it to a `Box` ourselves.
        //
        // The explicit types here are written out for clarity.
        //
        // TODO: Use `Box::new_uninit` once that's stable.
        let layout = core::alloc::Layout::new::<MaybeUninit<T>>();
        let ptr: *mut MaybeUninit<T> = if layout.size() == 0 {
            // Zero-sized types must not go through the allocator.
            core::ptr::NonNull::dangling().as_ptr()
        } else {
            // SAFETY: Layout has a non-zero size so calling this is safe.
            let ptr: *mut u8 = unsafe {
                crate::alloc::alloc::alloc(layout)
            };
            if ptr.is_null() {
                crate::alloc::alloc::handle_alloc_error(layout);
            }
            ptr.cast()
        };
        // SAFETY: Constructing a `Box` from a piece of memory allocated with `std::alloc::alloc`
        // is explicitly allowed as long as it was allocated with the global allocator
        // and the memory layout matches.
        //
        // Constructing a `Box` from `NonNull::dangling` is also always safe as long
        // as the underlying type is zero-sized.
        let mut boxed: Box<MaybeUninit<T>> = unsafe { Box::from_raw(ptr) };
        // On error the `?` drops `boxed` as an (uninitialized) `MaybeUninit`,
        // which frees the allocation without running `T`'s destructor.
        T::decode_into(input, &mut boxed)?;
        // Decoding succeeded, so let's get rid of `MaybeUninit`.
        //
        // TODO: Use `Box::assume_init` once that's stable.
        let ptr: *mut MaybeUninit<T> = Box::into_raw(boxed);
        let ptr: *mut T = ptr.cast();
        // SAFETY: `MaybeUninit` doesn't affect the memory layout, so casting the pointer back
        // into a `Box` is safe.
        let boxed: Box<T> = unsafe { Box::from_raw(ptr) };
        input.ascend_ref();
        Ok(boxed)
    }
}
impl<T> WrapperTypeDecode for Rc<T> {
    type Wrapped = T;
    fn decode_wrapped<I: Input>(input: &mut I) -> Result<Self, Error> where Self::Wrapped: Decode {
        // TODO: This is inefficient; use `Rc::new_uninit` once that's stable.
        // Reuses the `Box` fast path, then converts `Box<T>` -> `Rc<T>` (one extra move).
        Box::<T>::decode(input).map(|output| output.into())
    }
}
#[cfg(target_has_atomic = "ptr")]
impl<T> WrapperTypeDecode for Arc<T> {
    type Wrapped = T;
    fn decode_wrapped<I: Input>(input: &mut I) -> Result<Self, Error> where Self::Wrapped: Decode {
        // TODO: This is inefficient; use `Arc::new_uninit` once that's stable.
        // Reuses the `Box` fast path, then converts `Box<T>` -> `Arc<T>` (one extra move).
        Box::<T>::decode(input).map(|output| output.into())
    }
}
// Blanket impl: every `WrapperTypeDecode` type decodes via its wrapped type.
impl<T, X> Decode for X where
    T: Decode + Into<X>,
    X: WrapperTypeDecode<Wrapped=T>,
{
    #[inline]
    fn decode<I: Input>(input: &mut I) -> Result<Self, Error> {
        Self::decode_wrapped(input)
    }
}
/// A macro that matches on a [`TypeInfo`] and expands a given macro per variant.
///
/// The first parameter to the given macro will be the type of variant (e.g. `u8`, `u32`, etc.) and other parameters
/// given to this macro.
///
/// The last parameter is the code that should be executed for the `Unknown` type info.
macro_rules! with_type_info {
    ( $type_info:expr, $macro:ident $( ( $( $params:ident ),* ) )?, { $( $unknown_variant:tt )* }, ) => {
        match $type_info {
            TypeInfo::U8 => { $macro!(u8 $( $( , $params )* )? ) },
            TypeInfo::I8 => { $macro!(i8 $( $( , $params )* )? ) },
            TypeInfo::U16 => { $macro!(u16 $( $( , $params )* )? ) },
            TypeInfo::I16 => { $macro!(i16 $( $( , $params )* )? ) },
            TypeInfo::U32 => { $macro!(u32 $( $( , $params )* )? ) },
            TypeInfo::I32 => { $macro!(i32 $( $( , $params )* )? ) },
            TypeInfo::U64 => { $macro!(u64 $( $( , $params )* )? ) },
            TypeInfo::I64 => { $macro!(i64 $( $( , $params )* )? ) },
            TypeInfo::U128 => { $macro!(u128 $( $( , $params )* )? ) },
            TypeInfo::I128 => { $macro!(i128 $( $( , $params )* )? ) },
            TypeInfo::Unknown => { $( $unknown_variant )* },
            // Float variants sit after `Unknown` (appended later); order is irrelevant to `match`.
            TypeInfo::F32 => { $macro!(f32 $( $( , $params )* )? ) },
            TypeInfo::F64 => { $macro!(f64 $( $( , $params )* )? ) },
        }
    };
}
/// Something that can be encoded as a reference.
pub trait EncodeAsRef<'a, T: 'a> {
    /// The reference type that is used for encoding.
    type RefType: Encode + From<&'a T>;
}
/// SCALE-encodes a `Result` as a one-byte tag (0 = `Ok`, 1 = `Err`)
/// followed by the payload of the active variant.
impl<T: Encode, E: Encode> Encode for Result<T, E> {
    fn size_hint(&self) -> usize {
        // One tag byte plus the payload's own hint.
        let payload = match self {
            Ok(value) => value.size_hint(),
            Err(error) => error.size_hint(),
        };
        1 + payload
    }
    fn encode_to<W: Output + ?Sized>(&self, dest: &mut W) {
        match self {
            Ok(value) => {
                dest.push_byte(0);
                value.encode_to(dest);
            },
            Err(error) => {
                dest.push_byte(1);
                error.encode_to(dest);
            },
        }
    }
}
// `Result<T, E>` may substitute for `Result<LikeT, LikeE>` whenever each
// variant's payload is itself encode-compatible.
impl<T, LikeT, E, LikeE> EncodeLike<Result<LikeT, LikeE>> for Result<T, E>
where
    T: EncodeLike<LikeT>,
    LikeT: Encode,
    E: EncodeLike<LikeE>,
    LikeE: Encode,
{}
/// Decodes a `Result`: a one-byte tag (0 = `Ok`, 1 = `Err`) followed by the payload.
impl<T: Decode, E: Decode> Decode for Result<T, E> {
    fn decode<I: Input>(input: &mut I) -> Result<Self, Error> {
        // Error messages fixed: the variant-byte message was missing the word
        // "decode", "Decode" was inconsistently capitalised, and `Result::Error`
        // named a variant that does not exist (it is `Result::Err`).
        match input.read_byte()
            .map_err(|e| e.chain("Could not decode variant byte for `Result`"))?
        {
            0 => Ok(
                Ok(T::decode(input).map_err(|e| e.chain("Could not decode `Result::Ok(T)`"))?)
            ),
            1 => Ok(
                Err(E::decode(input).map_err(|e| e.chain("Could not decode `Result::Err(E)`"))?)
            ),
            _ => Err("unexpected first byte decoding Result".into()),
        }
    }
}
/// Shim type because we can't do a specialised implementation for `Option<bool>` directly.
#[derive(Eq, PartialEq, Clone, Copy)]
pub struct OptionBool(pub Option<bool>);
impl fmt::Debug for OptionBool {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Render as the inner `Option<bool>` would.
        self.0.fmt(f)
    }
}
/// `OptionBool` is packed into a single byte:
/// `None` -> 0, `Some(true)` -> 1, `Some(false)` -> 2.
impl Encode for OptionBool {
    fn size_hint(&self) -> usize {
        // Always exactly one byte.
        1
    }
    fn using_encoded<R, F: FnOnce(&[u8]) -> R>(&self, f: F) -> R {
        let tag: u8 = match self.0 {
            None => 0,
            Some(true) => 1,
            Some(false) => 2,
        };
        f(&[tag])
    }
}
impl EncodeLike for OptionBool {}
/// Decodes the one-byte `OptionBool` representation; any tag above 2 is an error.
impl Decode for OptionBool {
    fn decode<I: Input>(input: &mut I) -> Result<Self, Error> {
        let inner = match input.read_byte()? {
            0 => None,
            1 => Some(true),
            2 => Some(false),
            _ => return Err("unexpected first byte decoding OptionBool".into()),
        };
        Ok(OptionBool(inner))
    }
}
// `Option<T>` substitutes for `Option<U>` when the payloads are encode-compatible.
impl<T: EncodeLike<U>, U: Encode> EncodeLike<Option<U>> for Option<T> {}
/// SCALE-encodes an `Option` as a one-byte tag (0 = `None`, 1 = `Some`)
/// followed by the payload when present.
impl<T: Encode> Encode for Option<T> {
    fn size_hint(&self) -> usize {
        match self {
            Some(value) => 1 + value.size_hint(),
            None => 1,
        }
    }
    fn encode_to<W: Output + ?Sized>(&self, dest: &mut W) {
        if let Some(value) = self {
            dest.push_byte(1);
            value.encode_to(dest);
        } else {
            dest.push_byte(0);
        }
    }
}
/// Decodes an `Option`: tag byte 0 yields `None`, 1 decodes the payload,
/// anything else is an error.
impl<T: Decode> Decode for Option<T> {
    fn decode<I: Input>(input: &mut I) -> Result<Self, Error> {
        let tag = input.read_byte()
            .map_err(|e| e.chain("Could not decode variant byte for `Option`"))?;
        match tag {
            0 => Ok(None),
            1 => T::decode(input)
                .map(Some)
                .map_err(|e| e.chain("Could not decode `Option::Some(T)`")),
            _ => Err("unexpected first byte decoding Option".into()),
        }
    }
}
// Generates `Encode`/`Decode` for the `NonZero*` integer types.
// Encoding forwards to the underlying primitive via `get()`; decoding
// rejects a zero value since it cannot be represented.
macro_rules! impl_for_non_zero {
    ( $( $name:ty ),* $(,)? ) => {
        $(
            impl Encode for $name {
                fn size_hint(&self) -> usize {
                    self.get().size_hint()
                }
                fn encode_to<W: Output + ?Sized>(&self, dest: &mut W) {
                    self.get().encode_to(dest)
                }
                fn encode(&self) -> Vec<u8> {
                    self.get().encode()
                }
                fn using_encoded<R, F: FnOnce(&[u8]) -> R>(&self, f: F) -> R {
                    self.get().using_encoded(f)
                }
            }
            impl EncodeLike for $name {}
            impl Decode for $name {
                fn decode<I: Input>(input: &mut I) -> Result<Self, Error> {
                    Self::new(Decode::decode(input)?)
                        .ok_or_else(|| Error::from("cannot create non-zero number from 0"))
                }
            }
        )*
    }
}
/// Encode the slice without prepending the len.
///
/// This is equivalent to encoding all the elements one by one, but it is optimized for some types.
pub(crate) fn encode_slice_no_len<T: Encode, W: Output + ?Sized>(slice: &[T], dest: &mut W) {
    macro_rules! encode_to {
        ( u8, $slice:ident, $dest:ident ) => {{
            // SAFETY-relevant: only reached when `T::TYPE_INFO` is `U8`, so `T` is `u8`.
            let typed = unsafe { mem::transmute::<&[T], &[u8]>(&$slice[..]) };
            $dest.write(&typed)
        }};
        ( i8, $slice:ident, $dest:ident ) => {{
            // `i8` has the same size as `u8`. We can just convert it here and write to the
            // dest buffer directly.
            let typed = unsafe { mem::transmute::<&[T], &[u8]>(&$slice[..]) };
            $dest.write(&typed)
        }};
        ( $ty:ty, $slice:ident, $dest:ident ) => {{
            // Multi-byte primitives can be written wholesale only when the
            // machine byte order already matches SCALE's little-endian format.
            if cfg!(target_endian = "little") {
                let typed = unsafe { mem::transmute::<&[T], &[$ty]>(&$slice[..]) };
                $dest.write(<[$ty] as AsByteSlice<$ty>>::as_byte_slice(typed))
            } else {
                // Big-endian host: fall back to per-element encoding.
                for item in $slice.iter() {
                    item.encode_to(dest);
                }
            }
        }};
    }
    with_type_info! {
        <T as Encode>::TYPE_INFO,
        encode_to(slice, dest),
        {
            // Non-primitive `T`: encode each element individually.
            for item in slice.iter() {
                item.encode_to(dest);
            }
        },
    }
}
/// Decode the vec (without a prepended len).
///
/// This is equivalent to decoding all elements one by one, but it is optimized in some
/// situations.
pub fn decode_vec_with_len<T: Decode, I: Input>(
    input: &mut I,
    len: usize,
) -> Result<Vec<T>, Error> {
    // Generic element-by-element path used for non-primitive `T` or big-endian hosts.
    fn decode_unoptimized<I: Input, T: Decode>(
        input: &mut I,
        items_len: usize,
    ) -> Result<Vec<T>, Error> {
        // Cap the pre-allocation by what the input could possibly contain,
        // so a malicious length prefix cannot force a huge allocation.
        let input_capacity = input.remaining_len()?
            .unwrap_or(MAX_PREALLOCATION)
            .checked_div(mem::size_of::<T>())
            .unwrap_or(0);
        let mut r = Vec::with_capacity(input_capacity.min(items_len));
        input.descend_ref()?;
        for _ in 0..items_len {
            r.push(T::decode(input)?);
        }
        input.ascend_ref();
        Ok(r)
    }
    macro_rules! decode {
        ( $ty:ty, $input:ident, $len:ident ) => {{
            if cfg!(target_endian = "little") || mem::size_of::<T>() == 1 {
                // Bulk-read as raw LE bytes, then reinterpret; only valid because
                // `T::TYPE_INFO` guarantees `T` is exactly `$ty`.
                let vec = read_vec_from_u8s::<_, $ty>($input, $len)?;
                Ok(unsafe { mem::transmute::<Vec<$ty>, Vec<T>>(vec) })
            } else {
                decode_unoptimized($input, $len)
            }
        }};
    }
    with_type_info! {
        <T as Decode>::TYPE_INFO,
        decode(input, len),
        {
            decode_unoptimized(input, len)
        },
    }
}
// Codec impls for all `NonZero*` integer widths.
impl_for_non_zero! {
    NonZeroI8,
    NonZeroI16,
    NonZeroI32,
    NonZeroI64,
    NonZeroI128,
    NonZeroU8,
    NonZeroU16,
    NonZeroU32,
    NonZeroU64,
    NonZeroU128,
}
// Fixed-size arrays encode as their elements back to back, with no length prefix
// (the length is part of the type).
impl<T: Encode, const N: usize> Encode for [T; N] {
    fn size_hint(&self) -> usize {
        // NOTE(review): only exact for fixed-size element encodings; for
        // variable-size elements this is merely a lower-bound hint.
        mem::size_of::<T>() * N
    }
    fn encode_to<W: Output + ?Sized>(&self, dest: &mut W) {
        encode_slice_no_len(&self[..], dest)
    }
}
// Computes `size_of::<T>() * N`, turning a would-be overflow into a
// compile-time error instead of silent wraparound.
const fn calculate_array_bytesize<T, const N: usize>() -> usize {
    struct AssertNotOverflow<T, const N: usize>(PhantomData<T>);
    impl<T, const N: usize> AssertNotOverflow<T, N> {
        // Evaluated at monomorphization time; fails the build on overflow.
        const OK: () = assert!(mem::size_of::<T>().checked_mul(N).is_some(), "array size overflow");
    }
    #[allow(clippy::let_unit_value)]
    let () = AssertNotOverflow::<T, N>::OK;
    mem::size_of::<T>() * N
}
impl<T: Decode, const N: usize> Decode for [T; N] {
    #[inline(always)]
    fn decode<I: Input>(input: &mut I) -> Result<Self, Error> {
        // Decode in place into uninitialized storage to avoid a second copy.
        let mut array = MaybeUninit::uninit();
        Self::decode_into(input, &mut array)?;
        // SAFETY: `decode_into` succeeded, so the array is initialized.
        unsafe {
            Ok(array.assume_init())
        }
    }
    fn decode_into<I: Input>(input: &mut I, dst: &mut MaybeUninit<Self>) -> Result<DecodeFinished, Error> {
        // Primitives whose in-memory layout matches the wire format can be
        // bulk-read; multi-byte ones only on little-endian hosts.
        let is_primitive = match <T as Decode>::TYPE_INFO {
            | TypeInfo::U8
            | TypeInfo::I8
            => true,
            | TypeInfo::U16
            | TypeInfo::I16
            | TypeInfo::U32
            | TypeInfo::I32
            | TypeInfo::U64
            | TypeInfo::I64
            | TypeInfo::U128
            | TypeInfo::I128
            | TypeInfo::F32
            | TypeInfo::F64
            => cfg!(target_endian = "little"),
            TypeInfo::Unknown => false
        };
        if is_primitive {
            // Let's read the array in bulk as that's going to be a lot
            // faster than just reading each element one-by-one.
            let ptr: *mut [T; N] = dst.as_mut_ptr();
            let ptr: *mut u8 = ptr.cast();
            let bytesize = calculate_array_bytesize::<T, N>();
            // TODO: This is potentially slow; it'd be better if `Input` supported
            // reading directly into uninitialized memory.
            //
            // SAFETY: The pointer is valid and points to a memory `bytesize` bytes big.
            unsafe {
                ptr.write_bytes(0, bytesize);
            }
            // SAFETY: We've zero-initialized everything so creating a slice here is safe.
            let slice: &mut [u8] = unsafe {
                core::slice::from_raw_parts_mut(ptr, bytesize)
            };
            input.read(slice)?;
            // SAFETY: We've initialized the whole slice so calling this is safe.
            unsafe {
                return Ok(DecodeFinished::assert_decoding_finished());
            }
        }
        // Generic path: decode element by element into the uninitialized slots.
        let slice: &mut [MaybeUninit<T>; N] = {
            let ptr: *mut [T; N] = dst.as_mut_ptr();
            let ptr: *mut [MaybeUninit<T>; N] = ptr.cast();
            // SAFETY: Casting `&mut MaybeUninit<[T; N]>` into `&mut [MaybeUninit<T>; N]` is safe.
            unsafe { &mut *ptr }
        };
        /// A wrapper type to make sure the partially read elements are always
        /// dropped in case an error occurs or the underlying `decode` implementation panics.
        struct State<'a, T, const N: usize> {
            count: usize,
            slice: &'a mut [MaybeUninit<T>; N]
        }
        impl<'a, T, const N: usize> Drop for State<'a, T, N> {
            fn drop(&mut self) {
                if !mem::needs_drop::<T>() {
                    // If the types don't actually need to be dropped then don't even
                    // try to run the loop below.
                    //
                    // Most likely won't make a difference in release mode, but will
                    // make a difference in debug mode.
                    return;
                }
                // TODO: Use `MaybeUninit::slice_assume_init_mut` + `core::ptr::drop_in_place`
                // once `slice_assume_init_mut` is stable.
                for item in &mut self.slice[..self.count] {
                    // SAFETY: Each time we've read a new element we incremented `count`,
                    // and we only drop at most `count` elements here,
                    // so all of the elements we drop here are valid.
                    unsafe {
                        item.assume_init_drop();
                    }
                }
            }
        }
        let mut state = State {
            count: 0,
            slice
        };
        while state.count < state.slice.len() {
            T::decode_into(input, &mut state.slice[state.count])?;
            state.count += 1;
        }
        // We've successfully read everything, so disarm the `Drop` impl.
        mem::forget(state);
        // SAFETY: We've initialized the whole slice so calling this is safe.
        unsafe {
            Ok(DecodeFinished::assert_decoding_finished())
        }
    }
    fn skip<I: Input>(input: &mut I) -> Result<(), Error> {
        if Self::encoded_fixed_size().is_some() {
            // Should skip the bytes, but Input does not support skip.
            for _ in 0..N {
                T::skip(input)?;
            }
        } else {
            // Variable-size elements: must fully decode to find the end.
            Self::decode(input)?;
        }
        Ok(())
    }
    fn encoded_fixed_size() -> Option<usize> {
        Some(<T as Decode>::encoded_fixed_size()? * N)
    }
}
// Arrays substitute for arrays of the same length with encode-compatible elements.
impl<T: EncodeLike<U>, U: Encode, const N: usize> EncodeLike<[U; N]> for [T; N] {}
// `str` encodes as its UTF-8 bytes: a compact length prefix followed by the bytes
// (delegating every method to the `[u8]` impl).
impl Encode for str {
    fn size_hint(&self) -> usize {
        self.as_bytes().size_hint()
    }
    fn encode_to<W: Output + ?Sized>(&self, dest: &mut W) {
        self.as_bytes().encode_to(dest)
    }
    fn encode(&self) -> Vec<u8> {
        self.as_bytes().encode()
    }
    fn using_encoded<R, F: FnOnce(&[u8]) -> R>(&self, f: F) -> R {
        self.as_bytes().using_encoded(f)
    }
}
// `Cow` decodes into its owned form; borrowing from the input is not possible here.
impl<'a, T: ToOwned + ?Sized> Decode for Cow<'a, T>
    where <T as ToOwned>::Owned: Decode,
{
    fn decode<I: Input>(input: &mut I) -> Result<Self, Error> {
        Ok(Cow::Owned(Decode::decode(input)?))
    }
}
// `PhantomData` carries no runtime data, so it encodes to nothing and
// decodes from nothing.
impl<T> EncodeLike for PhantomData<T> {}
impl<T> Encode for PhantomData<T> {
    fn encode_to<W: Output + ?Sized>(&self, _dest: &mut W) {}
}
impl<T> Decode for PhantomData<T> {
    fn decode<I: Input>(_input: &mut I) -> Result<Self, Error> {
        Ok(PhantomData)
    }
}
impl Decode for String {
    fn decode<I: Input>(input: &mut I) -> Result<Self, Error> {
        // Decode the raw bytes first, then validate them as UTF-8.
        Self::from_utf8(Vec::decode(input)?).map_err(|_| "Invalid utf8 sequence".into())
    }
}
/// Writes the compact encoding of `len` to `dest`.
///
/// Fails when `len` exceeds `u32::MAX`, the largest collection length SCALE supports.
pub(crate) fn compact_encode_len_to<W: Output + ?Sized>(dest: &mut W, len: usize) -> Result<(), Error> {
    if len > u32::MAX as usize {
        return Err("Attempted to serialize a collection with too many elements.".into());
    }
    Compact(len as u32).encode_to(dest);
    Ok(())
}
// Slices encode as a compact length prefix followed by the raw elements.
impl<T: Encode> Encode for [T] {
    fn size_hint(&self) -> usize {
        // Prefix (at most 4 bytes assumed) plus the in-memory payload size.
        mem::size_of::<u32>() + mem::size_of_val(self)
    }
    fn encode_to<W: Output + ?Sized>(&self, dest: &mut W) {
        compact_encode_len_to(dest, self.len()).expect("Compact encodes length");
        encode_slice_no_len(self, dest)
    }
}
/// Create a `Vec<T>` by casting directly from a buffer of read `u8`s
///
/// The encoding of `T` must be equal to its binary representation, and size of `T` must be less or
/// equal to [`MAX_PREALLOCATION`].
pub(crate) fn read_vec_from_u8s<I, T>(input: &mut I, items_len: usize) -> Result<Vec<T>, Error>
where
    I: Input,
    T: ToMutByteSlice + Default + Clone,
{
    debug_assert!(MAX_PREALLOCATION >= mem::size_of::<T>(), "Invalid precondition");
    let byte_len = items_len.checked_mul(mem::size_of::<T>())
        .ok_or("Item is too big and cannot be allocated")?;
    let input_len = input.remaining_len()?;
    // If there is input len and it cannot be pre-allocated then return directly.
    if input_len.map(|l| l < byte_len).unwrap_or(false) {
        return Err("Not enough data to decode vector".into())
    }
    // In both these branches we're going to be creating and resizing a Vec<T>,
    // but casting it to a &mut [u8] for reading.
    // Note: we checked that if input_len is some then it can preallocated.
    let r = if input_len.is_some() || byte_len < MAX_PREALLOCATION {
        // Here we pre-allocate the whole buffer.
        let mut items: Vec<T> = vec![Default::default(); items_len];
        let bytes_slice = items.as_mut_byte_slice();
        input.read(bytes_slice)?;
        items
    } else {
        // Unknown input length: grow incrementally so a bogus huge `items_len`
        // cannot force a giant allocation before any data is actually read.
        // An allowed number of preallocated item.
        // Note: `MAX_PREALLOCATION` is expected to be more or equal to size of `T`, precondition.
        let max_preallocated_items = MAX_PREALLOCATION / mem::size_of::<T>();
        // Here we pre-allocate only the maximum pre-allocation
        let mut items: Vec<T> = vec![];
        let mut items_remains = items_len;
        while items_remains > 0 {
            let items_len_read = max_preallocated_items.min(items_remains);
            let items_len_filled = items.len();
            let items_new_size = items_len_filled + items_len_read;
            items.reserve_exact(items_len_read);
            // NOTE(review): `set_len` exposes uninitialized `T`s until `read`
            // overwrites them below — relies on `T` being plain-old-data
            // (guaranteed by the `ToMutByteSlice` bound); confirm if bounds change.
            unsafe {
                items.set_len(items_new_size);
            }
            let bytes_slice = items.as_mut_byte_slice();
            let bytes_len_filled = items_len_filled * mem::size_of::<T>();
            input.read(&mut bytes_slice[bytes_len_filled..])?;
            items_remains = items_remains.saturating_sub(items_len_read);
        }
        items
    };
    Ok(r)
}
// `Vec<T>` derefs to `[T]`, so it reuses the slice encoding; it is
// encode-compatible with slices of compatible element types.
impl<T> WrapperTypeEncode for Vec<T> {}
impl<T: EncodeLike<U>, U: Encode> EncodeLike<Vec<U>> for Vec<T> {}
impl<T: EncodeLike<U>, U: Encode> EncodeLike<&[U]> for Vec<T> {}
impl<T: EncodeLike<U>, U: Encode> EncodeLike<Vec<U>> for &[T] {}
/// Decodes a `Vec`: a compact `u32` element count followed by the elements.
impl<T: Decode> Decode for Vec<T> {
    fn decode<I: Input>(input: &mut I) -> Result<Self, Error> {
        let Compact(len) = <Compact<u32>>::decode(input)?;
        decode_vec_with_len(input, len as usize)
    }
}
// Generates `Encode`/`Decode`/`EncodeLike` for collections that are encoded as a
// compact length prefix followed by their iteration order, and decoded by
// collecting from an iterator of decoded elements.
macro_rules! impl_codec_through_iterator {
    ($(
        $type:ident
        { $( $generics:ident $( : $decode_additional:ident )? ),* }
        { $( $type_like_generics:ident ),* }
        { $( $impl_like_generics:tt )* }
    )*) => {$(
        impl<$( $generics: Encode ),*> Encode for $type<$( $generics, )*> {
            fn size_hint(&self) -> usize {
                mem::size_of::<u32>() $( + mem::size_of::<$generics>() * self.len() )*
            }
            fn encode_to<W: Output + ?Sized>(&self, dest: &mut W) {
                compact_encode_len_to(dest, self.len()).expect("Compact encodes length");
                for i in self.iter() {
                    i.encode_to(dest);
                }
            }
        }
        impl<$( $generics: Decode $( + $decode_additional )? ),*> Decode
            for $type<$( $generics, )*>
        {
            fn decode<I: Input>(input: &mut I) -> Result<Self, Error> {
                <Compact<u32>>::decode(input).and_then(move |Compact(len)| {
                    // Guard recursion depth while decoding nested elements.
                    input.descend_ref()?;
                    let result = Result::from_iter((0..len).map(|_| Decode::decode(input)));
                    input.ascend_ref();
                    result
                })
            }
        }
        impl<$( $impl_like_generics )*> EncodeLike<$type<$( $type_like_generics ),*>>
            for $type<$( $generics ),*> {}
        impl<$( $impl_like_generics )*> EncodeLike<&[( $( $type_like_generics, )* )]>
            for $type<$( $generics ),*> {}
        impl<$( $impl_like_generics )*> EncodeLike<$type<$( $type_like_generics ),*>>
            for &[( $( $generics, )* )] {}
    )*}
}
// Codec impls for the ordered/linked collections; `Ord` is required to rebuild
// the sorted/heap structures on decode.
impl_codec_through_iterator! {
    BTreeMap { K: Ord, V } { LikeK, LikeV}
    { K: EncodeLike<LikeK>, LikeK: Encode, V: EncodeLike<LikeV>, LikeV: Encode }
    BTreeSet { T: Ord } { LikeT }
    { T: EncodeLike<LikeT>, LikeT: Encode }
    LinkedList { T } { LikeT }
    { T: EncodeLike<LikeT>, LikeT: Encode }
    BinaryHeap { T: Ord } { LikeT }
    { T: EncodeLike<LikeT>, LikeT: Encode }
}
// `VecDeque` shares the `Vec`/slice wire format, so the types are interchangeable
// for encoding when their elements are.
impl<T: Encode> EncodeLike for VecDeque<T> {}
impl<T: EncodeLike<U>, U: Encode> EncodeLike<&[U]> for VecDeque<T> {}
impl<T: EncodeLike<U>, U: Encode> EncodeLike<VecDeque<U>> for &[T] {}
impl<T: EncodeLike<U>, U: Encode> EncodeLike<Vec<U>> for VecDeque<T> {}
impl<T: EncodeLike<U>, U: Encode> EncodeLike<VecDeque<U>> for Vec<T> {}
impl<T: Encode> Encode for VecDeque<T> {
    fn size_hint(&self) -> usize {
        mem::size_of::<u32>() + mem::size_of::<T>() * self.len()
    }
    fn encode_to<W: Output + ?Sized>(&self, dest: &mut W) {
        compact_encode_len_to(dest, self.len()).expect("Compact encodes length");
        macro_rules! encode_to {
            ( $ty:ty, $self:ident, $dest:ident ) => {{
                if cfg!(target_endian = "little") || mem::size_of::<T>() == 1 {
                    // A deque's storage is a ring buffer; `as_slices` exposes its
                    // (up to) two contiguous halves, written front half first.
                    let slices = $self.as_slices();
                    // Valid because `T::TYPE_INFO` guarantees `T` is exactly `$ty`.
                    let typed = unsafe {
                        core::mem::transmute::<(&[T], &[T]), (&[$ty], &[$ty])>(slices)
                    };
                    $dest.write(<[$ty] as AsByteSlice<$ty>>::as_byte_slice(typed.0));
                    $dest.write(<[$ty] as AsByteSlice<$ty>>::as_byte_slice(typed.1));
                } else {
                    // Big-endian host: encode per element.
                    for item in $self {
                        item.encode_to($dest);
                    }
                }
            }};
        }
        with_type_info! {
            <T as Encode>::TYPE_INFO,
            encode_to(self, dest),
            {
                for item in self {
                    item.encode_to(dest);
                }
            },
        }
    }
}
/// Decodes a `VecDeque` via `Vec<T>`; the wire formats are identical.
impl<T: Decode> Decode for VecDeque<T> {
    fn decode<I: Input>(input: &mut I) -> Result<Self, Error> {
        let elements = <Vec<T>>::decode(input)?;
        Ok(Self::from(elements))
    }
}
// The unit type encodes to zero bytes.
impl EncodeLike for () {}
impl Encode for () {
    fn encode_to<W: Output + ?Sized>(&self, _dest: &mut W) {
    }
    fn using_encoded<R, F: FnOnce(&[u8]) -> R>(&self, f: F) -> R {
        f(&[])
    }
    fn encode(&self) -> Vec<u8> {
        Vec::new()
    }
}
// Decoding the unit type consumes no input.
impl Decode for () {
    fn decode<I: Input>(_: &mut I) -> Result<(), Error> {
        Ok(())
    }
}
// Generates `DecodeLength` for length-prefixed collections: only the compact
// `u32` prefix is decoded, the elements are never touched.
macro_rules! impl_len {
    ( $( $type:ident< $($g:ident),* > ),* ) => { $(
        impl<$($g),*> DecodeLength for $type<$($g),*> {
            fn len(mut self_encoded: &[u8]) -> Result<usize, Error> {
                usize::try_from(u32::from(Compact::<u32>::decode(&mut self_encoded)?))
                    .map_err(|_| "Failed convert decoded size into usize.".into())
            }
        }
    )*}
}
// Collection types that support compact decode length.
impl_len!(Vec<T>, BTreeSet<T>, BTreeMap<K, V>, VecDeque<T>, BinaryHeap<T>, LinkedList<T>);
// Recursively generates `Encode`/`Decode`/`DecodeLength`/`EncodeLike` for tuples:
// the first rule handles the 1-tuple base case, the second peels one element off
// and recurses, so every arity from 1 up to the initial invocation is covered.
// Tuples encode as their fields in order with no prefix; `DecodeLength`
// delegates to the first field.
macro_rules! tuple_impl {
    (
        ($one:ident, $extra:ident),
    ) => {
        impl<$one: Encode> Encode for ($one,) {
            fn size_hint(&self) -> usize {
                self.0.size_hint()
            }
            fn encode_to<T: Output + ?Sized>(&self, dest: &mut T) {
                self.0.encode_to(dest);
            }
            fn encode(&self) -> Vec<u8> {
                self.0.encode()
            }
            fn using_encoded<R, F: FnOnce(&[u8]) -> R>(&self, f: F) -> R {
                self.0.using_encoded(f)
            }
        }
        impl<$one: Decode> Decode for ($one,) {
            fn decode<I: Input>(input: &mut I) -> Result<Self, Error> {
                match $one::decode(input) {
                    Err(e) => Err(e),
                    Ok($one) => Ok(($one,)),
                }
            }
        }
        impl<$one: DecodeLength> DecodeLength for ($one,) {
            fn len(self_encoded: &[u8]) -> Result<usize, Error> {
                $one::len(self_encoded)
            }
        }
        impl<$one: EncodeLike<$extra>, $extra: Encode> crate::EncodeLike<($extra,)> for ($one,) {}
    };
    (($first:ident, $fextra:ident), $( ( $rest:ident, $rextra:ident ), )+) => {
        impl<$first: Encode, $($rest: Encode),+> Encode for ($first, $($rest),+) {
            fn size_hint(&self) -> usize {
                let (
                    ref $first,
                    $(ref $rest),+
                ) = *self;
                $first.size_hint()
                $( + $rest.size_hint() )+
            }
            fn encode_to<T: Output + ?Sized>(&self, dest: &mut T) {
                let (
                    ref $first,
                    $(ref $rest),+
                ) = *self;
                $first.encode_to(dest);
                $($rest.encode_to(dest);)+
            }
        }
        impl<$first: Decode, $($rest: Decode),+> Decode for ($first, $($rest),+) {
            fn decode<INPUT: Input>(input: &mut INPUT) -> Result<Self, super::Error> {
                Ok((
                    match $first::decode(input) {
                        Ok(x) => x,
                        Err(e) => return Err(e),
                    },
                    $(match $rest::decode(input) {
                        Ok(x) => x,
                        Err(e) => return Err(e),
                    },)+
                ))
            }
        }
        impl<$first: EncodeLike<$fextra>, $fextra: Encode,
            $($rest: EncodeLike<$rextra>, $rextra: Encode),+> crate::EncodeLike<($fextra, $( $rextra ),+)>
            for ($first, $($rest),+) {}
        impl<$first: DecodeLength, $($rest),+> DecodeLength for ($first, $($rest),+) {
            fn len(self_encoded: &[u8]) -> Result<usize, Error> {
                $first::len(self_encoded)
            }
        }
        // Recurse with the first pair removed, generating the next-smaller arity.
        tuple_impl!( $( ($rest, $rextra), )+ );
    }
}
// Instantiates the tuple impls for arities 1 through 18 (each pair is a
// tuple-element type parameter plus its `EncodeLike` counterpart).
#[allow(non_snake_case)]
mod inner_tuple_impl {
    use super::*;
    tuple_impl!(
        (A0, A1), (B0, B1), (C0, C1), (D0, D1), (E0, E1), (F0, F1), (G0, G1), (H0, H1), (I0, I1),
        (J0, J1), (K0, K1), (L0, L1), (M0, M1), (N0, N1), (O0, O1), (P0, P1), (Q0, Q1), (R0, R1),
    );
}
// Generates the codec for multi-byte primitives: fixed-size little-endian
// byte representation via `to_le_bytes`/`from_le_bytes`.
macro_rules! impl_endians {
    ( $( $t:ty; $ty_info:ident ),* ) => { $(
        impl EncodeLike for $t {}
        impl Encode for $t {
            const TYPE_INFO: TypeInfo = TypeInfo::$ty_info;
            fn size_hint(&self) -> usize {
                mem::size_of::<$t>()
            }
            fn using_encoded<R, F: FnOnce(&[u8]) -> R>(&self, f: F) -> R {
                let buf = self.to_le_bytes();
                f(&buf[..])
            }
        }
        impl Decode for $t {
            const TYPE_INFO: TypeInfo = TypeInfo::$ty_info;
            fn decode<I: Input>(input: &mut I) -> Result<Self, Error> {
                let mut buf = [0u8; mem::size_of::<$t>()];
                input.read(&mut buf)?;
                Ok(<$t>::from_le_bytes(buf))
            }
            fn encoded_fixed_size() -> Option<usize> {
                Some(mem::size_of::<$t>())
            }
        }
    )* }
}
// Generates the codec for single-byte primitives (`u8`/`i8`): encoded as the
// byte itself, so endianness is irrelevant.
// NOTE(review): unlike `impl_endians`, this does not provide `encoded_fixed_size`,
// so `u8`/`i8` report `None` despite having a fixed one-byte encoding — the
// `Decode` docs explicitly permit this.
macro_rules! impl_one_byte {
    ( $( $t:ty; $ty_info:ident ),* ) => { $(
        impl EncodeLike for $t {}
        impl Encode for $t {
            const TYPE_INFO: TypeInfo = TypeInfo::$ty_info;
            fn size_hint(&self) -> usize {
                mem::size_of::<$t>()
            }
            fn using_encoded<R, F: FnOnce(&[u8]) -> R>(&self, f: F) -> R {
                f(&[*self as u8][..])
            }
        }
        impl Decode for $t {
            const TYPE_INFO: TypeInfo = TypeInfo::$ty_info;
            fn decode<I: Input>(input: &mut I) -> Result<Self, Error> {
                Ok(input.read_byte()? as $t)
            }
        }
    )* }
}
// Instantiate the primitive codecs: multi-byte integers, single-byte integers, floats.
impl_endians!(u16; U16, u32; U32, u64; U64, u128; U128, i16; I16, i32; I32, i64; I64, i128; I128);
impl_one_byte!(u8; U8, i8; I8);
impl_endians!(f32; F32, f64; F64);
// `bool` encodes as one byte: 0 for `false`, 1 for `true`.
impl EncodeLike for bool {}
impl Encode for bool {
    fn size_hint(&self) -> usize {
        mem::size_of::<bool>()
    }
    fn using_encoded<R, F: FnOnce(&[u8]) -> R>(&self, f: F) -> R {
        f(&[*self as u8][..])
    }
}
/// Decodes a `bool` from one byte; only 0 (`false`) and 1 (`true`) are valid.
impl Decode for bool {
    fn decode<I: Input>(input: &mut I) -> Result<Self, Error> {
        match input.read_byte()? {
            0 => Ok(false),
            1 => Ok(true),
            _ => Err("Invalid boolean representation".into()),
        }
    }
    fn encoded_fixed_size() -> Option<usize> {
        // Always exactly one byte.
        Some(1)
    }
}
// `Duration` encodes as the `(secs: u64, nanos: u32)` pair.
impl Encode for Duration {
    fn size_hint(&self) -> usize {
        mem::size_of::<u64>() + mem::size_of::<u32>()
    }
    fn encode(&self) -> Vec<u8> {
        let secs = self.as_secs();
        // Always < 10^9 by `Duration`'s own invariant.
        let nanos = self.subsec_nanos();
        (secs, nanos).encode()
    }
}
impl Decode for Duration {
    fn decode<I: Input>(input: &mut I) -> Result<Self, Error> {
        let (secs, nanos) = <(u64, u32)>::decode(input)
            .map_err(|e| e.chain("Could not decode `Duration(u64, u32)`"))?;
        // Reject non-canonical encodings: 10^9 or more nanoseconds would make
        // `Duration::new` carry into the seconds (and can overflow them).
        if nanos < A_BILLION {
            Ok(Duration::new(secs, nanos))
        } else {
            Err("Could not decode `Duration`: Number of nanoseconds should not be higher than 10^9.".into())
        }
    }
}
impl EncodeLike for Duration {}
impl<T> Encode for Range<T>
where
    T: Encode
{
    /// Size estimate: two `T` values back to back.
    fn size_hint(&self) -> usize {
        mem::size_of::<T>() * 2
    }
    /// Encoded as the `(start, end)` tuple.
    fn encode(&self) -> Vec<u8> {
        let bounds = (&self.start, &self.end);
        bounds.encode()
    }
}
impl<T> Decode for Range<T>
where
    T: Decode
{
    /// Decodes the `(start, end)` tuple written by the matching `Encode` impl.
    fn decode<I: Input>(input: &mut I) -> Result<Self, Error> {
        let decoded = <(T, T)>::decode(input)
            .map_err(|e| e.chain("Could not decode `Range<T>`"))?;
        Ok(Self { start: decoded.0, end: decoded.1 })
    }
}
impl<T> Encode for RangeInclusive<T>
where
    T: Encode
{
    /// Size estimate: two `T` values back to back.
    fn size_hint(&self) -> usize {
        mem::size_of::<T>() * 2
    }
    /// Encoded as the `(start, end)` tuple of the inclusive bounds.
    fn encode(&self) -> Vec<u8> {
        let bounds = (self.start(), self.end());
        bounds.encode()
    }
}
impl<T> Decode for RangeInclusive<T>
where
    T: Decode
{
    /// Decodes the `(start, end)` bounds written by the matching `Encode` impl.
    fn decode<I: Input>(input: &mut I) -> Result<Self, Error> {
        let (low, high) = <(T, T)>::decode(input)
            .map_err(|e| e.chain("Could not decode `RangeInclusive<T>`"))?;
        Ok(RangeInclusive::new(low, high))
    }
}
#[cfg(test)]
mod tests {
    // Round-trip and exact-wire-format regression tests for the codec impls.
    use super::*;
    use std::borrow::Cow;
    #[test]
    fn vec_is_sliceable() {
        let v = b"Hello world".to_vec();
        // 0x2c == 11 << 2: the compact-encoded length prefix of the 11 bytes.
        v.using_encoded(|ref slice|
            assert_eq!(slice, &b"\x2cHello world")
        );
    }
    #[test]
    fn encode_borrowed_tuple() {
        let x = vec![1u8, 2, 3, 4];
        let y = 128i64;
        let encoded = (&x, &y).encode();
        assert_eq!((x, y), Decode::decode(&mut &encoded[..]).unwrap());
    }
    #[test]
    fn cow_works() {
        let x = &[1u32, 2, 3, 4, 5, 6][..];
        let y = Cow::Borrowed(&x);
        assert_eq!(x.encode(), y.encode());
        let z: Cow<'_, [u32]> = Cow::decode(&mut &x.encode()[..]).unwrap();
        assert_eq!(*z, *x);
    }
    #[test]
    fn cow_string_works() {
        let x = "Hello world!";
        let y = Cow::Borrowed(&x);
        assert_eq!(x.encode(), y.encode());
        let z: Cow<'_, str> = Cow::decode(&mut &x.encode()[..]).unwrap();
        assert_eq!(*z, *x);
    }
    // Renders bytes as space-separated lowercase hex for readable assertions.
    fn hexify(bytes: &[u8]) -> String {
        bytes.iter().map(|ref b| format!("{:02x}", b)).collect::<Vec<String>>().join(" ")
    }
    #[test]
    fn string_encoded_as_expected() {
        let value = String::from("Hello, World!");
        let encoded = value.encode();
        assert_eq!(hexify(&encoded), "34 48 65 6c 6c 6f 2c 20 57 6f 72 6c 64 21");
        assert_eq!(<String>::decode(&mut &encoded[..]).unwrap(), value);
    }
    #[test]
    fn vec_of_u8_encoded_as_expected() {
        let value = vec![0u8, 1, 1, 2, 3, 5, 8, 13, 21, 34];
        let encoded = value.encode();
        assert_eq!(hexify(&encoded), "28 00 01 01 02 03 05 08 0d 15 22");
        assert_eq!(<Vec<u8>>::decode(&mut &encoded[..]).unwrap(), value);
    }
    #[test]
    fn vec_of_i16_encoded_as_expected() {
        let value = vec![0i16, 1, -1, 2, -2, 3, -3];
        let encoded = value.encode();
        assert_eq!(hexify(&encoded), "1c 00 00 01 00 ff ff 02 00 fe ff 03 00 fd ff");
        assert_eq!(<Vec<i16>>::decode(&mut &encoded[..]).unwrap(), value);
    }
    #[test]
    fn vec_of_option_int_encoded_as_expected() {
        let value = vec![Some(1i8), Some(-1), None];
        let encoded = value.encode();
        assert_eq!(hexify(&encoded), "0c 01 01 01 ff 00");
        assert_eq!(<Vec<Option<i8>>>::decode(&mut &encoded[..]).unwrap(), value);
    }
    #[test]
    fn vec_of_option_bool_encoded_as_expected() {
        let value = vec![OptionBool(Some(true)), OptionBool(Some(false)), OptionBool(None)];
        let encoded = value.encode();
        assert_eq!(hexify(&encoded), "0c 01 02 00");
        assert_eq!(<Vec<OptionBool>>::decode(&mut &encoded[..]).unwrap(), value);
    }
    #[cfg(feature = "bytes")]
    #[test]
    fn bytes_works_as_expected() {
        let input = bytes::Bytes::from_static(b"hello");
        let encoded = Encode::encode(&input);
        let encoded_vec = input.to_vec().encode();
        // `Bytes` must encode identically to a plain `Vec<u8>`.
        assert_eq!(encoded, encoded_vec);
        assert_eq!(
            &b"hello"[..],
            bytes::Bytes::decode(&mut &encoded[..]).unwrap(),
        );
    }
    #[cfg(feature = "bytes")]
    #[test]
    fn bytes_deserialized_from_bytes_is_zero_copy() {
        let encoded = bytes::Bytes::from(Encode::encode(&b"hello".to_vec()));
        let decoded = decode_from_bytes::<bytes::Bytes>(encoded.clone()).unwrap();
        assert_eq!(decoded, &b"hello"[..]);
        // The `slice_ref` will panic if the `decoded` is not a subslice of `encoded`.
        assert_eq!(encoded.slice_ref(&decoded), &b"hello"[..]);
    }
    #[cfg(feature = "bytes")]
    #[test]
    fn nested_bytes_deserialized_from_bytes_is_zero_copy() {
        let encoded = bytes::Bytes::from(Encode::encode(&Some(b"hello".to_vec())));
        let decoded = decode_from_bytes::<Option<bytes::Bytes>>(encoded.clone()).unwrap();
        let decoded = decoded.as_ref().unwrap();
        assert_eq!(decoded, &b"hello"[..]);
        // The `slice_ref` will panic if the `decoded` is not a subslice of `encoded`.
        assert_eq!(encoded.slice_ref(&decoded), &b"hello"[..]);
    }
    // Asserts that `DecodeLength` reads the element count from the prefix.
    fn test_encode_length<T: Encode + Decode + DecodeLength>(thing: &T, len: usize) {
        assert_eq!(<T as DecodeLength>::len(&thing.encode()[..]).unwrap(), len);
    }
    #[test]
    fn len_works_for_decode_collection_types() {
        let vector = vec![10; 10];
        let mut btree_map: BTreeMap<u32, u32> = BTreeMap::new();
        btree_map.insert(1, 1);
        btree_map.insert(2, 2);
        let mut btree_set: BTreeSet<u32> = BTreeSet::new();
        btree_set.insert(1);
        btree_set.insert(2);
        let mut vd = VecDeque::new();
        vd.push_front(1);
        vd.push_front(2);
        let mut bh = BinaryHeap::new();
        bh.push(1);
        bh.push(2);
        let mut ll = LinkedList::new();
        ll.push_back(1);
        ll.push_back(2);
        let t1: (Vec<_>,) = (vector.clone(),);
        let t2: (Vec<_>, u32) = (vector.clone(), 3u32);
        test_encode_length(&vector, 10);
        test_encode_length(&btree_map, 2);
        test_encode_length(&btree_set, 2);
        test_encode_length(&vd, 2);
        test_encode_length(&bh, 2);
        test_encode_length(&ll, 2);
        // Tuples report the length of their first element's collection.
        test_encode_length(&t1, 10);
        test_encode_length(&t2, 10);
    }
    #[test]
    fn vec_of_string_encoded_as_expected() {
        let value = vec![
            "Hamlet".to_owned(),
            "Война и мир".to_owned(),
            "三国演义".to_owned(),
            "أَلْف لَيْلَة وَلَيْلَة".to_owned()
        ];
        let encoded = value.encode();
        assert_eq!(hexify(&encoded), "10 18 48 61 6d 6c 65 74 50 d0 92 d0 be d0 b9 d0 bd d0 b0 20 d0 \
            b8 20 d0 bc d0 b8 d1 80 30 e4 b8 89 e5 9b bd e6 bc 94 e4 b9 89 bc d8 a3 d9 8e d9 84 d9 92 \
            d9 81 20 d9 84 d9 8e d9 8a d9 92 d9 84 d9 8e d8 a9 20 d9 88 d9 8e d9 84 d9 8e d9 8a d9 92 \
            d9 84 d9 8e d8 a9 e2 80 8e");
        assert_eq!(<Vec<String>>::decode(&mut &encoded[..]).unwrap(), value);
    }
    // Newtype used to exercise the `WrapperType{Encode,Decode}` fast paths.
    #[derive(Debug, PartialEq)]
    struct MyWrapper(Compact<u32>);
    impl Deref for MyWrapper {
        type Target = Compact<u32>;
        fn deref(&self) -> &Self::Target { &self.0 }
    }
    impl WrapperTypeEncode for MyWrapper {}
    impl From<Compact<u32>> for MyWrapper {
        fn from(c: Compact<u32>) -> Self { MyWrapper(c) }
    }
    impl WrapperTypeDecode for MyWrapper {
        type Wrapped = Compact<u32>;
    }
    #[test]
    fn should_work_for_wrapper_types() {
        // 0b1100 == 3 << 2: the compact encoding of 3.
        let result = vec![0b1100];
        assert_eq!(MyWrapper(3u32.into()).encode(), result);
        assert_eq!(MyWrapper::decode(&mut &*result).unwrap(), MyWrapper(3_u32.into()));
    }
    #[test]
    fn codec_vec_deque_u8_and_u16() {
        let mut v_u8 = VecDeque::new();
        let mut v_u16 = VecDeque::new();
        // Mix front- and back-pushes so the deque's ring buffer wraps.
        for i in 0..50 {
            v_u8.push_front(i as u8);
            v_u16.push_front(i as u16);
        }
        for i in 50..100 {
            v_u8.push_back(i as u8);
            v_u16.push_back(i as u16);
        }
        assert_eq!(Decode::decode(&mut &v_u8.encode()[..]), Ok(v_u8));
        assert_eq!(Decode::decode(&mut &v_u16.encode()[..]), Ok(v_u16));
    }
    #[test]
    fn codec_iterator() {
        let t1: BTreeSet<u32> = FromIterator::from_iter((0..10).flat_map(|i| 0..i));
        let t2: LinkedList<u32> = FromIterator::from_iter((0..10).flat_map(|i| 0..i));
        let t3: BinaryHeap<u32> = FromIterator::from_iter((0..10).flat_map(|i| 0..i));
        let t4: BTreeMap<u16, u32> = FromIterator::from_iter(
            (0..10)
                .flat_map(|i| 0..i)
                .map(|i| (i as u16, i + 10))
        );
        let t5: BTreeSet<Vec<u8>> = FromIterator::from_iter((0..10).map(|i| Vec::from_iter(0..i)));
        let t6: LinkedList<Vec<u8>> = FromIterator::from_iter((0..10).map(|i| Vec::from_iter(0..i)));
        let t7: BinaryHeap<Vec<u8>> = FromIterator::from_iter((0..10).map(|i| Vec::from_iter(0..i)));
        let t8: BTreeMap<Vec<u8>, u32> = FromIterator::from_iter(
            (0..10)
                .map(|i| Vec::from_iter(0..i))
                .map(|i| (i.clone(), i.len() as u32))
        );
        assert_eq!(Decode::decode(&mut &t1.encode()[..]), Ok(t1));
        assert_eq!(Decode::decode(&mut &t2.encode()[..]), Ok(t2));
        // BinaryHeap iteration order is unspecified; compare sorted contents.
        assert_eq!(
            Decode::decode(&mut &t3.encode()[..]).map(BinaryHeap::into_sorted_vec),
            Ok(t3.into_sorted_vec()),
        );
        assert_eq!(Decode::decode(&mut &t4.encode()[..]), Ok(t4));
        assert_eq!(Decode::decode(&mut &t5.encode()[..]), Ok(t5));
        assert_eq!(Decode::decode(&mut &t6.encode()[..]), Ok(t6));
        assert_eq!(
            Decode::decode(&mut &t7.encode()[..]).map(BinaryHeap::into_sorted_vec),
            Ok(t7.into_sorted_vec()),
        );
        assert_eq!(Decode::decode(&mut &t8.encode()[..]), Ok(t8));
    }
    #[test]
    fn io_reader() {
        let mut io_reader = IoReader(std::io::Cursor::new(&[1u8, 2, 3][..]));
        let mut v = [0; 2];
        io_reader.read(&mut v[..]).unwrap();
        assert_eq!(v, [1, 2]);
        assert_eq!(io_reader.read_byte().unwrap(), 3);
        // Reading past the end surfaces the underlying io error.
        assert_eq!(io_reader.read_byte(), Err("io error: UnexpectedEof".into()));
    }
    #[test]
    fn shared_references_implement_encode() {
        Arc::new(10u32).encode();
        Rc::new(10u32).encode();
    }
    #[test]
    fn not_limit_input_test() {
        use crate::Input;
        // An `Input` that reports an unknown remaining length, to exercise
        // the pre-allocation cap on length-prefixed decoding.
        struct NoLimit<'a>(&'a [u8]);
        impl<'a> Input for NoLimit<'a> {
            fn remaining_len(&mut self) -> Result<Option<usize>, Error> {
                Ok(None)
            }
            fn read(&mut self, into: &mut [u8]) -> Result<(), Error> {
                self.0.read(into)
            }
        }
        let len = MAX_PREALLOCATION * 2 + 1;
        let mut i = Compact(len as u32).encode();
        i.resize(i.len() + len, 0);
        assert_eq!(<Vec<u8>>::decode(&mut NoLimit(&i[..])).unwrap(), vec![0u8; len]);
        let i = Compact(len as u32).encode();
        assert_eq!(
            <Vec<u8>>::decode(&mut NoLimit(&i[..])).err().unwrap().to_string(),
            "Not enough data to fill buffer",
        );
        let i = Compact(1000u32).encode();
        assert_eq!(
            <Vec<u8>>::decode(&mut NoLimit(&i[..])).err().unwrap().to_string(),
            "Not enough data to fill buffer",
        );
    }
    #[test]
    fn boolean() {
        assert_eq!(true.encode(), vec![1]);
        assert_eq!(false.encode(), vec![0]);
        assert_eq!(bool::decode(&mut &[1][..]).unwrap(), true);
        assert_eq!(bool::decode(&mut &[0][..]).unwrap(), false);
    }
    #[test]
    fn some_encode_like() {
        // Compile-time check that these types implement `EncodeLike`.
        fn t<B: EncodeLike>() {}
        t::<&[u8]>();
        t::<&str>();
        t::<NonZeroU32>();
    }
    #[test]
    fn vec_deque_encode_like_vec() {
        let data: VecDeque<u32> = vec![1, 2, 3, 4, 5, 6].into();
        let encoded = data.encode();
        let decoded = Vec::<u32>::decode(&mut &encoded[..]).unwrap();
        assert!(decoded.iter().all(|v| data.contains(&v)));
        assert_eq!(data.len(), decoded.len());
        let encoded = decoded.encode();
        let decoded = VecDeque::<u32>::decode(&mut &encoded[..]).unwrap();
        assert_eq!(data, decoded);
    }
    #[test]
    fn vec_decode_right_capacity() {
        let data: Vec<u32> = vec![1, 2, 3];
        let mut encoded = data.encode();
        // Pad the input so decode sees more bytes than it needs.
        encoded.resize(encoded.len() * 2, 0);
        let decoded = Vec::<u32>::decode(&mut &encoded[..]).unwrap();
        assert_eq!(data, decoded);
        assert_eq!(decoded.capacity(), decoded.len());
        // Check with non-integer type
        let data: Vec<String> = vec!["1".into(), "2".into(), "3".into()];
        let mut encoded = data.encode();
        encoded.resize(65536, 0);
        let decoded = Vec::<String>::decode(&mut &encoded[..]).unwrap();
        assert_eq!(data, decoded);
        assert_eq!(decoded.capacity(), decoded.len());
    }
    #[test]
    fn duration() {
        let num_secs = 13;
        let num_nanos = 37;
        let duration = Duration::new(num_secs, num_nanos);
        let expected = (num_secs, num_nanos as u32).encode();
        assert_eq!(duration.encode(), expected);
        assert_eq!(Duration::decode(&mut &expected[..]).unwrap(), duration);
    }
    #[test]
    fn malformed_duration_encoding_fails() {
        // This test should fail, as the number of nanoseconds encoded is exactly 10^9.
        let invalid_nanos = A_BILLION;
        let encoded = (0u64, invalid_nanos).encode();
        assert!(Duration::decode(&mut &encoded[..]).is_err());
        let num_secs = 1u64;
        let num_nanos = 37u32;
        let invalid_nanos = num_secs as u32 * A_BILLION + num_nanos;
        let encoded = (num_secs, invalid_nanos).encode();
        // This test should fail, as the number of nano seconds encoded is bigger than 10^9.
        assert!(Duration::decode(&mut &encoded[..]).is_err());
        // Now constructing a valid duration and encoding it. Those asserts should not fail.
        let duration = Duration::from_nanos(invalid_nanos as u64);
        let expected = (num_secs, num_nanos).encode();
        assert_eq!(duration.encode(), expected);
        assert_eq!(Duration::decode(&mut &expected[..]).unwrap(), duration);
    }
    #[test]
    fn u64_max() {
        let num_secs = u64::MAX;
        let num_nanos = 0;
        let duration = Duration::new(num_secs, num_nanos);
        let expected = (num_secs, num_nanos).encode();
        assert_eq!(duration.encode(), expected);
        assert_eq!(Duration::decode(&mut &expected[..]).unwrap(), duration);
    }
    #[test]
    fn decoding_does_not_overflow() {
        let num_secs = u64::MAX;
        let num_nanos = A_BILLION;
        // `num_nanos`' carry should make `num_secs` overflow if we were to call `Duration::new()`.
        // This test checks that the we do not call `Duration::new()`.
        let encoded = (num_secs, num_nanos).encode();
        assert!(Duration::decode(&mut &encoded[..]).is_err());
    }
    #[test]
    fn string_invalid_utf8() {
        // `167, 10` is not a valid utf8 sequence, so this should be an error.
        let mut bytes: &[u8] = &[20, 114, 167, 10, 20, 114];
        let obj = <String>::decode(&mut bytes);
        assert!(obj.is_err());
    }
    #[test]
    fn empty_array_encode_and_decode() {
        let data: [u32; 0] = [];
        let encoded = data.encode();
        assert!(encoded.is_empty());
        <[u32; 0]>::decode(&mut &encoded[..]).unwrap();
    }
    // Generates one round-trip test per numeric type via `paste`.
    macro_rules! test_array_encode_and_decode {
        ( $( $name:ty ),* $(,)? ) => {
            $(
                paste::item! {
                    #[test]
                    fn [<test_array_encode_and_decode _ $name>]() {
                        let data: [$name; 32] = [123 as $name; 32];
                        let encoded = data.encode();
                        let decoded: [$name; 32] = Decode::decode(&mut &encoded[..]).unwrap();
                        assert_eq!(decoded, data);
                    }
                }
            )*
        }
    }
    test_array_encode_and_decode!(u8, i8, u16, i16, u32, i32, u64, i64, u128, i128);
    test_array_encode_and_decode!(f32, f64);
    // `encoded_size` must agree with the length actually produced by `encode`.
    fn test_encoded_size(val: impl Encode) {
        let length = val.using_encoded(|v| v.len());
        assert_eq!(length, val.encoded_size());
    }
    // A type with only `encode_to` overridden, so `encoded_size` falls back
    // to the default implementation.
    struct TestStruct {
        data: Vec<u32>,
        other: u8,
        compact: Compact<u128>,
    }
    impl Encode for TestStruct {
        fn encode_to<W: Output + ?Sized>(&self, dest: &mut W) {
            self.data.encode_to(dest);
            self.other.encode_to(dest);
            self.compact.encode_to(dest);
        }
    }
    #[test]
    fn encoded_size_works() {
        test_encoded_size(120u8);
        test_encoded_size(30u16);
        test_encoded_size(1u32);
        test_encoded_size(2343545u64);
        test_encoded_size(34358394245459854u128);
        test_encoded_size(vec![1, 2, 3, 4, 5, 6, 7, 8, 9, 10u32]);
        test_encoded_size(Compact(32445u32));
        test_encoded_size(Compact(34353454453545u128));
        test_encoded_size(TestStruct {
            data: vec![1, 2, 4, 5, 6],
            other: 45,
            compact: Compact(123234545),
        });
    }
    #[test]
    fn ranges() {
        let range = Range { start: 1, end: 100 };
        let range_bytes = (1, 100).encode();
        assert_eq!(range.encode(), range_bytes);
        assert_eq!(Range::decode(&mut &range_bytes[..]), Ok(range));
        let range_inclusive = RangeInclusive::new(1, 100);
        let range_inclusive_bytes = (1, 100).encode();
        assert_eq!(range_inclusive.encode(), range_inclusive_bytes);
        assert_eq!(RangeInclusive::decode(&mut &range_inclusive_bytes[..]), Ok(range_inclusive));
    }
}
|
use std::io;
use std::io::prelude::*;
use std::fs::File;
fn main() {
    // Sum the frequency changes from the input file, starting at zero.
    let total = frequency(0, readfile());
    println!("{}", total);
}
/// Reads `input.txt` and returns its lines as owned strings.
///
/// Panics if the file cannot be opened or a line cannot be read.
fn readfile() -> Vec<String> {
    // Fixed stray double semicolon after `unwrap()`.
    let file = File::open("input.txt").unwrap();
    let reader = io::BufReader::new(file);
    reader.lines().map(|l| l.expect("Could not parse line")).collect()
}
/// Adds the signed integer parsed from each input line onto `start`.
///
/// Panics if any line does not parse as an `i32`.
fn frequency(start: i32, input: Vec<String>) -> i32 {
    start + input.iter().map(|val| val.parse::<i32>().unwrap()).sum::<i32>()
}
#[cfg(test)]
mod tests;
|
use std::collections::HashMap;
/// Returns the "current" and "next" bottle phrases for verse `n` of the
/// "99 Bottles of Beer" song.
///
/// For `n` in `0..=99` the map holds keys `"current"` and `"next"`; for any
/// other `n` the map is empty (matching the original search loop, which never
/// matched out-of-range values). Replaces the original O(100) scan with a
/// direct computation and drops its leftover debug `println!`.
fn get_bottles(n: i32) -> HashMap<&'static str, String> {
    let mut h = HashMap::new();
    if (0..100).contains(&n) {
        // Verse 0 wraps around: "no more bottles" is followed by 99 again.
        let next = if n == 0 { 99 } else { n - 1 };
        let current_text = match n {
            0 => "No more bottles".to_string(),
            1 => format!("{} bottle", n),
            _ => format!("{} bottles", n),
        };
        let next_text = match next {
            0 => "no more bottles".to_string(),
            1 => format!("{} bottle", next),
            _ => format!("{} bottles", next),
        };
        h.insert("current", current_text);
        h.insert("next", next_text);
    }
    h
}
pub fn verse(n: i32) -> String {
let start = format!(
"{} of beer on the wall, {} of beer.",
get_bottles(n).get("current").unwrap(),
get_bottles(n).get("current").unwrap().to_lowercase()
);
let end =
format!(
" {} of beer on the wall.",
get_bottles(n).get("next").unwrap(),
);
let action;
match n {
0 => {
action = format!("Go to the store and buy some more");
}
1 => {
action = format!("Take it down and pass it around");
}
_ => {
action = format!("Take one down and pass it around");
}
}
return format!("{}\n{},{}\n", start, action, end);
}
/// Sings the verses from `start` down to `end` inclusive, separated by a
/// blank line.
pub fn sing(start: i32, end: i32) -> String {
    let mut s = String::new();
    // `end..=start` replaces `end..start + 1`, which would overflow when
    // `start == i32::MAX`.
    for i in (end..=start).rev() {
        s.push_str(&verse(i));
        if i != end {
            s.push('\n');
        }
    }
    s
}
|
#[macro_use]
extern crate serde_derive;
extern crate serde;
extern crate serde_json;
mod helpers;
use crate::helpers::configuration::*;
use crate::helpers::request_handling::*;
use crate::helpers::virustotal::*;
#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>>{
let load_config = get_conf().expect("erro loading config file");
let virustotal_api_key = load_config.virustotal_api_key;
let src_type = String::from("ip");
let vt_ip_result = get_virustotal(String::from("8.8.8.8"),virustotal_api_key, src_type).await?;
println!("{:?}", vt_ip_result);
Ok(())
}
|
mod misc;
mod normal;
mod setup;
pub use normal::{Payload, PayloadBuilder};
pub use setup::{SetupPayload, SetupPayloadBuilder};
|
/**
* Authors: Jorge Martins && Diogo Lopes
* This example is from Vasconcelos, V.T. (and several others):
* "Behavioral Types in Programming Languages" (figures 2.4, 2.5 and 2.6)
*/
use crate::agency;
use crate::message::Decision;
use crate::message::Message;
use chan::Receiver;
use chan::Sender;
use chrono::prelude::*;
use rand::prelude::*;
use std::thread;
//Customer Address
/// A customer's postal address, sent to the agency after accepting an offer.
pub struct Address {
    country: String,
    city: String,
    street: String,
}
impl Address {
    /// Creates an address from its three components.
    pub fn new(country: String, city: String, street: String) -> Address {
        Address {
            country,
            city,
            street,
        }
    }
    /// Returns a copy of the country name.
    pub fn country(&self) -> String {
        self.country.clone()
    }
    /// Returns a copy of the city name.
    pub fn city(&self) -> String {
        self.city.clone()
    }
    /// Returns a copy of the street name.
    pub fn street(&self) -> String {
        self.street.clone()
    }
}
//Customer order
/// Runs the customer side of the booking protocol against the agency.
///
/// Spawns `agency::sell` on its own thread, then repeatedly sends the journey
/// preference and reads back price offers until `eval_offer` accepts one.
/// If the accepted price is below `max_price`, the customer sends ACCEPT and
/// its address and prints the journey date it receives; otherwise it REJECTs.
pub fn order(
    send: Sender<Message>,
    recv: Receiver<Message>,
    max_price: f64,
    addr: Address,
    journey_pref: String,
) {
    // Keep clones for the customer; the originals move into the agency thread.
    let customer_send = send.clone();
    let customer_recv = recv.clone();
    println!("Starting the customer!");
    let _ = thread::spawn(move || {
        agency::sell(send, recv);
    });
    let mut price: f64;
    // Haggling loop: keep asking until `eval_offer` accepts an offered price.
    loop {
        let journey_pref_message = Message::JourneyPreference(journey_pref.clone());
        customer_send.send(journey_pref_message);
        let received: Message = customer_recv.recv().unwrap();
        match received {
            Message::JourneyPrice(p) => {
                price = p;
                println!("Price Offer: {}", price);
                if eval_offer(&journey_pref, &price) {
                    break;
                }
            }
            // Other message kinds are ignored while waiting for a price.
            _ => (),
        }
    }
    let customer_decision: Message;
    if price < max_price {
        customer_decision = Message::CustomerDecision(Decision::ACCEPT);
        customer_send.send(customer_decision);
        let address_message = Message::CustomerAddress(addr);
        customer_send.send(address_message);
        let received = customer_recv.recv().unwrap();
        match received {
            Message::JourneyDate(d) => {
                println!(
                    "Flight date: {}, {}/{}/{}",
                    d.weekday(),
                    d.day(),
                    d.month(),
                    d.year()
                );
            }
            _ => (),
        }
    } else {
        customer_decision = Message::CustomerDecision(Decision::REJECT);
        customer_send.send(customer_decision);
    }
    println!("Closing the customer!");
}
/// Stand-in offer evaluation: accepts with 50% probability, ignoring the
/// actual preference and price (both parameters are unused).
fn eval_offer(_journey_pref: &String, _price: &f64) -> bool {
    let mut rng = rand::thread_rng();
    rng.gen::<f64>() < 0.5
}
|
use crate::cars::data::CarComponent;
use crate::interaction::{FollowEntity, Movable, MovedEvent};
use crate::map_model::IntersectionComponent;
use crate::physics::{Kinematics, Transform};
use crate::rendering::meshrender_component::MeshRender;
use cgmath::Vector2;
use imgui::im_str;
use imgui::Ui;
use imgui_inspect::{InspectArgsDefault, InspectRenderDefault};
use specs::shrev::EventChannel;
use specs::{Component, Entity, World, WorldExt};
use std::marker::PhantomData;
/// Inspect adapter rendering floating-point values as ImGui drag-float widgets.
pub struct ImDragf;
impl InspectRenderDefault<f32> for ImDragf {
    fn render(
        data: &[&f32],
        label: &'static str,
        _: &mut World,
        ui: &Ui,
        args: &InspectArgsDefault,
    ) {
        // Only single-value inspection is supported.
        if data.len() != 1 {
            unimplemented!();
        }
        // Read-only view: the widget edits a local copy that is discarded.
        let mut cp = *data[0];
        ui.drag_float(&im_str!("{}", label), &mut cp)
            .speed(args.step.unwrap_or(0.1))
            .build();
    }
    fn render_mut(
        data: &mut [&mut f32],
        label: &'static str,
        _: &mut World,
        ui: &Ui,
        args: &InspectArgsDefault,
    ) -> bool {
        if data.len() != 1 {
            unimplemented!();
        }
        // Edits the value in place; returns whether it changed this frame.
        ui.drag_float(&im_str!("{}", label), data[0])
            .speed(args.step.unwrap_or(0.1))
            .build()
    }
}
impl InspectRenderDefault<f64> for ImDragf {
    /// Renders an `f64` through an `f32` widget (imgui only drags f32).
    fn render(
        data: &[&f64],
        label: &'static str,
        _: &mut World,
        ui: &Ui,
        args: &InspectArgsDefault,
    ) {
        // Only single-value inspection is supported.
        if data.len() != 1 {
            unimplemented!();
        }
        // Read-only view: the widget edits a narrowed local copy.
        let mut cp = *data[0] as f32;
        ui.drag_float(&im_str!("{}", label), &mut cp)
            .speed(args.step.unwrap_or(0.1))
            .build();
    }
    fn render_mut(
        data: &mut [&mut f64],
        label: &'static str,
        _: &mut World,
        ui: &Ui,
        args: &InspectArgsDefault,
    ) -> bool {
        if data.len() != 1 {
            unimplemented!();
        }
        // Edit through an f32 shadow copy.
        let mut cp = *data[0] as f32;
        let changed = ui
            .drag_float(&im_str!("{}", label), &mut cp)
            .speed(args.step.unwrap_or(0.1))
            .build();
        // Fix: only write back when the user actually edited the value.
        // The original wrote back unconditionally, silently rounding the
        // stored f64 to f32 precision every frame the widget was shown.
        if changed {
            *data[0] = cp as f64;
        }
        changed
    }
}
/// Inspect adapter rendering `cgmath::Vector2<f32>` as a two-float widget.
pub struct ImCgVec2;
impl InspectRenderDefault<Vector2<f32>> for ImCgVec2 {
    fn render(
        data: &[&Vector2<f32>],
        label: &'static str,
        _: &mut World,
        ui: &Ui,
        _: &InspectArgsDefault,
    ) {
        // Only single-value inspection is supported.
        if data.len() != 1 {
            unimplemented!();
        }
        // Read-only view: edits land in a temporary [f32; 2] and are discarded.
        let x = data[0];
        imgui::InputFloat2::new(ui, &im_str!("{}", label), &mut [x.x, x.y])
            .always_insert_mode(false)
            .build();
    }
    fn render_mut(
        data: &mut [&mut Vector2<f32>],
        label: &'static str,
        _: &mut World,
        ui: &Ui,
        args: &InspectArgsDefault,
    ) -> bool {
        if data.len() != 1 {
            unimplemented!();
        }
        // Convert to the [f32; 2] layout imgui expects, then copy back.
        let x = &mut data[0];
        let mut conv = [x.x, x.y];
        let changed = ui
            .drag_float2(&im_str!("{}", label), &mut conv)
            .speed(args.step.unwrap_or(0.1))
            .build();
        x.x = conv[0];
        x.y = conv[1];
        changed
    }
}
/// Inspect adapter that displays a specs `Entity` id; never editable.
pub struct ImEntity;
impl InspectRenderDefault<Entity> for ImEntity {
    fn render(
        data: &[&Entity],
        label: &'static str,
        _: &mut World,
        ui: &Ui,
        _args: &InspectArgsDefault,
    ) {
        // Only single-value inspection is supported.
        if data.len() != 1 {
            unimplemented!();
        }
        ui.text(&im_str!("{:?} {}", *data[0], label));
    }
    fn render_mut(
        data: &mut [&mut Entity],
        label: &'static str,
        _: &mut World,
        ui: &Ui,
        _: &InspectArgsDefault,
    ) -> bool {
        if data.len() != 1 {
            unimplemented!();
        }
        ui.text(&im_str!("{:?} {}", *data[0], label));
        // Entity ids cannot be edited from the inspector.
        false
    }
}
/// Inspect adapter for `Vec<T>`: renders each element under a collapsing header.
pub struct ImVec<T> {
    _phantom: PhantomData<T>,
}
impl<T: InspectRenderDefault<T>> InspectRenderDefault<Vec<T>> for ImVec<T> {
    fn render(
        _data: &[&Vec<T>],
        _label: &'static str,
        _: &mut World,
        _ui: &Ui,
        _args: &InspectArgsDefault,
    ) {
        // Read-only rendering of vectors is not supported.
        unimplemented!()
    }
    fn render_mut(
        data: &mut [&mut Vec<T>],
        label: &str,
        w: &mut World,
        ui: &Ui,
        args: &InspectArgsDefault,
    ) -> bool {
        if data.len() != 1 {
            unimplemented!();
        }
        let v = &mut data[0];
        if ui.collapsing_header(&im_str!("{}", label)).build() {
            ui.indent();
            for (i, x) in v.iter_mut().enumerate() {
                // Unique ImGui id per element so identical widgets don't clash.
                let id = ui.push_id(i as i32);
                <T as InspectRenderDefault<T>>::render_mut(&mut [x], "", w, ui, args);
                id.pop(ui);
            }
            ui.unindent();
        }
        // Element edits are applied in place; the Vec itself reports no change.
        false
    }
}
// Generates a no-op inspect implementation that just prints the type's name,
// for component types with nothing useful to edit.
#[rustfmt::skip]
macro_rules! empty_inspect_impl {
    ($x : ty) => {
        impl imgui_inspect::InspectRenderDefault<$x> for $x {
            fn render(_: &[&$x], _: &'static str, _: &mut specs::World, ui: &imgui::Ui, _: &imgui_inspect::InspectArgsDefault) {
                ui.text(std::stringify!($x))
            }
            fn render_mut(_: &mut [&mut $x], _: &'static str, _: &mut specs::World, ui: &imgui::Ui, _: &imgui_inspect::InspectArgsDefault) -> bool {
                ui.text(std::stringify!($x));
                false
            }
        }
    };
}
/// Draws an inspector UI for one entity's components.
pub struct InspectRenderer<'a, 'b> {
    pub world: &'a mut World,
    pub entity: Entity,
    pub ui: &'b Ui<'b>,
}
/// Clones `entity`'s `T` component (if present), passes the clone through
/// `f`, and writes the result back. Cloning avoids holding the component
/// storage borrow while `f` accesses the `World`.
fn clone_and_modify<T: Component + Clone>(
    world: &mut World,
    entity: Entity,
    f: impl FnOnce(&mut World, T) -> T,
) {
    let snapshot = world.write_component::<T>().get_mut(entity).cloned();
    match snapshot {
        Some(component) => {
            let updated = f(world, component);
            *world.write_component::<T>().get_mut(entity).unwrap() = updated;
        }
        None => {}
    }
}
impl<'a, 'b> InspectRenderer<'a, 'b> {
    /// Renders an editable widget for component `T` of the inspected entity,
    /// labelled with the type's short (un-pathed) name.
    fn inspect_component<T: Component + Clone + InspectRenderDefault<T>>(&mut self) {
        let ui = self.ui;
        clone_and_modify(self.world, self.entity, |world, mut x| {
            <T as InspectRenderDefault<T>>::render_mut(
                &mut [&mut x],
                std::any::type_name::<T>().split("::").last().unwrap_or(""),
                world,
                ui,
                &InspectArgsDefault::default(),
            );
            x
        });
    }
    /// Draws the whole inspector: the transform position (publishing a
    /// `MovedEvent` when edited), each known component type, and a
    /// follow/unfollow button.
    pub fn render(mut self) {
        let ui = self.ui;
        let mut event = None;
        clone_and_modify(self.world, self.entity, |world, mut x: Transform| {
            let mut position = x.position();
            if <ImCgVec2 as InspectRenderDefault<Vector2<f32>>>::render_mut(
                &mut [&mut position],
                "Pos",
                world,
                ui,
                &InspectArgsDefault::default(),
            ) {
                event = Some(position);
            }
            x.set_position(position);
            x
        });
        // Publish the move only after the component borrow above is released.
        if let Some(new_pos) = event {
            self.world
                .write_resource::<EventChannel<MovedEvent>>()
                .single_write(MovedEvent {
                    entity: self.entity,
                    new_pos,
                });
        }
        self.inspect_component::<CarComponent>();
        self.inspect_component::<MeshRender>();
        self.inspect_component::<Kinematics>();
        self.inspect_component::<Movable>();
        self.inspect_component::<IntersectionComponent>();
        let follow = &mut self.world.write_resource::<FollowEntity>().0;
        if follow.is_none() {
            if ui.small_button(im_str!("Follow")) {
                follow.replace(self.entity);
            }
        } else if ui.small_button(im_str!("Unfollow")) {
            follow.take();
        }
    }
}
|
// NOTE(review): this file is pre-1.0 Rust (`pure fn`, `'\uXXXX'` escapes);
// the archaic dialect is kept intact here.
/// Returns true when `c` is one of the listed Unicode whitespace characters.
pure fn is_whitespace(c: char) -> bool {
    const ch_space: char = '\u0020';
    const ch_ogham_space_mark: char = '\u1680';
    const ch_mongolian_vowel_sep: char = '\u180e';
    const ch_en_quad: char = '\u2000';
    const ch_em_quad: char = '\u2001';
    const ch_en_space: char = '\u2002';
    const ch_em_space: char = '\u2003';
    const ch_three_per_em_space: char = '\u2004';
    const ch_four_per_em_space: char = '\u2005';
    const ch_six_per_em_space: char = '\u2006';
    const ch_figure_space: char = '\u2007';
    const ch_punctuation_space: char = '\u2008';
    const ch_thin_space: char = '\u2009';
    const ch_hair_space: char = '\u200a';
    const ch_narrow_no_break_space: char = '\u202f';
    const ch_medium_mathematical_space: char = '\u205f';
    const ch_ideographic_space: char = '\u3000';
    const ch_line_separator: char = '\u2028';
    const ch_paragraph_separator: char = '\u2029';
    const ch_character_tabulation: char = '\u0009';
    const ch_line_feed: char = '\u000a';
    const ch_line_tabulation: char = '\u000b';
    const ch_form_feed: char = '\u000c';
    const ch_carriage_return: char = '\u000d';
    const ch_next_line: char = '\u0085';
    const ch_no_break_space: char = '\u00a0';
    if c == ch_space {
        true
    } else if c == ch_ogham_space_mark {
        true
    } else if c == ch_mongolian_vowel_sep {
        true
    } else if c == ch_en_quad {
        true
    } else if c == ch_em_quad {
        true
    } else if c == ch_en_space {
        true
    } else if c == ch_em_space {
        true
    } else if c == ch_three_per_em_space {
        true
    } else if c == ch_four_per_em_space {
        true
    } else if c == ch_six_per_em_space {
        true
    } else if c == ch_figure_space {
        true
    } else if c == ch_punctuation_space {
        true
    } else if c == ch_thin_space {
        true
    } else if c == ch_hair_space {
        true
    } else if c == ch_narrow_no_break_space {
        true
    } else if c == ch_medium_mathematical_space {
        true
    } else if c == ch_ideographic_space {
        true
    } else if c == ch_line_tabulation {
        true
    } else if c == ch_paragraph_separator {
        true
    } else if c == ch_character_tabulation {
        true
    } else if c == ch_line_feed {
        true
    // BUG FIX: this branch used to re-test ch_line_tabulation (already
    // handled above), so the declared ch_line_separator (U+2028) was never
    // recognized. Test the line separator here instead.
    } else if c == ch_line_separator {
        true
    } else if c == ch_form_feed {
        true
    } else if c == ch_carriage_return {
        true
    } else if c == ch_next_line {
        true
    } else if c == ch_no_break_space { true } else { false }
}
|
//! A series of re-exports to simplify usage of the framework.
//!
//! Some exports are renamed to avoid name conflicts as they are generic.
//! These include:
//!
//! - `Context` -> `FrameworkContext`
//! - `Error` -> `FrameworkError`
#[cfg(feature = "macros")]
pub use command_attr::{check, command, hook};
pub use crate::category::Category;
pub use crate::check::{Check, CheckResult, Reason};
pub use crate::command::{Command, CommandResult};
pub use crate::configuration::Configuration;
pub use crate::context::{CheckContext, Context as FrameworkContext};
pub use crate::error::{DispatchError, Error as FrameworkError};
pub use crate::Framework;
|
#![allow(unknown_lints)]
#![deny(unused_variables)]
#![deny(unused_mut)]
#![deny(clippy)]
#![deny(clippy_pedantic)]
#![allow(stutter)]
#![recursion_limit = "128"]
//!
//! Neon-serde
//! ==========
//!
//! This crate is a utility to easily convert values between
//!
//! A `Handle<JsValue>` from the `neon` crate
//! and any value implementing `serde::{Serialize, Deserialize}`
//!
//! ## Usage
//!
//! #### `neon_serde::from_value`
//! Convert a `Handle<js::JsValue>` to
//! a type implementing `serde::Deserialize`
//!
//! #### `neon_serde::to_value`
//! Convert a value implementing `serde::Serialize` to
//! a `Handle<JsValue>`
//!
//!
//! ## Example
//!
//! ```rust,no_run
//! # #![allow(dead_code)]
//! extern crate neon_serde;
//! extern crate neon;
//! #[macro_use]
//! extern crate serde_derive;
//!
//! use neon::js::{JsValue, JsUndefined};
//! use neon::vm::{Call, JsResult};
//!
//! #[derive(Serialize, Debug, Deserialize)]
//! struct AnObject {
//! a: u32,
//! b: Vec<f64>,
//! c: String,
//! }
//!
//! fn deserialize_something(call: Call) -> JsResult<JsValue> {
//! let scope = call.scope;
//! let arg0 = call.arguments
//! .require(scope, 0)?
//! .check::<JsValue>()?;
//!
//! let arg0_value :AnObject = neon_serde::from_value(scope, arg0)?;
//! println!("{:?}", arg0_value);
//!
//! Ok(JsUndefined::new().upcast())
//! }
//!
//! fn serialize_something(call: Call) -> JsResult<JsValue> {
//! let scope = call.scope;
//! let value = AnObject {
//! a: 1,
//! b: vec![2f64, 3f64, 4f64],
//! c: "a string".into()
//! };
//!
//! let js_value = neon_serde::to_value(scope, &value)?;
//! Ok(js_value)
//! }
//!
//! # fn main () {
//! # }
//!
//! ```
//!
extern crate cast;
#[macro_use]
extern crate error_chain;
extern crate neon;
#[macro_use]
extern crate serde;
pub mod ser;
pub mod de;
pub mod errors;
mod macros;
pub use de::from_value;
pub use ser::to_value;
#[cfg(test)]
mod tests {
    use super::*;
    use neon::js::JsValue;
    use neon::mem::Handle;
    use neon::vm::{Call, JsResult};
    /// Compile-only check: `from_value`/`to_value` round-trip a `()` through
    /// the neon handle types. The inner function is never actually invoked.
    #[test]
    fn test_it_compiles() {
        fn check<'j>(call: Call<'j>) -> JsResult<'j, JsValue> {
            let scope = call.scope;
            let result: () = {
                let arg: Handle<'j, JsValue> = call.arguments.require(scope, 0)?;
                let () = from_value(scope, arg)?;
                ()
            };
            let result: Handle<'j, JsValue> = to_value(scope, &result)?;
            Ok(result)
        }
        let _ = check;
    }
}
|
// Run-time:
// status: success
// stdin: abc
// stdout: Hello abc
use std::io::{Read, stdin};
fn main() {
    // Read all of stdin, then greet with whatever was received.
    let mut input = String::new();
    stdin().read_to_string(&mut input).unwrap();
    println!("Hello {}", input);
}
|
/*
* Datadog API V1 Collection
*
* Collection of all Datadog Public endpoints.
*
* The version of the OpenAPI document: 1.0
* Contact: support@datadoghq.com
* Generated by: https://openapi-generator.tech
*/
/// SloResponse : A service level objective response containing a single service level objective.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SloResponse {
    /// The service level objective returned by the endpoint, if any.
    #[serde(rename = "data", skip_serializing_if = "Option::is_none")]
    pub data: Option<Box<crate::models::ServiceLevelObjective>>,
    /// An array of error messages. Each endpoint documents how/whether this field is used.
    #[serde(rename = "errors", skip_serializing_if = "Option::is_none")]
    pub errors: Option<Vec<String>>,
}
impl SloResponse {
    /// A service level objective response containing a single service level objective.
    ///
    /// Creates an empty response: both `data` and `errors` start as `None`.
    pub fn new() -> SloResponse {
        SloResponse {
            data: None,
            errors: None,
        }
    }
}
|
#![allow(clippy::missing_safety_doc)]
#[cfg(test)]
mod tests;
use curve25519_dalek::constants::ED25519_BASEPOINT_TABLE;
use curve25519_dalek::edwards::{CompressedEdwardsY, EdwardsPoint};
use curve25519_dalek::scalar::Scalar;
use rand::rngs::OsRng;
use std::collections::HashSet;
use std::convert::TryInto;
use std::slice;
/// Returned when an unexpected internal panic was caught.
pub const INTERNAL_ERROR: u8 = 1;
/// Returned when the caller supplied invalid parameters.
pub const PARAMS_ERROR: u8 = 2;
/// Returned when input fails validation (e.g. a share does not match the
/// published commitments).
pub const VALIDATE_ERROR: u8 = 3;
// Unwinding across an `extern "C"` boundary is undefined behavior, so in
// production builds every FFI entry point wraps its body in `catch_panic!`,
// which converts a panic into `INTERNAL_ERROR`.
#[cfg(not(test))]
macro_rules! catch_panic {
    ($code:block) => {{
        let res = std::panic::catch_unwind(std::panic::AssertUnwindSafe(move || $code));
        match res {
            Ok(x) => x,
            Err(e) => {
                // Payloads from `panic!("literal")` are `&'static str`;
                // anything else is reported generically.
                match e.downcast_ref::<&'static str>() {
                    Some(s) => eprintln!("INTERNAL ED25519-SECRET-SHARING ERROR: {}", s),
                    None => eprintln!("UNKNOWN INTERNAL ED25519-SECRET-SHARING ERROR!"),
                }
                INTERNAL_ERROR
            }
        }
    }};
}
// In tests, let panics propagate so failures surface with a backtrace.
#[cfg(test)]
macro_rules! catch_panic {
    ($code:block) => {{
        $code
    }};
}
#[cfg(feature = "wasm")]
#[no_mangle]
/// FFI allocator for wasm callers: returns a buffer of at least `size`
/// bytes, preceded by a hidden `usize` header that records the allocation's
/// true capacity so `secret_sharing_free` can reconstruct the `Vec`.
///
/// # Safety
/// The returned pointer must only be released via `secret_sharing_free`.
pub unsafe extern "C" fn secret_sharing_malloc(size: usize) -> *mut u8 {
    // NOTE: `mem::...` was previously unqualified with no `use std::mem;` in
    // scope, which cannot compile under the `wasm` feature; use full paths.
    let mut vec: Vec<u8> = Vec::with_capacity(size + std::mem::size_of::<usize>());
    let true_size = vec.capacity();
    let ptr = vec.as_mut_ptr();
    // The allocation is only guaranteed 1-byte aligned (`Vec<u8>`), so the
    // usize header must be written unaligned to avoid UB.
    (ptr as *mut usize).write_unaligned(true_size);
    std::mem::forget(vec);
    ptr.add(std::mem::size_of::<usize>())
}
#[cfg(feature = "wasm")]
#[no_mangle]
/// FFI deallocator matching `secret_sharing_malloc`. Null pointers are
/// ignored.
///
/// # Safety
/// `ptr` must be null or a pointer previously returned by
/// `secret_sharing_malloc` that has not already been freed.
pub unsafe extern "C" fn secret_sharing_free(ptr: *mut u8) {
    if ptr.is_null() {
        return;
    }
    // Step back over the usize header written by `secret_sharing_malloc`
    // to recover the start of the original allocation.
    let ptr = ptr.sub(std::mem::size_of::<usize>());
    // Header may be unaligned (allocation is only 1-byte aligned), so read
    // it unaligned.
    let size = (ptr as *const usize).read_unaligned();
    // Rebuild the Vec with its true capacity and drop it to release memory.
    let _: Vec<u8> = Vec::from_raw_parts(ptr, 0, size);
}
/// FFI: splits the 32-byte secret `key` into `shares` Shamir shares with
/// reconstruction threshold `needed`.
///
/// Writes one 32-byte share per pointer in `shares_out` (for x = 1..=shares).
/// If `verification_out` is non-null it receives `needed` compressed
/// coefficient public keys (32 bytes each) for later share validation.
/// Returns 0 on success or one of the error constants.
///
/// # Safety
/// `key` must be valid for 32 bytes; `shares_out` for `shares` pointers,
/// each valid for 32 bytes; `verification_out`, if non-null, for
/// `needed * 32` bytes.
#[no_mangle]
pub unsafe extern "C" fn secret_sharing_generate(
    key: *const u8,
    needed: u8,
    shares: u8,
    verification_out: *mut u8,
    shares_out: *const *mut [u8; 32],
) -> u8 {
    catch_panic!({
        if needed == 0 || shares == 0 || needed > shares {
            return PARAMS_ERROR;
        }
        let mut rng = OsRng;
        let mut key_bytes = [0u8; 32];
        key_bytes.copy_from_slice(&slice::from_raw_parts(key, 32));
        let key = Scalar::from_bytes_mod_order(key_bytes);
        // Random polynomial of degree `needed - 1` whose constant term is
        // the secret; any `needed` evaluations determine it uniquely.
        let mut coeffs = Vec::with_capacity(needed.into());
        coeffs.push(key);
        for _ in 1..needed {
            coeffs.push(Scalar::random(&mut rng));
        }
        if !verification_out.is_null() {
            // Publish coeff * G for each coefficient so individual shares
            // can later be checked without revealing the polynomial.
            let verification_out =
                slice::from_raw_parts_mut(verification_out as *mut [u8; 32], needed.into());
            for (coeff, out) in coeffs.iter().zip(verification_out) {
                let coeff_pub = coeff * &ED25519_BASEPOINT_TABLE;
                *out = coeff_pub.compress().to_bytes();
            }
        }
        let shares_out = slice::from_raw_parts(shares_out, shares.into());
        // Share i is the polynomial evaluated at x = i. x starts at 1:
        // evaluating at x = 0 would emit the secret itself.
        for (i, &out) in (1..=shares).zip(shares_out) {
            let x_scalar = Scalar::from(u64::from(i));
            let mut curr_x_pow = Scalar::one();
            let mut total = Scalar::zero();
            for coeff in &coeffs {
                total += curr_x_pow * coeff;
                curr_x_pow *= &x_scalar;
            }
            *out = total.to_bytes();
        }
        0
    })
}
/// FFI: checks a single share against the published commitments
/// (`verification` = concatenated 32-byte compressed coefficient public
/// keys, as produced by `secret_sharing_generate`).
///
/// On success, optionally writes the secret's public key (the constant-term
/// commitment) to `pubkey_out` and the threshold to `needed_shares_out`.
/// Returns 0 on success or one of the error constants.
///
/// # Safety
/// `verification` must be valid for `verification_len` bytes and `share`
/// for 32 bytes; either out-pointer may be null to skip that output.
#[no_mangle]
pub unsafe extern "C" fn secret_sharing_validate(
    verification: *const u8,
    verification_len: usize,
    share: *const u8,
    share_num: u8,
    pubkey_out: *mut [u8; 32],
    needed_shares_out: *mut u8,
) -> u8 {
    catch_panic!({
        // Commitment blob must be a non-empty whole number of 32-byte
        // points, encoding at most 255 coefficients.
        if verification_len == 0
            || verification_len % 32 != 0
            || verification_len / 32 > u8::max_value().into()
        {
            return VALIDATE_ERROR;
        }
        if share_num == 0 {
            // this would mean we just have the private key
            return VALIDATE_ERROR;
        }
        let verification =
            slice::from_raw_parts(verification as *const [u8; 32], verification_len / 32);
        // Evaluate the committed polynomial "in the exponent" at
        // x = share_num: total = sum_k C_k * x^k, which must equal share * G.
        let x_scalar = Scalar::from(u64::from(share_num));
        let mut curr_x_pow = Scalar::one();
        let mut total = EdwardsPoint::default();
        for &chunk in verification {
            let coeff = match CompressedEdwardsY(chunk).decompress() {
                Some(c) => c,
                None => return VALIDATE_ERROR,
            };
            total += curr_x_pow * coeff;
            curr_x_pow *= &x_scalar;
        }
        let share = slice::from_raw_parts(share, 32);
        let mut share_bytes = [0u8; 32];
        share_bytes.copy_from_slice(share);
        let share = Scalar::from_bytes_mod_order(share_bytes);
        if &share * &ED25519_BASEPOINT_TABLE != total {
            return VALIDATE_ERROR;
        }
        if !pubkey_out.is_null() {
            // Constant-term commitment is the public key of the shared secret.
            *pubkey_out = verification[0];
        }
        if !needed_shares_out.is_null() {
            *needed_shares_out = verification
                .len()
                .try_into()
                .expect("verification.len() didn't fit into a u8 despite earlier check");
        }
        0
    })
}
/// FFI: reconstructs the secret from `num_shares` shares by Lagrange
/// interpolation evaluated at x = 0. Share numbers must be distinct and
/// non-zero. Returns 0 on success or `PARAMS_ERROR`.
///
/// # Safety
/// `shares` and `share_numbers` must be valid for `num_shares` entries,
/// each share pointer valid for 32 bytes; `secret_key_out` for 32 bytes.
#[no_mangle]
pub unsafe extern "C" fn secret_sharing_solve(
    shares: *const *const [u8; 32],
    share_numbers: *const u8,
    num_shares: usize,
    secret_key_out: *mut [u8; 32],
) -> u8 {
    catch_panic!({
        if num_shares == 0 {
            return PARAMS_ERROR;
        }
        let shares = slice::from_raw_parts(shares, num_shares);
        let share_numbers = slice::from_raw_parts(share_numbers, num_shares);
        // Duplicate or zero share numbers make interpolation ill-defined.
        let share_numbers_set: HashSet<u8> = share_numbers.iter().cloned().collect();
        if share_numbers_set.len() != num_shares || share_numbers_set.contains(&0) {
            return PARAMS_ERROR;
        }
        let mut total = Scalar::zero();
        for (&share, &share_num_int) in shares.iter().zip(share_numbers.iter()) {
            let share_num = Scalar::from(u64::from(share_num_int));
            let mut processed_part = Scalar::from_bytes_mod_order(*share);
            // based on Lagrange basis polynomials, but optimized for x=0
            for &other_share_num_int in share_numbers {
                if share_num_int == other_share_num_int {
                    continue;
                }
                let other_share_num = Scalar::from(u64::from(other_share_num_int));
                let denom = other_share_num - share_num;
                processed_part *= other_share_num * denom.invert();
            }
            total += processed_part;
        }
        *secret_key_out = total.to_bytes();
        0
    })
}
|
use super::token::literal::{PrimitiveType, TerminalSymbol};
/// A user-defined (non-primitive) type, identified by its name.
#[derive(Debug, Clone, PartialEq)]
pub struct CustomType {
    name: String,
}
/// The set of types the checker works with.
///
/// `PartialEq` is implemented by hand (not derived) so that `IgnoreType`
/// compares equal to every other type.
#[derive(Debug, Clone)]
pub enum Type {
    Primitive(PrimitiveType),
    Custom(CustomType),
    // InvalidTypeError,
    /// Wildcard: equality with any other type succeeds.
    IgnoreType,
    NoneType,
}
impl Type {
    /// Shorthand constructor for the primitive integer type.
    pub fn int() -> Self {
        Type::Primitive(PrimitiveType::Int)
    }
    /// Emits this type as target-language source text.
    ///
    /// Only primitive types can currently be emitted; every other variant
    /// panics via `unimplemented!`.
    pub fn as_code(&self) -> String {
        if let Type::Primitive(primitive) = self {
            primitive.as_code()
        } else {
            unimplemented!()
        }
    }
}
impl PartialEq for Type {
    /// Structural equality, except that `IgnoreType` acts as a wildcard
    /// matching any type on either side.
    fn eq(&self, other: &Self) -> bool {
        use Type::*;
        match (self, other) {
            // Wildcard wins regardless of the other operand.
            (IgnoreType, _) | (_, IgnoreType) => true,
            (Primitive(p), Primitive(q)) => p == q,
            (Custom(c), Custom(d)) => c == d,
            _ => false,
        }
    }
}
impl std::fmt::Display for Type {
    /// User-facing rendering of the type.
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        use Type::*;
        match self {
            Primitive(primitive) => {
                write!(f, "{}", primitive.to_literal())
            }
            Custom(custom) => {
                write!(f, "{}", custom.name)
            }
            // NOTE(review): this message looks like it belonged to the removed
            // `InvalidTypeError` variant (and "occured" is misspelled) —
            // confirm whether `IgnoreType` should display differently before
            // changing the string.
            IgnoreType => write!(f, "<invalid-type-error-occured>"),
            NoneType => write!(f, "<none>"),
        }
    }
}
|
use super::expression::Expression;
use super::variable::{FreeVariable, GlobalVariable, LocalVariable};
use crate::ast_transform::Transformer;
use crate::scm::Scm;
use crate::source::SourceLocation;
use crate::symbol::Symbol;
use crate::syntax::{Reify, Variable};
use crate::utils::{Named, Sourced};
// Generates the `Reference` enum with one variant per concrete reference
// kind (presumably the `sum_type!` macro also wires `From`/`TryFrom`
// conversions against `Expression` — see the macro definition to confirm).
sum_type! {
    #[derive(Debug, Clone, PartialEq)]
    pub type Reference(Expression) = LocalReference
        | GlobalReference
        | FreeReference;
}
impl Reference {
    /// Dispatches `default_transform` to the concrete variant and wraps the
    /// result back up as a `Reference`.
    pub fn default_transform(self, visitor: &mut impl Transformer) -> Self {
        use Reference::*;
        match self {
            LocalReference(x) => x.default_transform(visitor).into(),
            GlobalReference(x) => x.default_transform(visitor).into(),
            FreeReference(x) => x.default_transform(visitor).into(),
        }
    }
    /// Name of the referenced variable, regardless of its kind.
    pub fn var_name(&self) -> Symbol {
        use Reference::*;
        match self {
            LocalReference(r) => r.var.name(),
            GlobalReference(r) => r.var.name(),
            FreeReference(r) => r.var.name(),
        }
    }
    /// The referenced variable, widened to the general `Variable` type.
    pub fn var(&self) -> Variable {
        use Reference::*;
        match self {
            LocalReference(r) => r.var.clone().into(),
            GlobalReference(r) => r.var.clone().into(),
            FreeReference(r) => r.var.clone().into(),
        }
    }
}
impl Sourced for Reference {
    /// Source location of the underlying concrete reference.
    fn source(&self) -> &SourceLocation {
        use Reference::*;
        match self {
            LocalReference(x) => x.source(),
            GlobalReference(x) => x.source(),
            FreeReference(x) => x.source(),
        }
    }
}
/// A reference to a variable bound in the local scope.
#[derive(Debug, Clone)]
pub struct LocalReference {
    pub var: LocalVariable,
    pub span: SourceLocation,
}
impl_sourced!(LocalReference);
// Equality ignores `span`: two references to the same local variable compare
// equal no matter where they appear in the source.
impl PartialEq for LocalReference {
    fn eq(&self, other: &Self) -> bool {
        self.var == other.var
    }
}
impl LocalReference {
    pub fn new(var: LocalVariable, span: SourceLocation) -> Self {
        LocalReference { var, span }
    }
    /// Leaf node: nothing to recurse into, so transformation is the identity.
    pub fn default_transform(self, _visitor: &mut impl Transformer) -> Self {
        self
    }
}
/// A reference to a global variable.
#[derive(Debug, Clone)]
pub struct GlobalReference {
    pub var: GlobalVariable,
    pub span: SourceLocation,
}
impl_sourced!(GlobalReference);
// Equality ignores `span`, matching `LocalReference`'s behavior.
impl PartialEq for GlobalReference {
    fn eq(&self, other: &Self) -> bool {
        self.var == other.var
    }
}
impl GlobalReference {
    pub fn new(var: GlobalVariable, span: SourceLocation) -> Self {
        GlobalReference { var, span }
    }
    /// Leaf node: nothing to recurse into, so transformation is the identity.
    pub fn default_transform(self, _visitor: &mut impl Transformer) -> Self {
        self
    }
}
/// A reference to a free (captured) variable.
#[derive(Debug, Clone)]
pub struct FreeReference {
    pub var: FreeVariable,
    // NOTE(review): unlike the sibling reference types, `span` is private
    // here — confirm whether that asymmetry is intentional.
    span: SourceLocation,
}
impl_sourced!(FreeReference);
// Equality ignores `span`, matching the other reference types.
impl PartialEq for FreeReference {
    fn eq(&self, other: &Self) -> bool {
        self.var == other.var
    }
}
impl FreeReference {
    pub fn new(var: FreeVariable, span: SourceLocation) -> Self {
        FreeReference { var, span }
    }
    /// Leaf node: nothing to recurse into, so transformation is the identity.
    pub fn default_transform(self, _visitor: &mut impl Transformer) -> Self {
        self
    }
}
impl Reify for Reference {
    /// Reifies the reference back into scheme data: the bare symbol naming
    /// the referenced variable.
    fn reify(&self) -> Scm {
        // All three variants reify identically, so reuse `Reference::var_name`
        // instead of repeating the per-variant match.
        Scm::Symbol(self.var_name())
    }
}
|
// NOTE(review): this register API appears to be svd2rust-generated; prefer
// regenerating from the SVD over hand edits.
#[doc = "Reader of register CTRL"]
pub type R = crate::R<u32, super::CTRL>;
#[doc = "Writer for register CTRL"]
pub type W = crate::W<u32, super::CTRL>;
#[doc = "Register CTRL `reset()`'s with value 0"]
impl crate::ResetValue for super::CTRL {
    type Type = u32;
    // CTRL resets with every bit clear.
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0
    }
}
// --- Bit 0: RCBLL_CTRL ---
#[doc = "Reader of field `RCBLL_CTRL`"]
pub type RCBLL_CTRL_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `RCBLL_CTRL`"]
pub struct RCBLL_CTRL_W<'a> {
    w: &'a mut W,
}
impl<'a> RCBLL_CTRL_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear bit 0, then set it from `value`.
        self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01);
        self.w
    }
}
// --- Bit 1: RCBLL_CPU_REQ ---
#[doc = "Reader of field `RCBLL_CPU_REQ`"]
pub type RCBLL_CPU_REQ_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `RCBLL_CPU_REQ`"]
pub struct RCBLL_CPU_REQ_W<'a> {
    w: &'a mut W,
}
impl<'a> RCBLL_CPU_REQ_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1);
        self.w
    }
}
// --- Bit 2: CPU_SINGLE_WRITE ---
#[doc = "Reader of field `CPU_SINGLE_WRITE`"]
pub type CPU_SINGLE_WRITE_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `CPU_SINGLE_WRITE`"]
pub struct CPU_SINGLE_WRITE_W<'a> {
    w: &'a mut W,
}
impl<'a> CPU_SINGLE_WRITE_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 2)) | (((value as u32) & 0x01) << 2);
        self.w
    }
}
// --- Bit 3: CPU_SINGLE_READ ---
#[doc = "Reader of field `CPU_SINGLE_READ`"]
pub type CPU_SINGLE_READ_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `CPU_SINGLE_READ`"]
pub struct CPU_SINGLE_READ_W<'a> {
    w: &'a mut W,
}
impl<'a> CPU_SINGLE_READ_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 3)) | (((value as u32) & 0x01) << 3);
        self.w
    }
}
// --- Bit 4: ALLOW_CPU_ACCESS_TX_RX ---
#[doc = "Reader of field `ALLOW_CPU_ACCESS_TX_RX`"]
pub type ALLOW_CPU_ACCESS_TX_RX_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `ALLOW_CPU_ACCESS_TX_RX`"]
pub struct ALLOW_CPU_ACCESS_TX_RX_W<'a> {
    w: &'a mut W,
}
impl<'a> ALLOW_CPU_ACCESS_TX_RX_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 4)) | (((value as u32) & 0x01) << 4);
        self.w
    }
}
// --- Bit 5: ENABLE_RADIO_BOD ---
#[doc = "Reader of field `ENABLE_RADIO_BOD`"]
pub type ENABLE_RADIO_BOD_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `ENABLE_RADIO_BOD`"]
pub struct ENABLE_RADIO_BOD_W<'a> {
    w: &'a mut W,
}
impl<'a> ENABLE_RADIO_BOD_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 5)) | (((value as u32) & 0x01) << 5);
        self.w
    }
}
// Typed read accessors, one per CTRL bit field.
impl R {
    #[doc = "Bit 0 - N/A"]
    #[inline(always)]
    pub fn rcbll_ctrl(&self) -> RCBLL_CTRL_R {
        RCBLL_CTRL_R::new((self.bits & 0x01) != 0)
    }
    #[doc = "Bit 1 - N/A"]
    #[inline(always)]
    pub fn rcbll_cpu_req(&self) -> RCBLL_CPU_REQ_R {
        RCBLL_CPU_REQ_R::new(((self.bits >> 1) & 0x01) != 0)
    }
    #[doc = "Bit 2 - N/A"]
    #[inline(always)]
    pub fn cpu_single_write(&self) -> CPU_SINGLE_WRITE_R {
        CPU_SINGLE_WRITE_R::new(((self.bits >> 2) & 0x01) != 0)
    }
    #[doc = "Bit 3 - N/A"]
    #[inline(always)]
    pub fn cpu_single_read(&self) -> CPU_SINGLE_READ_R {
        CPU_SINGLE_READ_R::new(((self.bits >> 3) & 0x01) != 0)
    }
    #[doc = "Bit 4 - N/A"]
    #[inline(always)]
    pub fn allow_cpu_access_tx_rx(&self) -> ALLOW_CPU_ACCESS_TX_RX_R {
        ALLOW_CPU_ACCESS_TX_RX_R::new(((self.bits >> 4) & 0x01) != 0)
    }
    #[doc = "Bit 5 - N/A"]
    #[inline(always)]
    pub fn enable_radio_bod(&self) -> ENABLE_RADIO_BOD_R {
        ENABLE_RADIO_BOD_R::new(((self.bits >> 5) & 0x01) != 0)
    }
}
// Write-proxy constructors, one per CTRL bit field.
impl W {
    #[doc = "Bit 0 - N/A"]
    #[inline(always)]
    pub fn rcbll_ctrl(&mut self) -> RCBLL_CTRL_W {
        RCBLL_CTRL_W { w: self }
    }
    #[doc = "Bit 1 - N/A"]
    #[inline(always)]
    pub fn rcbll_cpu_req(&mut self) -> RCBLL_CPU_REQ_W {
        RCBLL_CPU_REQ_W { w: self }
    }
    #[doc = "Bit 2 - N/A"]
    #[inline(always)]
    pub fn cpu_single_write(&mut self) -> CPU_SINGLE_WRITE_W {
        CPU_SINGLE_WRITE_W { w: self }
    }
    #[doc = "Bit 3 - N/A"]
    #[inline(always)]
    pub fn cpu_single_read(&mut self) -> CPU_SINGLE_READ_W {
        CPU_SINGLE_READ_W { w: self }
    }
    #[doc = "Bit 4 - N/A"]
    #[inline(always)]
    pub fn allow_cpu_access_tx_rx(&mut self) -> ALLOW_CPU_ACCESS_TX_RX_W {
        ALLOW_CPU_ACCESS_TX_RX_W { w: self }
    }
    #[doc = "Bit 5 - N/A"]
    #[inline(always)]
    pub fn enable_radio_bod(&mut self) -> ENABLE_RADIO_BOD_W {
        ENABLE_RADIO_BOD_W { w: self }
    }
}
|
#![cfg(not(target_arch = "wasm32"))]
//! Surface syntax tests of asynchrony and networking.
use std::collections::{BTreeSet, HashSet};
use std::error::Error;
use std::net::{Ipv4Addr, SocketAddr};
use std::time::Duration;
use bytes::Bytes;
use hydroflow::scheduled::graph::Hydroflow;
use hydroflow::util::{collect_ready_async, ready_iter, tcp_lines};
use hydroflow::{assert_graphvis_snapshots, hydroflow_syntax, rassert, rassert_eq};
use multiplatform_test::multiplatform_test;
use tokio::net::{TcpListener, TcpStream, UdpSocket};
use tokio::task::LocalSet;
use tokio_util::codec::{BytesCodec, FramedWrite, LinesCodec};
use tracing::Instrument;
/// End-to-end UDP echo: a hydroflow server echoes lines back to two
/// hydroflow clients; every side records what it saw and asserts on it.
#[multiplatform_test(hydroflow, env_tracing)]
pub async fn test_echo_udp() -> Result<(), Box<dyn Error>> {
    let local = LocalSet::new();
    // Port 0 -> pick any available port; clients dial loopback.
    let server_socket = UdpSocket::bind((Ipv4Addr::UNSPECIFIED, 0)).await?;
    let server_addr = server_socket.local_addr()?;
    let server_addr: SocketAddr = (Ipv4Addr::LOCALHOST, server_addr.port()).into();
    // Server:
    let serv = local.spawn_local(async {
        let socket = server_socket;
        let (udp_send, udp_recv, _) = hydroflow::util::udp_lines(socket);
        println!("Server live!");
        let (seen_send, seen_recv) = hydroflow::util::unbounded_channel();
        let mut df: Hydroflow = hydroflow_syntax! {
            recv = source_stream(udp_recv)
                -> map(|r| r.unwrap())
                -> tee();
            // Echo
            recv[0] -> dest_sink(udp_send);
            // Testing
            recv[1] -> map(|(s, _addr)| s) -> for_each(|s| seen_send.send(s).unwrap());
        };
        // Let the graph run for up to a second, then stop and inspect.
        tokio::select! {
            _ = df.run_async() => (),
            _ = tokio::time::sleep(Duration::from_secs(1)) => (),
        };
        let seen: HashSet<_> = collect_ready_async(seen_recv).await;
        rassert_eq!(4, seen.len())?;
        rassert!(seen.contains("Hello"))?;
        rassert!(seen.contains("World"))?;
        rassert!(seen.contains("Raise"))?;
        rassert!(seen.contains("Count"))?;
        Ok(()) as Result<(), Box<dyn Error>>
    });
    // Client A:
    let client_a = local.spawn_local(async move {
        let socket = UdpSocket::bind((Ipv4Addr::UNSPECIFIED, 0))
            .await
            .unwrap();
        let (send_udp, recv_udp, _) = hydroflow::util::udp_lines(socket);
        let (seen_send, seen_recv) = hydroflow::util::unbounded_channel();
        let mut df = hydroflow_syntax! {
            recv = source_stream(recv_udp)
                -> map(|r| r.unwrap())
                -> tee();
            recv[0] -> for_each(|x| println!("client A recv: {:?}", x));
            recv[1] -> map(|(s, _addr)| s) -> for_each(|s| seen_send.send(s).unwrap());
            // Sending
            source_iter([ "Hello", "World" ]) -> map(|s| (s.to_owned(), server_addr)) -> dest_sink(send_udp);
        };
        tokio::select! {
            _ = df.run_async() => (),
            _ = tokio::time::sleep(Duration::from_secs(1)) => (),
        };
        let seen: Vec<_> = collect_ready_async(seen_recv).await;
        rassert_eq!(&["Hello".to_owned(), "World".to_owned()], &*seen)?;
        Ok(()) as Result<(), Box<dyn Error>>
    });
    // Client B:
    let client_b = local.spawn_local(async move {
        let socket = UdpSocket::bind((Ipv4Addr::UNSPECIFIED, 0))
            .await
            .unwrap();
        let (send_udp, recv_udp, _) = hydroflow::util::udp_lines(socket);
        let (seen_send, seen_recv) = hydroflow::util::unbounded_channel();
        let mut df = hydroflow_syntax! {
            recv = source_stream(recv_udp)
                -> map(|r| r.unwrap())
                -> tee();
            recv[0] -> for_each(|x| println!("client B recv: {:?}", x));
            recv[1] -> map(|(s, _addr)| s) -> for_each(|s| seen_send.send(s).unwrap());
            // Sending
            source_iter([ "Raise", "Count" ]) -> map(|s| (s.to_owned(), server_addr)) -> dest_sink(send_udp);
        };
        tokio::select! {
            _ = df.run_async() => (),
            _ = tokio::time::sleep(Duration::from_secs(1)) => (),
        };
        let seen: Vec<_> = collect_ready_async(seen_recv).await;
        rassert_eq!(&["Raise".to_owned(), "Count".to_owned()], &*seen)?;
        Ok(()) as Result<(), Box<dyn Error>>
    });
    local.await;
    serv.await??;
    client_a.await??;
    client_b.await??;
    Ok(())
}
/// End-to-end TCP echo over loopback: the server echoes lines, the client
/// sends "Hello"/"World", and both sides assert on what they observed.
#[multiplatform_test(hydroflow, env_tracing)]
pub async fn test_echo_tcp() -> Result<(), Box<dyn Error>> {
    let local = LocalSet::new();
    // Port 0 -> picks any available port.
    let listener = TcpListener::bind((std::net::Ipv4Addr::LOCALHOST, 0)).await?;
    let addr = listener.local_addr()?;
    // Server:
    let serv = local.spawn_local(async move {
        let (server_stream, _) = listener.accept().await?;
        let (server_send, server_recv) = tcp_lines(server_stream);
        println!("Server accepted connection!");
        let (seen_send, seen_recv) = hydroflow::util::unbounded_channel();
        let mut df: Hydroflow = hydroflow_syntax! {
            rev = source_stream(server_recv)
                -> map(|x| x.unwrap())
                -> tee();
            rev[0] -> dest_sink(server_send);
            rev[1] -> for_each(|s| seen_send.send(s).unwrap());
        };
        // `run_async` never returns on its own; the timeout ends the run.
        tokio::time::timeout(Duration::from_secs(1), df.run_async())
            .await
            .expect_err("Expected time out");
        let seen: Vec<_> = collect_ready_async(seen_recv).await;
        rassert_eq!(&["Hello".to_owned(), "World".to_owned()], &*seen)?;
        Ok(()) as Result<(), Box<dyn Error>>
    });
    // Client:
    let client = local.spawn_local(async move {
        let client_stream = TcpStream::connect(addr).await?;
        let (client_send, client_recv) = tcp_lines(client_stream);
        println!("Client connected!");
        let (seen_send, seen_recv) = hydroflow::util::unbounded_channel();
        let mut df = hydroflow_syntax! {
            recv = source_stream(client_recv)
                -> map(|x| x.unwrap())
                -> tee();
            recv[0] -> for_each(|s| println!("echo {}", s));
            recv[1] -> for_each(|s| seen_send.send(s).unwrap());
            source_iter([
                "Hello",
                "World",
            ]) -> dest_sink(client_send);
        };
        println!("Client running!");
        tokio::time::timeout(Duration::from_secs(1), df.run_async())
            .await
            .expect_err("Expected time out");
        let seen: Vec<_> = collect_ready_async(seen_recv).await;
        rassert_eq!(&["Hello".to_owned(), "World".to_owned()], &*seen)?;
        Ok(()) as Result<(), Box<dyn Error>>
    });
    local.await;
    serv.await??;
    client.await??;
    Ok(())
}
/// Echoes strings from an in-process channel to stdout via `dest_sink`.
#[multiplatform_test(hydroflow, env_tracing)]
pub async fn test_echo() {
    // Channel of input lines to be echoed to stdout.
    let (lines_send, lines_recv) = hydroflow::util::unbounded_channel::<String>();
    // LinesCodec separates each line from `lines_recv` with `\n`.
    let stdout_lines = FramedWrite::new(tokio::io::stdout(), LinesCodec::new());
    let mut df: Hydroflow = hydroflow_syntax! {
        source_stream(lines_recv) -> dest_sink(stdout_lines);
    };
    assert_graphvis_snapshots!(df);
    df.run_available();
    lines_send.send("Hello".to_owned()).unwrap();
    lines_send.send("World".to_owned()).unwrap();
    df.run_available();
    lines_send.send("Hello".to_owned()).unwrap();
    lines_send.send("World".to_owned()).unwrap();
    df.run_available();
    // Allow background thread to catch up.
    tokio::time::sleep(Duration::from_secs(1)).await;
}
/// Feedback loop through a bounded futures channel: each value is read,
/// incremented, and written back until `MAX` is reached.
#[multiplatform_test(hydroflow, env_tracing)]
pub async fn test_futures_stream_sink() {
    const MAX: usize = 20;
    let (mut send, recv) = hydroflow::futures::channel::mpsc::channel::<usize>(5);
    // Seed the loop with the first value.
    send.try_send(0).unwrap();
    let (seen_send, seen_recv) = hydroflow::util::unbounded_channel();
    let mut df = hydroflow_syntax! {
        recv = source_stream(recv) -> tee();
        recv[0] -> map(|x| x + 1)
            -> filter(|&x| x < MAX)
            -> dest_sink(send);
        recv[1] -> for_each(|x| seen_send.send(x).unwrap());
    };
    tokio::time::timeout(Duration::from_secs(1), df.run_async())
        .await
        .expect_err("Expected timeout, `run_async` doesn't return.");
    let seen: Vec<_> = collect_ready_async(seen_recv).await;
    assert_eq!(&std::array::from_fn::<_, MAX, _>(|i| i), &*seen);
}
/// Demonstrates `dest_sink` behavior against a bounded channel.
#[multiplatform_test(hydroflow, env_tracing)]
async fn asynctest_dest_sink_bounded_channel() {
    // In this example we use a _bounded_ channel for our `Sink`. This is for demonstration only,
    // instead you should use [`hydroflow::util::unbounded_channel`]. A bounded channel results in
    // `Hydroflow` buffering items internally instead of within the channel.
    let (send, recv) = tokio::sync::mpsc::channel::<usize>(5);
    let send = tokio_util::sync::PollSender::new(send);
    let mut recv = tokio_stream::wrappers::ReceiverStream::new(recv);
    let mut flow = hydroflow_syntax! {
        source_iter(0..10) -> dest_sink(send);
    };
    tokio::time::timeout(std::time::Duration::from_secs(1), flow.run_async())
        .await
        .expect_err("Expected time out");
    // Only 5 elements received due to buffer size.
    let out: Vec<_> = ready_iter(&mut recv).collect();
    assert_eq!(&[0, 1, 2, 3, 4], &*out);
    // Yield so the remaining buffered items can be flushed into the channel.
    tokio::task::yield_now().await;
    let out: Vec<_> = ready_iter(&mut recv).collect();
    assert_eq!(&[5, 6, 7, 8, 9], &*out);
}
/// Writes length-delimited frames through `dest_sink` into a duplex pipe.
#[multiplatform_test(hydroflow, env_tracing)]
async fn asynctest_dest_sink_duplex() {
    use bytes::Bytes;
    use tokio::io::AsyncReadExt;
    // Like a channel, but for a stream of bytes instead of discrete objects.
    let (asyncwrite, mut asyncread) = tokio::io::duplex(256);
    // Now instead handle discrete byte lists by length-encoding them.
    let sink = codec::LengthDelimitedCodec::builder()
        // Use 1 byte len field (max 255) so we don't have to worry about endianness.
        .length_field_length(1)
        .new_write(asyncwrite);
    let mut flow = hydroflow_syntax! {
        source_iter([
            Bytes::from_static(b"hello"),
            Bytes::from_static(b"world"),
        ]) -> dest_sink(sink);
    };
    tokio::time::timeout(std::time::Duration::from_secs(1), flow.run_async())
        .await
        .expect_err("Expected time out");
    let mut buf = Vec::<u8>::new();
    asyncread.read_buf(&mut buf).await.unwrap();
    // `\x05` is the 1-byte length prefix (5) of each chunk.
    assert_eq!(b"\x05hello\x05world", &*buf);
}
/// Writes raw `Bytes` through `dest_sink` into an `AsyncWrite` duplex pipe.
#[multiplatform_test(hydroflow, env_tracing)]
async fn asynctest_dest_asyncwrite_duplex() {
    use tokio::io::AsyncReadExt;
    // Like a channel, but for a stream of bytes instead of discrete objects.
    // This could be an output file, network port, stdout, etc.
    let (asyncwrite, mut asyncread) = tokio::io::duplex(256);
    let sink = FramedWrite::new(asyncwrite, BytesCodec::new());
    let mut flow = hydroflow_syntax! {
        source_iter([
            Bytes::from_static("hello".as_bytes()),
            Bytes::from_static("world".as_bytes()),
        ]) -> dest_sink(sink);
    };
    tokio::time::timeout(std::time::Duration::from_secs(1), flow.run_async())
        .await
        .expect_err("Expected time out");
    let mut buf = Vec::<u8>::new();
    asyncread.read_buf(&mut buf).await.unwrap();
    // `BytesCodec` adds no framing, so the chunks arrive concatenated.
    assert_eq!(b"helloworld", &*buf);
}
/// Chains two hydroflow graphs through channels: a -> graph1 -> b -> graph2.
#[multiplatform_test(hydroflow, env_tracing)]
async fn asynctest_source_stream() {
    LocalSet::new()
        .run_until(async {
            let (a_send, a_recv) = hydroflow::util::unbounded_channel::<usize>();
            let (b_send, b_recv) = hydroflow::util::unbounded_channel::<usize>();
            // Graph 1: forwards everything from `a` into `b`.
            tokio::task::spawn_local(async move {
                let mut flow = hydroflow_syntax! {
                    source_stream(a_recv) -> for_each(|x| { b_send.send(x).unwrap(); });
                };
                flow.run_async().await.unwrap();
            });
            // Graph 2: prints whatever arrives on `b`.
            tokio::task::spawn_local(async move {
                let mut flow = hydroflow_syntax! {
                    source_stream(b_recv) -> for_each(|x| println!("{}", x));
                };
                flow.run_async().await.unwrap();
            });
            a_send.send(1).unwrap();
            a_send.send(2).unwrap();
            a_send.send(3).unwrap();
            tokio::task::yield_now().await;
        })
        .await;
}
/// Check to make sure hf.run_async() does not hang due to replaying stateful operators saturating
/// `run_available()`.
///
/// This test is a little bit race-ey... if for some insane reason a tick (task_b) runs longer than
/// the send loop delay (task_a).
#[multiplatform_test(hydroflow, env_tracing)]
async fn asynctest_check_state_yielding() {
    LocalSet::new()
        .run_until(async {
            let (a_send, a_recv) = hydroflow::util::unbounded_channel::<usize>();
            let (b_send, mut b_recv) = hydroflow::util::unbounded_channel::<usize>();
            // Sender: one value every 100 ms.
            let task_a = tokio::task::spawn_local(
                async move {
                    for a in 0..10 {
                        tokio::time::sleep(Duration::from_millis(100)).await;
                        tracing::debug!(a = a, "Sending.");
                        a_send.send(a).unwrap();
                    }
                }
                .instrument(tracing::debug_span!("task_a")),
            );
            // Receiver: running sum via a stateful (`'static`) reduce.
            let task_b = tokio::task::spawn_local(
                async move {
                    let mut hf = hydroflow_syntax! {
                        source_stream(a_recv)
                            -> reduce::<'static>(|a: &mut _, b| *a += b)
                            -> for_each(|x| b_send.send(x).unwrap());
                    };
                    // Run 100 millis longer than the sending task.
                    let done_sending = async {
                        task_a.await.unwrap();
                        tokio::time::sleep(Duration::from_millis(100)).await;
                    };
                    tokio::select! {
                        _ = done_sending => {
                            tracing::info!("`task_a` (sending) complete.");
                        },
                        _ = hf.run_async() => {
                            panic!("`run_async()` should run forever.");
                        }
                    }
                    // Expect the partial sums 0, 0+1, 0+1+2, ...
                    assert_eq!(
                        [0, 1, 3, 6, 10, 15, 21, 28, 36, 45]
                            .into_iter()
                            .collect::<BTreeSet<_>>(),
                        collect_ready_async(&mut b_recv).await
                    );
                }
                .instrument(tracing::debug_span!("task_b")),
            );
            task_b.await.unwrap();
        })
        .await;
}
/// A single `run_available_async` pass emits a persisted iterator once.
#[multiplatform_test(hydroflow, env_tracing)]
async fn asynctest_repeat_iter() {
    let (b_send, b_recv) = hydroflow::util::unbounded_channel::<usize>();
    let mut hf = hydroflow_syntax! {
        source_iter(0..3) -> persist()
            -> for_each(|x| b_send.send(x).unwrap());
    };
    hf.run_available_async().await;
    let seen: Vec<_> = collect_ready_async(b_recv).await;
    assert_eq!(&[0, 1, 2], &*seen);
}
/// An incoming event triggers another tick, so `persist()` replays `0..3`
/// a second time before the event value `10` arrives (see the final assert).
#[multiplatform_test(hydroflow, env_tracing)]
async fn asynctest_event_repeat_iter() {
    let (a_send, a_recv) = hydroflow::util::unbounded_channel::<usize>();
    let (b_send, b_recv) = hydroflow::util::unbounded_channel::<usize>();
    let mut hf = hydroflow_syntax! {
        source_iter(0..3) -> persist() -> my_union;
        source_stream(a_recv) -> my_union;
        my_union = union() -> for_each(|x| b_send.send(x).unwrap());
    };
    // Fire a single event after 100 ms to cause a second tick.
    tokio::task::spawn(
        async move {
            tokio::time::sleep(Duration::from_millis(100)).await;
            tracing::debug!("sending `10`.");
            a_send.send(10).unwrap();
        }
        .instrument(tracing::debug_span!("sender")),
    );
    tokio::time::timeout(Duration::from_millis(200), hf.run_async())
        .await
        .expect_err("Expected timeout");
    let seen: Vec<_> = collect_ready_async(b_recv).await;
    assert_eq!(&[0, 1, 2, 0, 1, 2, 10], &*seen);
}
/// Echo round-trip using the `bind_tcp_lines` / `connect_tcp_lines` helpers.
#[multiplatform_test(hydroflow, env_tracing)]
async fn asynctest_tcp() {
    let (tx_out, rx_out) = hydroflow::util::unbounded_channel::<String>();
    // Server: echo every successfully-decoded line back to its sender.
    let (tx, rx, server_addr) =
        hydroflow::util::bind_tcp_lines("127.0.0.1:0".parse().unwrap()).await;
    let mut echo_server = hydroflow_syntax! {
        source_stream(rx)
            -> filter_map(Result::ok)
            -> dest_sink(tx);
    };
    // Client: send one line and forward replies to the test channel.
    let (tx, rx) = hydroflow::util::connect_tcp_lines();
    let mut echo_client = hydroflow_syntax! {
        source_iter([("Hello".to_owned(), server_addr)])
            -> dest_sink(tx);
        source_stream(rx)
            -> filter_map(Result::ok)
            -> map(|(string, _)| string)
            -> for_each(|x| tx_out.send(x).unwrap());
    };
    tokio::time::timeout(
        Duration::from_millis(200),
        futures::future::join(echo_server.run_async(), echo_client.run_async()),
    )
    .await
    .expect_err("Expected timeout");
    let seen: Vec<_> = collect_ready_async(rx_out).await;
    assert_eq!(&["Hello".to_owned()], &*seen);
}
/// Echo round-trip using the `bind_udp_lines` helper for both endpoints.
#[multiplatform_test(hydroflow, env_tracing)]
async fn asynctest_udp() {
    let (tx_out, rx_out) = hydroflow::util::unbounded_channel::<String>();
    // Server: echo every successfully-decoded line back to its sender.
    let (tx, rx, server_addr) =
        hydroflow::util::bind_udp_lines("127.0.0.1:0".parse().unwrap()).await;
    let mut echo_server = hydroflow_syntax! {
        source_stream(rx)
            -> filter_map(Result::ok)
            -> dest_sink(tx);
    };
    // Client: send one line and forward replies to the test channel.
    let (tx, rx, _) = hydroflow::util::bind_udp_lines("127.0.0.1:0".parse().unwrap()).await;
    let mut echo_client = hydroflow_syntax! {
        source_iter([("Hello".to_owned(), server_addr)])
            -> dest_sink(tx);
        source_stream(rx)
            -> filter_map(Result::ok)
            -> map(|(string, _)| string)
            -> for_each(|x| tx_out.send(x).unwrap());
    };
    tokio::time::timeout(
        Duration::from_millis(200),
        futures::future::join(echo_server.run_async(), echo_client.run_async()),
    )
    .await
    .expect_err("Expected timeout");
    let seen: Vec<_> = collect_ready_async(rx_out).await;
    assert_eq!(&["Hello".to_owned()], &*seen);
}
|
use glium;
use glium::{Display, Surface};
use glium::texture::{Texture2d, ClientFormat, RawImage2d};
use std::borrow::Cow;
use tile_net::TileNet;
use std;
// Re-export for configuration
pub use glium::uniforms::MinifySamplerFilter;
pub use glium::uniforms::MagnifySamplerFilter;
/// Draws a `TileNet<u8>` as a fullscreen textured quad.
pub struct Renderer {
    // Dimensions of the tile net, mirrored into the `texsize` uniform.
    net_width: usize,
    net_height: usize,
    // OpenGL
    shader_prg: glium::Program,
    quad_vbo: glium::VertexBuffer<Vertex>,
    texture: Texture2d,
    // Uniforms/config
    bg_col: [f32; 3],
    minify_filter: MinifySamplerFilter,
    magnify_filter: MagnifySamplerFilter,
    smooth: bool,
}
impl Renderer {
    /// Creates a renderer for `net`, compiling the shaders and uploading the
    /// initial tile texture.
    pub fn new(display: Display, net: &TileNet<u8>) -> Renderer {
        let vert_src = include_str!("../../shaders/xyuv_tex.vert");
        let frag_src = include_str!("../../shaders/xyuv_tex.frag");
        let shader_prg = glium::Program::from_source(&display, vert_src, frag_src, None).unwrap();
        // Two triangles covering the whole viewport.
        let fullscreen_quad = vec![Vertex { pos: [-1.0, -1.0] },
                                   Vertex { pos: [1.0, -1.0] },
                                   Vertex { pos: [1.0, 1.0] },
                                   Vertex { pos: [1.0, 1.0] },
                                   Vertex { pos: [-1.0, 1.0] },
                                   Vertex { pos: [-1.0, -1.0] }];
        let quad_vbo = ::glium::VertexBuffer::new(&display, &fullscreen_quad).unwrap();
        // Start from an all-zero texture; the real contents are uploaded below.
        let texture_data: Vec<Vec<u8>> = vec![vec![0; net.get_size().0]; net.get_size().1];
        let texture = glium::texture::Texture2d::new(&display, texture_data).unwrap();
        let mut new = Renderer {
            net_width: net.get_size().0,
            net_height: net.get_size().1,
            shader_prg: shader_prg,
            quad_vbo: quad_vbo,
            texture: texture,
            bg_col: [0.5, 0.5, 0.5],
            minify_filter: MinifySamplerFilter::Nearest,
            magnify_filter: MagnifySamplerFilter::Nearest,
            smooth: true,
        };
        new.upload_entire_texture(net);
        new
    }
    /// Sets the background color uniform.
    pub fn set_bg_col(&mut self, r: f32, g: f32, b: f32) {
        self.bg_col = [r, g, b];
    }
    pub fn set_minify_filter(&mut self, filter: MinifySamplerFilter) {
        self.minify_filter = filter;
    }
    pub fn set_magnify_filter(&mut self, filter: MagnifySamplerFilter) {
        self.magnify_filter = filter;
    }
    pub fn set_smooth(&mut self, to: bool) {
        self.smooth = to;
    }
    /// Returns whether smoothing is enabled. (Takes `&self`: reading the
    /// flag needs no mutable access; existing `&mut` call sites still work.)
    pub fn get_smooth(&self) -> bool {
        self.smooth
    }
    pub fn toggle_smooth(&mut self) {
        self.smooth = !self.smooth;
    }
    /// Draws the tile texture onto `target`, centered on `center` at the
    /// given `zoom`, for a `width` x `height` viewport.
    pub fn render(&mut self,
                  target: &mut glium::Frame,
                  center: (f32, f32),
                  zoom: f32,
                  width: u32,
                  height: u32) {
        let uniforms = uniform! (
            sampler: glium::uniforms::Sampler::new(&self.texture)
                .wrap_function(glium::uniforms::SamplerWrapFunction::Clamp)
                .minify_filter(self.minify_filter)
                .magnify_filter(self.magnify_filter),
            view_size: [width as f32 / zoom, height as f32 / zoom],
            texsize: [self.net_width as f32, self.net_height as f32],
            screen_center: [center.0, center.1],
            bg_col: self.bg_col,
            smooth_: self.smooth,
        );
        let indices = glium::index::NoIndices(glium::index::PrimitiveType::TrianglesList);
        target.draw(self.quad_vbo.slice(0..6).unwrap(),
                    indices,
                    &self.shader_prg,
                    &uniforms,
                    &Default::default())
            .unwrap();
    }
    /// Re-uploads the whole tile net into the texture.
    pub fn upload_entire_texture(&mut self, net: &TileNet<u8>) {
        let net_size = net.get_size();
        self.upload_texture(net, 0, 0, net_size.0 as u32, net_size.1 as u32);
    }
    /// Uploads the given sub-rectangle of `net` into the texture.
    pub fn upload_texture(&mut self, net: &TileNet<u8>, left: u32, bottom: u32, width: u32, height: u32) {
        let upload_area = glium::Rect {
            left: left,
            bottom: bottom,
            width: width,
            height: height,
        };
        let pixels: Vec<u8> = net.view_box((left as usize,
                                            (left + width) as usize,
                                            bottom as usize,
                                            (bottom + height) as usize))
            .map(|x| *x.0)
            .collect();
        assert!(pixels.len() == (width * height) as usize);
        let upload_data = RawImage2d {
            data: Cow::Borrowed(&pixels),
            width: width,
            height: height,
            format: ClientFormat::U8,
        };
        self.texture.write(upload_area, upload_data);
    }
}
// For rendering: clip-space position of one corner of the fullscreen quad.
#[derive(Copy, Clone)]
struct Vertex {
    pos: [f32; 2],
}
implement_vertex!(Vertex, pos);
|
// svd2rust-generated reader/writer type aliases for the AFIO_MAPR fields.
// BitReader/BitWriter are 1-bit fields; FieldReader/FieldWriter carry the
// field width as a const generic (2 or 3 bits here).
#[doc = "Register `MAPR` reader"]
pub type R = crate::R<MAPR_SPEC>;
#[doc = "Register `MAPR` writer"]
pub type W = crate::W<MAPR_SPEC>;
#[doc = "Field `SPI1_REMAP` reader - SPI1 remapping"]
pub type SPI1_REMAP_R = crate::BitReader;
#[doc = "Field `SPI1_REMAP` writer - SPI1 remapping"]
pub type SPI1_REMAP_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `I2C1_REMAP` reader - I2C1 remapping"]
pub type I2C1_REMAP_R = crate::BitReader;
#[doc = "Field `I2C1_REMAP` writer - I2C1 remapping"]
pub type I2C1_REMAP_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `USART1_REMAP` reader - USART1 remapping"]
pub type USART1_REMAP_R = crate::BitReader;
#[doc = "Field `USART1_REMAP` writer - USART1 remapping"]
pub type USART1_REMAP_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `USART2_REMAP` reader - USART2 remapping"]
pub type USART2_REMAP_R = crate::BitReader;
#[doc = "Field `USART2_REMAP` writer - USART2 remapping"]
pub type USART2_REMAP_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `USART3_REMAP` reader - USART3 remapping"]
pub type USART3_REMAP_R = crate::FieldReader;
#[doc = "Field `USART3_REMAP` writer - USART3 remapping"]
pub type USART3_REMAP_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 2, O>;
#[doc = "Field `TIM1_REMAP` reader - TIM1 remapping"]
pub type TIM1_REMAP_R = crate::FieldReader;
#[doc = "Field `TIM1_REMAP` writer - TIM1 remapping"]
pub type TIM1_REMAP_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 2, O>;
#[doc = "Field `TIM2_REMAP` reader - TIM2 remapping"]
pub type TIM2_REMAP_R = crate::FieldReader;
#[doc = "Field `TIM2_REMAP` writer - TIM2 remapping"]
pub type TIM2_REMAP_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 2, O>;
#[doc = "Field `TIM3_REMAP` reader - TIM3 remapping"]
pub type TIM3_REMAP_R = crate::FieldReader;
#[doc = "Field `TIM3_REMAP` writer - TIM3 remapping"]
pub type TIM3_REMAP_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 2, O>;
#[doc = "Field `TIM4_REMAP` reader - TIM4 remapping"]
pub type TIM4_REMAP_R = crate::BitReader;
#[doc = "Field `TIM4_REMAP` writer - TIM4 remapping"]
pub type TIM4_REMAP_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `CAN1_REMAP` reader - CAN1 remapping"]
pub type CAN1_REMAP_R = crate::FieldReader;
#[doc = "Field `CAN1_REMAP` writer - CAN1 remapping"]
pub type CAN1_REMAP_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 2, O>;
#[doc = "Field `PD01_REMAP` reader - Port D0/Port D1 mapping on OSCIN/OSCOUT"]
pub type PD01_REMAP_R = crate::BitReader;
#[doc = "Field `PD01_REMAP` writer - Port D0/Port D1 mapping on OSCIN/OSCOUT"]
pub type PD01_REMAP_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `TIM5CH4_IREMAP` reader - Set and cleared by software"]
pub type TIM5CH4_IREMAP_R = crate::BitReader;
#[doc = "Field `TIM5CH4_IREMAP` writer - Set and cleared by software"]
pub type TIM5CH4_IREMAP_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `ETH_REMAP` reader - Ethernet MAC I/O remapping"]
pub type ETH_REMAP_R = crate::BitReader;
#[doc = "Field `ETH_REMAP` writer - Ethernet MAC I/O remapping"]
pub type ETH_REMAP_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `CAN2_REMAP` reader - CAN2 I/O remapping"]
pub type CAN2_REMAP_R = crate::BitReader;
#[doc = "Field `CAN2_REMAP` writer - CAN2 I/O remapping"]
pub type CAN2_REMAP_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `MII_RMII_SEL` reader - MII or RMII selection"]
pub type MII_RMII_SEL_R = crate::BitReader;
#[doc = "Field `MII_RMII_SEL` writer - MII or RMII selection"]
pub type MII_RMII_SEL_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
// SWJ_CFG is write-only: no matching reader alias is generated.
#[doc = "Field `SWJ_CFG` writer - Serial wire JTAG configuration"]
pub type SWJ_CFG_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 3, O>;
#[doc = "Field `SPI3_REMAP` reader - SPI3/I2S3 remapping"]
pub type SPI3_REMAP_R = crate::BitReader;
#[doc = "Field `SPI3_REMAP` writer - SPI3/I2S3 remapping"]
pub type SPI3_REMAP_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `TIM2ITR1_IREMAP` reader - TIM2 internal trigger 1 remapping"]
pub type TIM2ITR1_IREMAP_R = crate::BitReader;
#[doc = "Field `TIM2ITR1_IREMAP` writer - TIM2 internal trigger 1 remapping"]
pub type TIM2ITR1_IREMAP_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `PTP_PPS_REMAP` reader - Ethernet PTP PPS remapping"]
pub type PTP_PPS_REMAP_R = crate::BitReader;
#[doc = "Field `PTP_PPS_REMAP` writer - Ethernet PTP PPS remapping"]
pub type PTP_PPS_REMAP_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
// Read accessors: each extracts its field by shifting the raw register
// value right to the field's bit offset and masking to the field width.
impl R {
    #[doc = "Bit 0 - SPI1 remapping"]
    #[inline(always)]
    pub fn spi1_remap(&self) -> SPI1_REMAP_R {
        SPI1_REMAP_R::new((self.bits & 1) != 0)
    }
    #[doc = "Bit 1 - I2C1 remapping"]
    #[inline(always)]
    pub fn i2c1_remap(&self) -> I2C1_REMAP_R {
        I2C1_REMAP_R::new(((self.bits >> 1) & 1) != 0)
    }
    #[doc = "Bit 2 - USART1 remapping"]
    #[inline(always)]
    pub fn usart1_remap(&self) -> USART1_REMAP_R {
        USART1_REMAP_R::new(((self.bits >> 2) & 1) != 0)
    }
    #[doc = "Bit 3 - USART2 remapping"]
    #[inline(always)]
    pub fn usart2_remap(&self) -> USART2_REMAP_R {
        USART2_REMAP_R::new(((self.bits >> 3) & 1) != 0)
    }
    #[doc = "Bits 4:5 - USART3 remapping"]
    #[inline(always)]
    pub fn usart3_remap(&self) -> USART3_REMAP_R {
        USART3_REMAP_R::new(((self.bits >> 4) & 3) as u8)
    }
    #[doc = "Bits 6:7 - TIM1 remapping"]
    #[inline(always)]
    pub fn tim1_remap(&self) -> TIM1_REMAP_R {
        TIM1_REMAP_R::new(((self.bits >> 6) & 3) as u8)
    }
    #[doc = "Bits 8:9 - TIM2 remapping"]
    #[inline(always)]
    pub fn tim2_remap(&self) -> TIM2_REMAP_R {
        TIM2_REMAP_R::new(((self.bits >> 8) & 3) as u8)
    }
    #[doc = "Bits 10:11 - TIM3 remapping"]
    #[inline(always)]
    pub fn tim3_remap(&self) -> TIM3_REMAP_R {
        TIM3_REMAP_R::new(((self.bits >> 10) & 3) as u8)
    }
    #[doc = "Bit 12 - TIM4 remapping"]
    #[inline(always)]
    pub fn tim4_remap(&self) -> TIM4_REMAP_R {
        TIM4_REMAP_R::new(((self.bits >> 12) & 1) != 0)
    }
    #[doc = "Bits 13:14 - CAN1 remapping"]
    #[inline(always)]
    pub fn can1_remap(&self) -> CAN1_REMAP_R {
        CAN1_REMAP_R::new(((self.bits >> 13) & 3) as u8)
    }
    #[doc = "Bit 15 - Port D0/Port D1 mapping on OSCIN/OSCOUT"]
    #[inline(always)]
    pub fn pd01_remap(&self) -> PD01_REMAP_R {
        PD01_REMAP_R::new(((self.bits >> 15) & 1) != 0)
    }
    #[doc = "Bit 16 - Set and cleared by software"]
    #[inline(always)]
    pub fn tim5ch4_iremap(&self) -> TIM5CH4_IREMAP_R {
        TIM5CH4_IREMAP_R::new(((self.bits >> 16) & 1) != 0)
    }
    // Bits 17..=20 and 24..=27 have no readers: SWJ_CFG (24:26) is
    // write-only and the remaining bits are reserved in this register.
    #[doc = "Bit 21 - Ethernet MAC I/O remapping"]
    #[inline(always)]
    pub fn eth_remap(&self) -> ETH_REMAP_R {
        ETH_REMAP_R::new(((self.bits >> 21) & 1) != 0)
    }
    #[doc = "Bit 22 - CAN2 I/O remapping"]
    #[inline(always)]
    pub fn can2_remap(&self) -> CAN2_REMAP_R {
        CAN2_REMAP_R::new(((self.bits >> 22) & 1) != 0)
    }
    #[doc = "Bit 23 - MII or RMII selection"]
    #[inline(always)]
    pub fn mii_rmii_sel(&self) -> MII_RMII_SEL_R {
        MII_RMII_SEL_R::new(((self.bits >> 23) & 1) != 0)
    }
    #[doc = "Bit 28 - SPI3/I2S3 remapping"]
    #[inline(always)]
    pub fn spi3_remap(&self) -> SPI3_REMAP_R {
        SPI3_REMAP_R::new(((self.bits >> 28) & 1) != 0)
    }
    #[doc = "Bit 29 - TIM2 internal trigger 1 remapping"]
    #[inline(always)]
    pub fn tim2itr1_iremap(&self) -> TIM2ITR1_IREMAP_R {
        TIM2ITR1_IREMAP_R::new(((self.bits >> 29) & 1) != 0)
    }
    #[doc = "Bit 30 - Ethernet PTP PPS remapping"]
    #[inline(always)]
    pub fn ptp_pps_remap(&self) -> PTP_PPS_REMAP_R {
        PTP_PPS_REMAP_R::new(((self.bits >> 30) & 1) != 0)
    }
}
// Write proxies: each method returns a typed writer positioned at the
// field's bit offset (the const generic); the writer mutates `self.bits`.
impl W {
    #[doc = "Bit 0 - SPI1 remapping"]
    #[inline(always)]
    #[must_use]
    pub fn spi1_remap(&mut self) -> SPI1_REMAP_W<MAPR_SPEC, 0> {
        SPI1_REMAP_W::new(self)
    }
    #[doc = "Bit 1 - I2C1 remapping"]
    #[inline(always)]
    #[must_use]
    pub fn i2c1_remap(&mut self) -> I2C1_REMAP_W<MAPR_SPEC, 1> {
        I2C1_REMAP_W::new(self)
    }
    #[doc = "Bit 2 - USART1 remapping"]
    #[inline(always)]
    #[must_use]
    pub fn usart1_remap(&mut self) -> USART1_REMAP_W<MAPR_SPEC, 2> {
        USART1_REMAP_W::new(self)
    }
    #[doc = "Bit 3 - USART2 remapping"]
    #[inline(always)]
    #[must_use]
    pub fn usart2_remap(&mut self) -> USART2_REMAP_W<MAPR_SPEC, 3> {
        USART2_REMAP_W::new(self)
    }
    #[doc = "Bits 4:5 - USART3 remapping"]
    #[inline(always)]
    #[must_use]
    pub fn usart3_remap(&mut self) -> USART3_REMAP_W<MAPR_SPEC, 4> {
        USART3_REMAP_W::new(self)
    }
    #[doc = "Bits 6:7 - TIM1 remapping"]
    #[inline(always)]
    #[must_use]
    pub fn tim1_remap(&mut self) -> TIM1_REMAP_W<MAPR_SPEC, 6> {
        TIM1_REMAP_W::new(self)
    }
    #[doc = "Bits 8:9 - TIM2 remapping"]
    #[inline(always)]
    #[must_use]
    pub fn tim2_remap(&mut self) -> TIM2_REMAP_W<MAPR_SPEC, 8> {
        TIM2_REMAP_W::new(self)
    }
    #[doc = "Bits 10:11 - TIM3 remapping"]
    #[inline(always)]
    #[must_use]
    pub fn tim3_remap(&mut self) -> TIM3_REMAP_W<MAPR_SPEC, 10> {
        TIM3_REMAP_W::new(self)
    }
    #[doc = "Bit 12 - TIM4 remapping"]
    #[inline(always)]
    #[must_use]
    pub fn tim4_remap(&mut self) -> TIM4_REMAP_W<MAPR_SPEC, 12> {
        TIM4_REMAP_W::new(self)
    }
    #[doc = "Bits 13:14 - CAN1 remapping"]
    #[inline(always)]
    #[must_use]
    pub fn can1_remap(&mut self) -> CAN1_REMAP_W<MAPR_SPEC, 13> {
        CAN1_REMAP_W::new(self)
    }
    #[doc = "Bit 15 - Port D0/Port D1 mapping on OSCIN/OSCOUT"]
    #[inline(always)]
    #[must_use]
    pub fn pd01_remap(&mut self) -> PD01_REMAP_W<MAPR_SPEC, 15> {
        PD01_REMAP_W::new(self)
    }
    #[doc = "Bit 16 - Set and cleared by software"]
    #[inline(always)]
    #[must_use]
    pub fn tim5ch4_iremap(&mut self) -> TIM5CH4_IREMAP_W<MAPR_SPEC, 16> {
        TIM5CH4_IREMAP_W::new(self)
    }
    #[doc = "Bit 21 - Ethernet MAC I/O remapping"]
    #[inline(always)]
    #[must_use]
    pub fn eth_remap(&mut self) -> ETH_REMAP_W<MAPR_SPEC, 21> {
        ETH_REMAP_W::new(self)
    }
    #[doc = "Bit 22 - CAN2 I/O remapping"]
    #[inline(always)]
    #[must_use]
    pub fn can2_remap(&mut self) -> CAN2_REMAP_W<MAPR_SPEC, 22> {
        CAN2_REMAP_W::new(self)
    }
    #[doc = "Bit 23 - MII or RMII selection"]
    #[inline(always)]
    #[must_use]
    pub fn mii_rmii_sel(&mut self) -> MII_RMII_SEL_W<MAPR_SPEC, 23> {
        MII_RMII_SEL_W::new(self)
    }
    #[doc = "Bits 24:26 - Serial wire JTAG configuration"]
    #[inline(always)]
    #[must_use]
    pub fn swj_cfg(&mut self) -> SWJ_CFG_W<MAPR_SPEC, 24> {
        SWJ_CFG_W::new(self)
    }
    #[doc = "Bit 28 - SPI3/I2S3 remapping"]
    #[inline(always)]
    #[must_use]
    pub fn spi3_remap(&mut self) -> SPI3_REMAP_W<MAPR_SPEC, 28> {
        SPI3_REMAP_W::new(self)
    }
    #[doc = "Bit 29 - TIM2 internal trigger 1 remapping"]
    #[inline(always)]
    #[must_use]
    pub fn tim2itr1_iremap(&mut self) -> TIM2ITR1_IREMAP_W<MAPR_SPEC, 29> {
        TIM2ITR1_IREMAP_W::new(self)
    }
    #[doc = "Bit 30 - Ethernet PTP PPS remapping"]
    #[inline(always)]
    #[must_use]
    pub fn ptp_pps_remap(&mut self) -> PTP_PPS_REMAP_W<MAPR_SPEC, 30> {
        PTP_PPS_REMAP_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        // Unsafe escape hatch: bypasses all per-field typing/masking.
        self.bits = bits;
        self
    }
}
#[doc = "AF remap and debug I/O configuration register (AFIO_MAPR)\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`mapr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`mapr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct MAPR_SPEC;
impl crate::RegisterSpec for MAPR_SPEC {
    // MAPR is a 32-bit register.
    type Ux = u32;
}
#[doc = "`read()` method returns [`mapr::R`](R) reader structure"]
impl crate::Readable for MAPR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`mapr::W`](W) writer structure"]
impl crate::Writable for MAPR_SPEC {
    // No write-1-to-clear / write-0-to-clear fields in this register.
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets MAPR to value 0"]
impl crate::Resettable for MAPR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
// Public crate modules.
pub mod constants;
pub mod handlers;
pub mod report;
pub mod room;
|
// Copyright (c) 2019 - 2020 ESRLabs
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use color_eyre::eyre::{eyre, Result};
use colored::Colorize;
use lazy_static::lazy_static;
use log::{Level, Metadata, Record};
use regex::Regex;
use std::{
sync::{self, mpsc},
time::{Duration, Instant},
};
use tokio::{select, task::spawn_blocking, time};
lazy_static! {
    // Global capture channel: the logger pushes every formatted message into
    // the sender half, tests drain/inspect via the receiver half. Both ends
    // are mutex-wrapped so the statics satisfy `Sync`.
    static ref QUEUE: (
        sync::Mutex<mpsc::Sender<String>>,
        sync::Mutex<mpsc::Receiver<String>>
    ) = {
        let (tx, rx) = mpsc::channel::<String>();
        (sync::Mutex::new(tx), sync::Mutex::new(rx))
    };
    // Reference instant for relative log timestamps (lazily set on first use).
    static ref START: Instant = Instant::now();
}
/// `log::Log` implementation that prints every record to stdout and mirrors
/// the raw message text into the global `QUEUE` for later assertions
/// (see `assume`).
pub struct LogParser;
impl log::Log for LogParser {
    fn enabled(&self, _metadata: &Metadata) -> bool {
        // Accept every record regardless of level/target.
        true
    }
    fn log(&self, record: &Record) {
        // Single-letter, colorized level tag for the console output.
        fn level_format(level: Level) -> String {
            match level {
                Level::Error => "E".red(),
                Level::Warn => "W".truecolor(255, 69, 0),
                Level::Info => "I".normal(),
                Level::Debug => "D".green(),
                Level::Trace => "T".yellow(),
            }
            .to_string()
        }
        // Milliseconds elapsed since START, zero-padded to 10 digits.
        let start = *START;
        println!(
            "{:010} {} {}: {}",
            Instant::now().duration_since(start).as_millis(),
            level_format(record.level()),
            record.module_path().unwrap_or(""),
            record.args().to_string()
        );
        // Mirror only the message body (no timestamp/level) into the queue.
        QUEUE
            .0
            .lock()
            .unwrap()
            .send(record.args().to_string())
            .expect("Logger queue error")
    }
    fn flush(&self) {}
}
/// Drain every pending message from the logger queue so each test run
/// starts from an empty capture buffer.
pub fn reset() {
    let receiver = QUEUE.1.lock().expect("Failed to lock log queue");
    loop {
        if receiver.try_recv().is_err() {
            break;
        }
    }
}
/// Assume the runtime to log a line matching `pattern` within
/// `timeout` seconds.
///
/// Returns `Err` when the timeout elapses before a matching line arrives.
pub async fn assume(pattern: &'static str, timeout: u64) -> Result<()> {
    let assumption = spawn_blocking(move || {
        // Compile the regex once, outside the receive loop — the original
        // rebuilt it for every received log line.
        let regex = Regex::new(pattern).expect("Invalid regex");
        loop {
            match QUEUE.1.lock().unwrap().recv() {
                Ok(n) if regex.is_match(&n) => break Ok(()),
                Ok(_) => continue,
                // Sender dropped: no more log lines can ever arrive.
                Err(e) => break Err(e),
            }
        }
    });
    let timeout = time::sleep(Duration::from_secs(timeout));
    select! {
        _ = timeout => Err(eyre!("Timeout waiting for {}", pattern)),
        _ = assumption => Ok(()),
    }
}
|
use crate::mbr::MasterBootRecord;
use crate::prelude::*;
// #[repr(C, packed)]
// #[derive(Copy, Clone)]
// struct RawUuid(u32, u16, u16, [u8; 8]);
// impl From<&RawUuid> for Uuid {
// fn from(raw: &RawUuid) -> Self {
// Uuid::from_fields(raw.0, raw.1, raw.2, &raw.3).unwrap()
// }
// }
// On-disk GPT header. `#[repr(C, packed)]` so it can be read byte-for-byte
// from disk; field order and widths follow the UEFI GPT layout.
#[repr(C, packed)]
#[derive(Copy, Clone)]
struct Header {
    signature: u64,      // must equal SIGNATURE ("EFI PART" as LE u64)
    revision: u32,       // expected REVISION (1.0)
    header_size: u32,    // expected HEADER_SIZE (92)
    header_crc32: u32,   // CRC32 of the header with this field zeroed
    reserved: u32,
    current_lba: u64,
    copy_lba: u64,       // LBA of the backup header
    first_usable_lba: u64,
    last_usable_lba: u64,
    disk_id: Uuid,
    partition_table_lba: u64,     // where the partition entry array starts
    partition_count: u32,
    partition_entry_size: u32,    // bytes per partition entry
    partition_array_crc32: u32,   // CRC32 of the whole partition array
}
// "EFI PART" interpreted as a little-endian u64.
const SIGNATURE: u64 = 0x5452_4150_2049_4645_u64;
// Byte size of the GPT header proper (the rest of the sector is padding).
const HEADER_SIZE: u32 = 92;
// GPT revision 1.0 encoding.
const REVISION: u32 = 0x0001_0000;
impl Header {
    /// A header is valid when both the GPT signature and the stored CRC32
    /// match what we compute from the header bytes.
    pub fn is_valid(&self) -> bool {
        SIGNATURE == self.signature && self.crc() == self.header_crc32
    }
    /// Computes the header CRC32. Per the GPT layout the `header_crc32`
    /// field itself must be zeroed while checksumming, so work on a copy.
    pub fn crc(&self) -> u32 {
        let mut copy = *self;
        copy.header_crc32 = 0;
        // Fixed mojibake: the original read `struct_crc32(©)` — an
        // HTML-entity-garbled `&copy` — which does not compile.
        crc::struct_crc32(&copy)
    }
}
impl crate::AsByteSlice for Header {
    // Raw byte view of the header for checksumming/serialization.
    // SAFETY: the slice aliases `self` and lives only as long as the borrow;
    // Header is `repr(C, packed)` + `Copy`, so its bytes are plain data.
    unsafe fn as_byte_slice(&self) -> &[u8] {
        core::slice::from_raw_parts(self as *const Self as *const u8, core::mem::size_of::<Self>())
    }
}
// On-disk GPT partition entry (one element of the partition array).
#[repr(C, packed)]
struct RawPartitionRecord {
    partition_type: Uuid,
    partition_id: Uuid,
    first_lba: u64,
    last_lba: u64, // inclusive
    flags: u64,
    name: [u16; 36], // UTF-16 code units, NUL-padded
}
/// Decoded, host-friendly view of one GPT partition entry.
pub struct PartitionInfo {
    pub id: Uuid,
    pub kind: Uuid,
    /// Byte offset of the partition start on disk (first_lba * sector size).
    pub offset: u64,
    /// Partition length in bytes.
    pub length: u64,
    pub flags: u64,
    pub name: String,
}
/// Parsed GPT disk layout: protective MBR, disk GUID and partition list.
pub struct Layout {
    // Kept only so the MBR stays paired with the layout it protects.
    _protective_mbr: MasterBootRecord,
    disk_id: Uuid,
    partitions: Vec<PartitionInfo>,
}
/// Reads and decodes the GPT partition array described by `header`.
/// Stops at the first entry whose partition id is the nil UUID.
fn read_partitions(disk: &impl Disk, header: &Header) -> Result<Vec<PartitionInfo>> {
    unsafe {
        let sector_size = disk.logical_sector_size()?;
        // Round the array size up to whole sectors for the raw read.
        let buffer_size = math::round_up(header.partition_count * header.partition_entry_size, sector_size);
        let mut buffer = crate::alloc_buffer(buffer_size as usize);
        disk.read_exact_at(sector_size as u64 * header.partition_table_lba, buffer.as_mut_slice())?;
        // NOTE(review): the CRC is computed over the sector-padded buffer,
        // not over exactly partition_count * entry_size bytes — confirm this
        // matches how partition_array_crc32 was produced.
        let crc = crc::crc32(&buffer);
        if crc != header.partition_array_crc32 {
            todo!("Invalid partition table crc error"); // InvalidGptCrc
        }
        let mut partitions = Vec::<PartitionInfo>::new();
        for chunk in buffer.chunks_exact(header.partition_entry_size as usize) {
            // SAFETY assumption: entry_size >= size_of::<RawPartitionRecord>()
            // and the packed layout tolerates this unaligned view — TODO confirm.
            let raw = &*(chunk.as_ptr() as *const RawPartitionRecord);
            // A nil partition id marks the end of the used entries.
            if raw.partition_id == Uuid::nil() {
                break;
            }
            let offset = raw.first_lba * sector_size as u64;
            let length = (raw.last_lba - raw.first_lba + 1) * sector_size as u64; // last_lba is inclusive
            partitions.push(PartitionInfo {
                // presumably converts the on-disk GUID byte order to the
                // in-memory Uuid convention — verify against the Uuid impl
                id: raw.partition_id.swap_bytes(),
                kind: raw.partition_type.swap_bytes(),
                offset,
                length,
                flags: raw.flags,
                name: String::from_utf16_lossy(&raw.name).trim_end_matches('\0').to_string(), // TODO: FromWide trait
            });
        }
        Ok(partitions)
    }
}
impl Layout {
    /// Parses the GPT from `disk`, falling back to the backup header in the
    /// last sector when the primary header (sector 1) is invalid.
    pub(crate) fn read(disk: &impl Disk, mbr: MasterBootRecord) -> Result<Layout> {
        if !mbr.is_gpt_protective() {
            todo!("Return invalid MBR error") // InvalidGptMbr
        }
        let sector_size = disk.logical_sector_size()? as u64;
        // Primary header lives at byte offset sector_size (LBA 1).
        let mut header: Header = tools::read_disk_struct(disk, sector_size)?;
        let mut valid = header.is_valid();
        if !valid {
            // TODO: log warning
            // let's try second one
            let size = disk.capacity()?;
            // Backup header is stored in the last sector of the disk.
            let secondary_header: Header = tools::read_disk_struct(disk, size - sector_size)?;
            if secondary_header.is_valid() {
                header = secondary_header;
                valid = true;
            }
        }
        if !valid {
            todo!("Return invalid GPT header error") // InvalidGptHeader
        }
        // Unexpected size/revision is tolerated for now (warnings pending).
        if header.header_size != HEADER_SIZE {
            todo!("Log warning: unexpected header size")
        }
        if header.revision != REVISION {
            todo!("Log warning: unexpected revision")
        }
        let partitions = read_partitions(disk, &header)?;
        Ok(Layout {
            _protective_mbr: mbr,
            disk_id: header.disk_id,
            partitions,
        })
    }
    /// The disk GUID from the GPT header.
    pub fn disk_id(&self) -> &uuid::Uuid {
        &self.disk_id
    }
    /// All decoded partition entries, in on-disk order.
    pub fn partitions(&self) -> &[PartitionInfo] {
        &self.partitions
    }
}
|
#[doc = r"Register block"]
// Memory-mapped LTDC layer registers; field order mirrors hardware offsets.
#[repr(C)]
pub struct LAYER {
    #[doc = "0x00 - Layerx Control Register"]
    pub cr: CR,
    #[doc = "0x04 - Layerx Window Horizontal Position Configuration Register"]
    pub whpcr: WHPCR,
    #[doc = "0x08 - Layerx Window Vertical Position Configuration Register"]
    pub wvpcr: WVPCR,
    #[doc = "0x0c - Layerx Color Keying Configuration Register"]
    pub ckcr: CKCR,
    #[doc = "0x10 - Layerx Pixel Format Configuration Register"]
    pub pfcr: PFCR,
    #[doc = "0x14 - Layerx Constant Alpha Configuration Register"]
    pub cacr: CACR,
    #[doc = "0x18 - Layerx Default Color Configuration Register"]
    pub dccr: DCCR,
    #[doc = "0x1c - Layerx Blending Factors Configuration Register"]
    pub bfcr: BFCR,
    _reserved8: [u8; 0x08], // gap 0x20..0x28
    #[doc = "0x28 - Layerx Color Frame Buffer Address Register"]
    pub cfbar: CFBAR,
    #[doc = "0x2c - Layerx Color Frame Buffer Length Register"]
    pub cfblr: CFBLR,
    #[doc = "0x30 - Layerx ColorFrame Buffer Line Number Register"]
    pub cfblnr: CFBLNR,
    _reserved11: [u8; 0x0c], // gap 0x34..0x40
    #[doc = "0x40 - Layerx CLUT Write Register"]
    pub clutwr: CLUTWR,
    _reserved_end: [u8; 0x3c], // pads the block to 0x80 bytes
}
// Per-register accessor types; each register's fields live in its submodule.
#[doc = "CR (rw) register accessor: Layerx Control Register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`cr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`cr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`cr`]
module"]
pub type CR = crate::Reg<cr::CR_SPEC>;
#[doc = "Layerx Control Register"]
pub mod cr;
#[doc = "WHPCR (rw) register accessor: Layerx Window Horizontal Position Configuration Register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`whpcr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`whpcr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`whpcr`]
module"]
pub type WHPCR = crate::Reg<whpcr::WHPCR_SPEC>;
#[doc = "Layerx Window Horizontal Position Configuration Register"]
pub mod whpcr;
#[doc = "WVPCR (rw) register accessor: Layerx Window Vertical Position Configuration Register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`wvpcr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`wvpcr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`wvpcr`]
module"]
pub type WVPCR = crate::Reg<wvpcr::WVPCR_SPEC>;
#[doc = "Layerx Window Vertical Position Configuration Register"]
pub mod wvpcr;
#[doc = "CKCR (rw) register accessor: Layerx Color Keying Configuration Register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`ckcr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`ckcr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`ckcr`]
module"]
pub type CKCR = crate::Reg<ckcr::CKCR_SPEC>;
#[doc = "Layerx Color Keying Configuration Register"]
pub mod ckcr;
#[doc = "PFCR (rw) register accessor: Layerx Pixel Format Configuration Register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`pfcr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`pfcr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`pfcr`]
module"]
pub type PFCR = crate::Reg<pfcr::PFCR_SPEC>;
#[doc = "Layerx Pixel Format Configuration Register"]
pub mod pfcr;
#[doc = "CACR (rw) register accessor: Layerx Constant Alpha Configuration Register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`cacr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`cacr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`cacr`]
module"]
pub type CACR = crate::Reg<cacr::CACR_SPEC>;
#[doc = "Layerx Constant Alpha Configuration Register"]
pub mod cacr;
#[doc = "DCCR (rw) register accessor: Layerx Default Color Configuration Register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`dccr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`dccr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`dccr`]
module"]
pub type DCCR = crate::Reg<dccr::DCCR_SPEC>;
#[doc = "Layerx Default Color Configuration Register"]
pub mod dccr;
#[doc = "BFCR (rw) register accessor: Layerx Blending Factors Configuration Register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`bfcr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`bfcr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`bfcr`]
module"]
pub type BFCR = crate::Reg<bfcr::BFCR_SPEC>;
#[doc = "Layerx Blending Factors Configuration Register"]
pub mod bfcr;
#[doc = "CFBAR (rw) register accessor: Layerx Color Frame Buffer Address Register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`cfbar::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`cfbar::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`cfbar`]
module"]
pub type CFBAR = crate::Reg<cfbar::CFBAR_SPEC>;
#[doc = "Layerx Color Frame Buffer Address Register"]
pub mod cfbar;
#[doc = "CFBLR (rw) register accessor: Layerx Color Frame Buffer Length Register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`cfblr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`cfblr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`cfblr`]
module"]
pub type CFBLR = crate::Reg<cfblr::CFBLR_SPEC>;
#[doc = "Layerx Color Frame Buffer Length Register"]
pub mod cfblr;
#[doc = "CFBLNR (rw) register accessor: Layerx ColorFrame Buffer Line Number Register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`cfblnr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`cfblnr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`cfblnr`]
module"]
pub type CFBLNR = crate::Reg<cfblnr::CFBLNR_SPEC>;
#[doc = "Layerx ColorFrame Buffer Line Number Register"]
pub mod cfblnr;
#[doc = "CLUTWR (w) register accessor: Layerx CLUT Write Register\n\nYou can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`clutwr::W`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`clutwr`]
module"]
pub type CLUTWR = crate::Reg<clutwr::CLUTWR_SPEC>;
#[doc = "Layerx CLUT Write Register"]
pub mod clutwr;
|
#[doc = "Reader of register CONFIG"]
pub type R = crate::R<u32, super::CONFIG>;
#[doc = "Writer for register CONFIG"]
pub type W = crate::W<u32, super::CONFIG>;
#[doc = "Register CONFIG `reset()`'s with value 0x0400_0000"]
impl crate::ResetValue for super::CONFIG {
    type Type = u32;
    #[inline(always)]
    fn reset_value() -> Self::Type {
        // Hardware reset value of the CONFIG register.
        0x0400_0000
    }
}
#[doc = "N/A\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum IREF_SEL_A {
    #[doc = "0: N/A"]
    IREF_SRSS,
    #[doc = "1: N/A"]
    IREF_PASS,
}
impl From<IREF_SEL_A> for bool {
    #[inline(always)]
    fn from(variant: IREF_SEL_A) -> Self {
        // Single-bit field: only IREF_PASS encodes as 1.
        matches!(variant, IREF_SEL_A::IREF_PASS)
    }
}
#[doc = "Reader of field `IREF_SEL`"]
pub type IREF_SEL_R = crate::R<bool, IREF_SEL_A>;
impl IREF_SEL_R {
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> IREF_SEL_A {
        // Single-bit field: false/true map 1:1 onto the two variants.
        match self.bits {
            false => IREF_SEL_A::IREF_SRSS,
            true => IREF_SEL_A::IREF_PASS,
        }
    }
    #[doc = "Checks if the value of the field is `IREF_SRSS`"]
    #[inline(always)]
    pub fn is_iref_srss(&self) -> bool {
        *self == IREF_SEL_A::IREF_SRSS
    }
    #[doc = "Checks if the value of the field is `IREF_PASS`"]
    #[inline(always)]
    pub fn is_iref_pass(&self) -> bool {
        *self == IREF_SEL_A::IREF_PASS
    }
}
#[doc = "Write proxy for field `IREF_SEL`"]
pub struct IREF_SEL_W<'a> {
    w: &'a mut W,
}
impl<'a> IREF_SEL_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: IREF_SEL_A) -> &'a mut W {
        self.bit(variant.into())
    }
    #[doc = "N/A"]
    #[inline(always)]
    pub fn iref_srss(self) -> &'a mut W {
        self.variant(IREF_SEL_A::IREF_SRSS)
    }
    #[doc = "N/A"]
    #[inline(always)]
    pub fn iref_pass(self) -> &'a mut W {
        self.variant(IREF_SEL_A::IREF_PASS)
    }
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Bit 0 of CONFIG: clear it first, then re-set it when requested.
        let cleared = self.w.bits & !0x01;
        self.w.bits = cleared | u32::from(value);
        self.w
    }
}
#[doc = "Reader of field `FILTER_DELAY`"]
pub type FILTER_DELAY_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `FILTER_DELAY`"]
pub struct FILTER_DELAY_W<'a> {
    w: &'a mut W,
}
impl<'a> FILTER_DELAY_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // 5-bit field at bit offset 4: mask out the old value, insert the new.
        self.w.bits = (self.w.bits & !(0x1f << 4)) | (((value as u32) & 0x1f) << 4);
        self.w
    }
}
#[doc = "Configures the delay between shield clock and sensor clock\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum SHIELD_DELAY_A {
    #[doc = "0: Delay line is off; sensor clock = shield clock"]
    OFF,
    #[doc = "1: shield clock is delayed by 5ns delay w.r.t sensor clock"]
    D5NS,
    #[doc = "2: shield clock is delayed by 10ns delay w.r.t sensor clock"]
    D10NS,
    #[doc = "3: shield clock is delayed by 20ns delay w.r.t sensor clock"]
    D20NS,
}
impl From<SHIELD_DELAY_A> for u8 {
    #[inline(always)]
    fn from(variant: SHIELD_DELAY_A) -> Self {
        // The variants are declared in encoding order (0..=3), so the
        // fieldless-enum discriminant cast yields the register value.
        variant as u8
    }
}
#[doc = "Reader of field `SHIELD_DELAY`"]
pub type SHIELD_DELAY_R = crate::R<u8, SHIELD_DELAY_A>;
impl SHIELD_DELAY_R {
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> SHIELD_DELAY_A {
        match self.bits {
            0 => SHIELD_DELAY_A::OFF,
            1 => SHIELD_DELAY_A::D5NS,
            2 => SHIELD_DELAY_A::D10NS,
            3 => SHIELD_DELAY_A::D20NS,
            _ => unreachable!(), // 2-bit field — assumes the reader masked the value
        }
    }
    #[doc = "Checks if the value of the field is `OFF`"]
    #[inline(always)]
    pub fn is_off(&self) -> bool {
        *self == SHIELD_DELAY_A::OFF
    }
    #[doc = "Checks if the value of the field is `D5NS`"]
    #[inline(always)]
    pub fn is_d5ns(&self) -> bool {
        *self == SHIELD_DELAY_A::D5NS
    }
    #[doc = "Checks if the value of the field is `D10NS`"]
    #[inline(always)]
    pub fn is_d10ns(&self) -> bool {
        *self == SHIELD_DELAY_A::D10NS
    }
    #[doc = "Checks if the value of the field is `D20NS`"]
    #[inline(always)]
    pub fn is_d20ns(&self) -> bool {
        *self == SHIELD_DELAY_A::D20NS
    }
}
#[doc = "Write proxy for field `SHIELD_DELAY`"]
pub struct SHIELD_DELAY_W<'a> {
w: &'a mut W,
}
impl<'a> SHIELD_DELAY_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: SHIELD_DELAY_A) -> &'a mut W {
{
self.bits(variant.into())
}
}
#[doc = "Delay line is off; sensor clock = shield clock"]
#[inline(always)]
pub fn off(self) -> &'a mut W {
self.variant(SHIELD_DELAY_A::OFF)
}
#[doc = "shield clock is delayed by 5ns delay w.r.t sensor clock"]
#[inline(always)]
pub fn d5ns(self) -> &'a mut W {
self.variant(SHIELD_DELAY_A::D5NS)
}
#[doc = "shield clock is delayed by 10ns delay w.r.t sensor clock"]
#[inline(always)]
pub fn d10ns(self) -> &'a mut W {
self.variant(SHIELD_DELAY_A::D10NS)
}
#[doc = "shield clock is delayed by 20ns delay w.r.t sensor clock"]
#[inline(always)]
pub fn d20ns(self) -> &'a mut W {
self.variant(SHIELD_DELAY_A::D20NS)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bits(self, value: u8) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x03 << 10)) | (((value as u32) & 0x03) << 10);
self.w
}
}
/// Reader of field `SENSE_EN`
pub type SENSE_EN_R = crate::R<bool, bool>;
/// Write proxy for field `SENSE_EN`
pub struct SENSE_EN_W<'a> {
    w: &'a mut W,
}
impl<'a> SENSE_EN_W<'a> {
    /// Writes a raw bit to the field (bit 12 of the register).
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        const OFFSET: u32 = 12;
        self.w.bits = (self.w.bits & !(0x01 << OFFSET)) | ((u32::from(value) & 0x01) << OFFSET);
        self.w
    }
    /// Sets the field bit.
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit.
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
}
/// N/A
///
/// Value on reset: 0
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum FULL_WAVE_A {
    /// 0: Half Wave mode
    HALFWAVE,
    /// 1: Full Wave mode
    FULLWAVE,
}
impl From<FULL_WAVE_A> for bool {
    #[inline(always)]
    fn from(variant: FULL_WAVE_A) -> Self {
        // HALFWAVE encodes as 0 (false), FULLWAVE as 1 (true).
        variant == FULL_WAVE_A::FULLWAVE
    }
}
/// Reader of field `FULL_WAVE`
pub type FULL_WAVE_R = crate::R<bool, FULL_WAVE_A>;
impl FULL_WAVE_R {
    /// Decodes the raw bit into its enumerated variant.
    #[inline(always)]
    pub fn variant(&self) -> FULL_WAVE_A {
        if self.bits {
            FULL_WAVE_A::FULLWAVE
        } else {
            FULL_WAVE_A::HALFWAVE
        }
    }
    /// Checks if the value of the field is `HALFWAVE`
    #[inline(always)]
    pub fn is_halfwave(&self) -> bool {
        *self == FULL_WAVE_A::HALFWAVE
    }
    /// Checks if the value of the field is `FULLWAVE`
    #[inline(always)]
    pub fn is_fullwave(&self) -> bool {
        *self == FULL_WAVE_A::FULLWAVE
    }
}
/// Write proxy for field `FULL_WAVE`
pub struct FULL_WAVE_W<'a> {
    w: &'a mut W,
}
impl<'a> FULL_WAVE_W<'a> {
    /// Writes a raw bit to the field (bit 17 of the register).
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        const OFFSET: u32 = 17;
        self.w.bits = (self.w.bits & !(0x01 << OFFSET)) | ((u32::from(value) & 0x01) << OFFSET);
        self.w
    }
    /// Writes `variant` to the field.
    #[inline(always)]
    pub fn variant(self, variant: FULL_WAVE_A) -> &'a mut W {
        self.bit(variant.into())
    }
    /// Half Wave mode
    #[inline(always)]
    pub fn halfwave(self) -> &'a mut W {
        self.variant(FULL_WAVE_A::HALFWAVE)
    }
    /// Full Wave mode
    #[inline(always)]
    pub fn fullwave(self) -> &'a mut W {
        self.variant(FULL_WAVE_A::FULLWAVE)
    }
    /// Sets the field bit.
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit.
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
}
/// N/A
///
/// Value on reset: 0
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum MUTUAL_CAP_A {
    /// 0: Self-cap mode
    SELFCAP,
    /// 1: Mutual-cap mode
    MUTUALCAP,
}
impl From<MUTUAL_CAP_A> for bool {
    #[inline(always)]
    fn from(variant: MUTUAL_CAP_A) -> Self {
        // SELFCAP encodes as 0 (false), MUTUALCAP as 1 (true).
        variant == MUTUAL_CAP_A::MUTUALCAP
    }
}
/// Reader of field `MUTUAL_CAP`
pub type MUTUAL_CAP_R = crate::R<bool, MUTUAL_CAP_A>;
impl MUTUAL_CAP_R {
    /// Decodes the raw bit into its enumerated variant.
    #[inline(always)]
    pub fn variant(&self) -> MUTUAL_CAP_A {
        if self.bits {
            MUTUAL_CAP_A::MUTUALCAP
        } else {
            MUTUAL_CAP_A::SELFCAP
        }
    }
    /// Checks if the value of the field is `SELFCAP`
    #[inline(always)]
    pub fn is_selfcap(&self) -> bool {
        *self == MUTUAL_CAP_A::SELFCAP
    }
    /// Checks if the value of the field is `MUTUALCAP`
    #[inline(always)]
    pub fn is_mutualcap(&self) -> bool {
        *self == MUTUAL_CAP_A::MUTUALCAP
    }
}
/// Write proxy for field `MUTUAL_CAP`
pub struct MUTUAL_CAP_W<'a> {
    w: &'a mut W,
}
impl<'a> MUTUAL_CAP_W<'a> {
    /// Writes a raw bit to the field (bit 18 of the register).
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        const OFFSET: u32 = 18;
        self.w.bits = (self.w.bits & !(0x01 << OFFSET)) | ((u32::from(value) & 0x01) << OFFSET);
        self.w
    }
    /// Writes `variant` to the field.
    #[inline(always)]
    pub fn variant(self, variant: MUTUAL_CAP_A) -> &'a mut W {
        self.bit(variant.into())
    }
    /// Self-cap mode
    #[inline(always)]
    pub fn selfcap(self) -> &'a mut W {
        self.variant(MUTUAL_CAP_A::SELFCAP)
    }
    /// Mutual-cap mode
    #[inline(always)]
    pub fn mutualcap(self) -> &'a mut W {
        self.variant(MUTUAL_CAP_A::MUTUALCAP)
    }
    /// Sets the field bit.
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit.
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
}
/// N/A
///
/// Value on reset: 0
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum CSX_DUAL_CNT_A {
    /// 0: N/A
    ONE,
    /// 1: N/A
    TWO,
}
impl From<CSX_DUAL_CNT_A> for bool {
    #[inline(always)]
    fn from(variant: CSX_DUAL_CNT_A) -> Self {
        // ONE encodes as 0 (false), TWO as 1 (true).
        variant == CSX_DUAL_CNT_A::TWO
    }
}
/// Reader of field `CSX_DUAL_CNT`
pub type CSX_DUAL_CNT_R = crate::R<bool, CSX_DUAL_CNT_A>;
impl CSX_DUAL_CNT_R {
    /// Decodes the raw bit into its enumerated variant.
    #[inline(always)]
    pub fn variant(&self) -> CSX_DUAL_CNT_A {
        if self.bits {
            CSX_DUAL_CNT_A::TWO
        } else {
            CSX_DUAL_CNT_A::ONE
        }
    }
    /// Checks if the value of the field is `ONE`
    #[inline(always)]
    pub fn is_one(&self) -> bool {
        *self == CSX_DUAL_CNT_A::ONE
    }
    /// Checks if the value of the field is `TWO`
    #[inline(always)]
    pub fn is_two(&self) -> bool {
        *self == CSX_DUAL_CNT_A::TWO
    }
}
/// Write proxy for field `CSX_DUAL_CNT`
pub struct CSX_DUAL_CNT_W<'a> {
    w: &'a mut W,
}
impl<'a> CSX_DUAL_CNT_W<'a> {
    /// Writes a raw bit to the field (bit 19 of the register).
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        const OFFSET: u32 = 19;
        self.w.bits = (self.w.bits & !(0x01 << OFFSET)) | ((u32::from(value) & 0x01) << OFFSET);
        self.w
    }
    /// Writes `variant` to the field.
    #[inline(always)]
    pub fn variant(self, variant: CSX_DUAL_CNT_A) -> &'a mut W {
        self.bit(variant.into())
    }
    /// N/A
    #[inline(always)]
    pub fn one(self) -> &'a mut W {
        self.variant(CSX_DUAL_CNT_A::ONE)
    }
    /// N/A
    #[inline(always)]
    pub fn two(self) -> &'a mut W {
        self.variant(CSX_DUAL_CNT_A::TWO)
    }
    /// Sets the field bit.
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit.
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
}
/// N/A
///
/// Value on reset: 0
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum DSI_COUNT_SEL_A {
    /// 0: N/A
    CSD_RESULT,
    /// 1: N/A
    ADC_RESULT,
}
impl From<DSI_COUNT_SEL_A> for bool {
    #[inline(always)]
    fn from(variant: DSI_COUNT_SEL_A) -> Self {
        // CSD_RESULT encodes as 0 (false), ADC_RESULT as 1 (true).
        variant == DSI_COUNT_SEL_A::ADC_RESULT
    }
}
/// Reader of field `DSI_COUNT_SEL`
pub type DSI_COUNT_SEL_R = crate::R<bool, DSI_COUNT_SEL_A>;
impl DSI_COUNT_SEL_R {
    /// Decodes the raw bit into its enumerated variant.
    #[inline(always)]
    pub fn variant(&self) -> DSI_COUNT_SEL_A {
        if self.bits {
            DSI_COUNT_SEL_A::ADC_RESULT
        } else {
            DSI_COUNT_SEL_A::CSD_RESULT
        }
    }
    /// Checks if the value of the field is `CSD_RESULT`
    #[inline(always)]
    pub fn is_csd_result(&self) -> bool {
        *self == DSI_COUNT_SEL_A::CSD_RESULT
    }
    /// Checks if the value of the field is `ADC_RESULT`
    #[inline(always)]
    pub fn is_adc_result(&self) -> bool {
        *self == DSI_COUNT_SEL_A::ADC_RESULT
    }
}
/// Write proxy for field `DSI_COUNT_SEL`
pub struct DSI_COUNT_SEL_W<'a> {
    w: &'a mut W,
}
impl<'a> DSI_COUNT_SEL_W<'a> {
    /// Writes a raw bit to the field (bit 24 of the register).
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        const OFFSET: u32 = 24;
        self.w.bits = (self.w.bits & !(0x01 << OFFSET)) | ((u32::from(value) & 0x01) << OFFSET);
        self.w
    }
    /// Writes `variant` to the field.
    #[inline(always)]
    pub fn variant(self, variant: DSI_COUNT_SEL_A) -> &'a mut W {
        self.bit(variant.into())
    }
    /// N/A
    #[inline(always)]
    pub fn csd_result(self) -> &'a mut W {
        self.variant(DSI_COUNT_SEL_A::CSD_RESULT)
    }
    /// N/A
    #[inline(always)]
    pub fn adc_result(self) -> &'a mut W {
        self.variant(DSI_COUNT_SEL_A::ADC_RESULT)
    }
    /// Sets the field bit.
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit.
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
}
/// Reader of field `DSI_SAMPLE_EN`
pub type DSI_SAMPLE_EN_R = crate::R<bool, bool>;
/// Write proxy for field `DSI_SAMPLE_EN`
pub struct DSI_SAMPLE_EN_W<'a> {
    w: &'a mut W,
}
impl<'a> DSI_SAMPLE_EN_W<'a> {
    /// Writes a raw bit to the field (bit 25 of the register).
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        const OFFSET: u32 = 25;
        self.w.bits = (self.w.bits & !(0x01 << OFFSET)) | ((u32::from(value) & 0x01) << OFFSET);
        self.w
    }
    /// Sets the field bit.
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit.
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
}
/// Reader of field `SAMPLE_SYNC`
pub type SAMPLE_SYNC_R = crate::R<bool, bool>;
/// Write proxy for field `SAMPLE_SYNC`
pub struct SAMPLE_SYNC_W<'a> {
    w: &'a mut W,
}
impl<'a> SAMPLE_SYNC_W<'a> {
    /// Writes a raw bit to the field (bit 26 of the register).
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        const OFFSET: u32 = 26;
        self.w.bits = (self.w.bits & !(0x01 << OFFSET)) | ((u32::from(value) & 0x01) << OFFSET);
        self.w
    }
    /// Sets the field bit.
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit.
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
}
/// Reader of field `DSI_SENSE_EN`
pub type DSI_SENSE_EN_R = crate::R<bool, bool>;
/// Write proxy for field `DSI_SENSE_EN`
pub struct DSI_SENSE_EN_W<'a> {
    w: &'a mut W,
}
impl<'a> DSI_SENSE_EN_W<'a> {
    /// Writes a raw bit to the field (bit 27 of the register).
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        const OFFSET: u32 = 27;
        self.w.bits = (self.w.bits & !(0x01 << OFFSET)) | ((u32::from(value) & 0x01) << OFFSET);
        self.w
    }
    /// Sets the field bit.
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit.
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
}
/// Reader of field `LP_MODE`
pub type LP_MODE_R = crate::R<bool, bool>;
/// Write proxy for field `LP_MODE`
pub struct LP_MODE_W<'a> {
    w: &'a mut W,
}
impl<'a> LP_MODE_W<'a> {
    /// Writes a raw bit to the field (bit 30 of the register).
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        const OFFSET: u32 = 30;
        self.w.bits = (self.w.bits & !(0x01 << OFFSET)) | ((u32::from(value) & 0x01) << OFFSET);
        self.w
    }
    /// Sets the field bit.
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit.
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
}
/// Reader of field `ENABLE`
pub type ENABLE_R = crate::R<bool, bool>;
/// Write proxy for field `ENABLE`
pub struct ENABLE_W<'a> {
    w: &'a mut W,
}
impl<'a> ENABLE_W<'a> {
    /// Writes a raw bit to the field (bit 31 of the register).
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        const OFFSET: u32 = 31;
        self.w.bits = (self.w.bits & !(0x01 << OFFSET)) | ((u32::from(value) & 0x01) << OFFSET);
        self.w
    }
    /// Sets the field bit.
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit.
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
}
impl R {
    /// Bit 0 - N/A
    #[inline(always)]
    pub fn iref_sel(&self) -> IREF_SEL_R {
        IREF_SEL_R::new(self.bits & 0x01 != 0)
    }
    /// Bits 4:8 - Enables the digital filtering on the CSD comparator
    #[inline(always)]
    pub fn filter_delay(&self) -> FILTER_DELAY_R {
        FILTER_DELAY_R::new((self.bits >> 4) as u8 & 0x1f)
    }
    /// Bits 10:11 - Configures the delay between shield clock and sensor clock
    #[inline(always)]
    pub fn shield_delay(&self) -> SHIELD_DELAY_R {
        SHIELD_DELAY_R::new((self.bits >> 10) as u8 & 0x03)
    }
    /// Bit 12 - Enables the sensor and shield clocks, CSD modulator output and turns on the IDAC compensation current as selected by CSD_IDAC.
    #[inline(always)]
    pub fn sense_en(&self) -> SENSE_EN_R {
        SENSE_EN_R::new(self.bits & (1 << 12) != 0)
    }
    /// Bit 17 - N/A
    #[inline(always)]
    pub fn full_wave(&self) -> FULL_WAVE_R {
        FULL_WAVE_R::new(self.bits & (1 << 17) != 0)
    }
    /// Bit 18 - N/A
    #[inline(always)]
    pub fn mutual_cap(&self) -> MUTUAL_CAP_R {
        MUTUAL_CAP_R::new(self.bits & (1 << 18) != 0)
    }
    /// Bit 19 - N/A
    #[inline(always)]
    pub fn csx_dual_cnt(&self) -> CSX_DUAL_CNT_R {
        CSX_DUAL_CNT_R::new(self.bits & (1 << 19) != 0)
    }
    /// Bit 24 - N/A
    #[inline(always)]
    pub fn dsi_count_sel(&self) -> DSI_COUNT_SEL_R {
        DSI_COUNT_SEL_R::new(self.bits & (1 << 24) != 0)
    }
    /// Bit 25 - DSI_SAMPLE_EN = 1 -> COUNTER will count the samples generated by DSI DSI_SAMPLE_EN = 0 -> COUNTER will count the samples generated by CSD modulator
    #[inline(always)]
    pub fn dsi_sample_en(&self) -> DSI_SAMPLE_EN_R {
        DSI_SAMPLE_EN_R::new(self.bits & (1 << 25) != 0)
    }
    /// Bit 26 - N/A
    #[inline(always)]
    pub fn sample_sync(&self) -> SAMPLE_SYNC_R {
        SAMPLE_SYNC_R::new(self.bits & (1 << 26) != 0)
    }
    /// Bit 27 - DSI_SENSE_EN = 1-> sensor clock is driven directly by DSI DSI_SENSE_EN = 0-> sensor clock is driven by PRS/divide-by-2/DIRECT_CLOCK
    #[inline(always)]
    pub fn dsi_sense_en(&self) -> DSI_SENSE_EN_R {
        DSI_SENSE_EN_R::new(self.bits & (1 << 27) != 0)
    }
    /// Bit 30 - N/A
    #[inline(always)]
    pub fn lp_mode(&self) -> LP_MODE_R {
        LP_MODE_R::new(self.bits & (1 << 30) != 0)
    }
    /// Bit 31 - N/A
    #[inline(always)]
    pub fn enable(&self) -> ENABLE_R {
        ENABLE_R::new(self.bits & (1 << 31) != 0)
    }
}
impl W {
    // Each method below hands out a single-use write proxy borrowing this
    // register writer; the proxy's `bit`/`bits` method updates `self.bits`
    // in place and returns the writer for call chaining.
    #[doc = "Bit 0 - N/A"]
    #[inline(always)]
    pub fn iref_sel(&mut self) -> IREF_SEL_W {
        IREF_SEL_W { w: self }
    }
    #[doc = "Bits 4:8 - Enables the digital filtering on the CSD comparator"]
    #[inline(always)]
    pub fn filter_delay(&mut self) -> FILTER_DELAY_W {
        FILTER_DELAY_W { w: self }
    }
    #[doc = "Bits 10:11 - Configures the delay between shield clock and sensor clock"]
    #[inline(always)]
    pub fn shield_delay(&mut self) -> SHIELD_DELAY_W {
        SHIELD_DELAY_W { w: self }
    }
    #[doc = "Bit 12 - Enables the sensor and shield clocks, CSD modulator output and turns on the IDAC compensation current as selected by CSD_IDAC."]
    #[inline(always)]
    pub fn sense_en(&mut self) -> SENSE_EN_W {
        SENSE_EN_W { w: self }
    }
    #[doc = "Bit 17 - N/A"]
    #[inline(always)]
    pub fn full_wave(&mut self) -> FULL_WAVE_W {
        FULL_WAVE_W { w: self }
    }
    #[doc = "Bit 18 - N/A"]
    #[inline(always)]
    pub fn mutual_cap(&mut self) -> MUTUAL_CAP_W {
        MUTUAL_CAP_W { w: self }
    }
    #[doc = "Bit 19 - N/A"]
    #[inline(always)]
    pub fn csx_dual_cnt(&mut self) -> CSX_DUAL_CNT_W {
        CSX_DUAL_CNT_W { w: self }
    }
    #[doc = "Bit 24 - N/A"]
    #[inline(always)]
    pub fn dsi_count_sel(&mut self) -> DSI_COUNT_SEL_W {
        DSI_COUNT_SEL_W { w: self }
    }
    #[doc = "Bit 25 - DSI_SAMPLE_EN = 1 -> COUNTER will count the samples generated by DSI DSI_SAMPLE_EN = 0 -> COUNTER will count the samples generated by CSD modulator"]
    #[inline(always)]
    pub fn dsi_sample_en(&mut self) -> DSI_SAMPLE_EN_W {
        DSI_SAMPLE_EN_W { w: self }
    }
    #[doc = "Bit 26 - N/A"]
    #[inline(always)]
    pub fn sample_sync(&mut self) -> SAMPLE_SYNC_W {
        SAMPLE_SYNC_W { w: self }
    }
    #[doc = "Bit 27 - DSI_SENSE_EN = 1-> sensor clock is driven directly by DSI DSI_SENSE_EN = 0-> sensor clock is driven by PRS/divide-by-2/DIRECT_CLOCK"]
    #[inline(always)]
    pub fn dsi_sense_en(&mut self) -> DSI_SENSE_EN_W {
        DSI_SENSE_EN_W { w: self }
    }
    #[doc = "Bit 30 - N/A"]
    #[inline(always)]
    pub fn lp_mode(&mut self) -> LP_MODE_W {
        LP_MODE_W { w: self }
    }
    #[doc = "Bit 31 - N/A"]
    #[inline(always)]
    pub fn enable(&mut self) -> ENABLE_W {
        ENABLE_W { w: self }
    }
}
|
use std::collections::HashMap;
impl Solution {
pub fn find_max_length(nums: Vec<i32>) -> i32 {
let (mut ans,n,mut cur) = (0,nums.len(),0);
let mut mp = HashMap::new();
let mut cnt = 0;
mp.insert(0, -1);
for (idx,num) in nums.into_iter().enumerate(){
match num {
1 =>{
cur = 1;
},
_ =>{
cur = -1;
}
}
cnt += cur;
if mp.contains_key(&cnt){
ans = ans.max(idx as i32 - mp[&cnt]);
}else{
mp.entry(cnt).or_insert(idx as i32);
}
}
ans
}
} |
//! Virtual machine flags.
bitflags::bitflags! {
    /// Mask of the sixteen virtual-machine keypad keys: `KEY_n` is bit `n`.
    #[derive(Default)]
    pub struct VmKey: u16 {
        const KEY_0 = 1 << 0;
        const KEY_1 = 1 << 1;
        const KEY_2 = 1 << 2;
        const KEY_3 = 1 << 3;
        const KEY_4 = 1 << 4;
        const KEY_5 = 1 << 5;
        const KEY_6 = 1 << 6;
        const KEY_7 = 1 << 7;
        const KEY_8 = 1 << 8;
        const KEY_9 = 1 << 9;
        const KEY_A = 1 << 10;
        const KEY_B = 1 << 11;
        const KEY_C = 1 << 12;
        const KEY_D = 1 << 13;
        const KEY_E = 1 << 14;
        const KEY_F = 1 << 15;
    }
}
impl VmKey {
    /// Gets the list of activated key numbers (0..=15) in the given key
    /// mask, in ascending order.
    pub fn to_vec(self) -> Vec<u8> {
        // KEY_0..KEY_F occupy bits 0..15 in order, so probing each key
        // number through `From<u8>` replaces sixteen copy-pasted `if`s
        // while producing the same ascending list.
        (0u8..16)
            .filter(|&key| self.contains(VmKey::from(key)))
            .collect()
    }
}
impl From<u8> for VmKey {
    /// Maps a key number to its flag: 0..=15 select `KEY_0`..`KEY_F`,
    /// anything else yields the empty mask.
    fn from(key: u8) -> Self {
        if key < 16 {
            // Key `n` is exactly bit `n` of the 16-bit mask.
            VmKey::from_bits_truncate(1u16 << key)
        } else {
            VmKey::empty()
        }
    }
}
impl From<VmKey> for u8 {
fn from(key: VmKey) -> Self {
match key {
VmKey::KEY_0 => 0,
VmKey::KEY_1 => 1,
VmKey::KEY_2 => 2,
VmKey::KEY_3 => 3,
VmKey::KEY_4 => 4,
VmKey::KEY_5 => 5,
VmKey::KEY_6 => 6,
VmKey::KEY_7 => 7,
VmKey::KEY_8 => 8,
VmKey::KEY_9 => 9,
VmKey::KEY_A => 10,
VmKey::KEY_B => 11,
VmKey::KEY_C => 12,
VmKey::KEY_D => 13,
VmKey::KEY_E => 14,
VmKey::KEY_F => 15,
_ => 0xff,
}
}
}
|
#[doc = "Reader of register INTERP0_POP_FULL"]
pub type R = crate::R<u32, super::INTERP0_POP_FULL>;
// No field-level accessors are generated for this register; reads
// presumably go through the shared `crate::R` wrapper's raw-value API —
// confirm against the crate's generic register implementation.
impl R {}
|
use std::collections;
use std::convert::TryFrom;
use std::sync::{Arc, RwLock};
use anyhow::{anyhow, Error};
use rustls::server;
use rustls::server::ClientHello;
use rustls::sign;
/// Something that resolves to different cert chains/keys based
/// on client-supplied server name (via SNI).
/// Supports adding certificates dynamically; an `RwLock` provides the
/// read/write mutex around the name -> key map.
pub struct MutResolvesServerCertUsingSni {
    // Keyed by the exact SNI hostname each certificate was registered under.
    by_name: RwLock<collections::HashMap<String, Arc<sign::CertifiedKey>>>,
}
impl MutResolvesServerCertUsingSni {
    /// Create a new and empty (i.e., knows no certificates) resolver.
    pub fn new() -> Self {
        Self {
            by_name: RwLock::new(collections::HashMap::new()),
        }
    }
    /// Add a new `sign::CertifiedKey` to be used for the given SNI `name`.
    ///
    /// This function fails if `name` is not a valid DNS name, or if
    /// it's not valid for the supplied certificate, or if the certificate
    /// chain is syntactically faulty.
    pub fn add(&self, name: &str, ck: sign::CertifiedKey) -> Result<(), Error> {
        // `anyhow!` formats its message itself, so the previous
        // `"...".to_string()` allocations were redundant.
        let checked_name = webpki::DnsNameRef::try_from_ascii_str(name)
            .map_err(|_| anyhow!("Bad DNS name"))?;
        cross_check_end_entity_cert(&ck, Some(checked_name))?;
        self.by_name
            .write()
            .unwrap() // propagate lock poisoning as a panic
            .insert(name.into(), Arc::new(ck));
        Ok(())
    }
}
impl server::ResolvesServerCert for MutResolvesServerCertUsingSni {
    /// Look up the certified key registered for the client's SNI name.
    fn resolve(&self, client_hello: ClientHello) -> Option<Arc<sign::CertifiedKey>> {
        // This kind of resolver requires SNI; without a server name there is
        // nothing to look up.
        let name = client_hello.server_name()?;
        // Cloning the Arc is cheap: just a refcount bump.
        self.by_name.read().unwrap().get(name).cloned()
    }
}
/// Check the certificate chain for validity:
/// - it should be non-empty list
/// - the first certificate should be parsable as a x509v3,
/// - the first certificate should quote the given server name
///   (if provided)
///
/// These checks are not security-sensitive. They are the
/// *server* attempting to detect accidental misconfiguration.
pub(crate) fn cross_check_end_entity_cert(
    ck: &sign::CertifiedKey,
    name: Option<webpki::DnsNameRef>,
) -> Result<(), Error> {
    // Always reject an empty certificate chain. (`anyhow!` needs no
    // `.to_string()` on its message; the redundant allocations were removed.)
    let end_entity_cert = ck
        .end_entity_cert()
        .map_err(|_| anyhow!("No end-entity certificate in certificate chain"))?;
    // Reject syntactically-invalid end-entity certificates.
    let end_entity_cert = webpki::EndEntityCert::try_from(end_entity_cert.as_ref())
        .map_err(|_| anyhow!("End-entity certificate in certificate chain is syntactically invalid"))?;
    if let Some(name) = name {
        // If SNI was offered then the certificate must be valid for
        // that hostname. Note that this doesn't fully validate that the
        // certificate is valid; it only validates that the name is one
        // that the certificate is valid for, if the certificate is
        // valid.
        if end_entity_cert.verify_is_valid_for_dns_name(name).is_err() {
            return Err(anyhow!("The server certificate is not valid for the given name"));
        }
    }
    Ok(())
}
|
// Problem 29 - Distinct powers
//
// Consider all integer combinations of a^b for 2 ≤ a ≤ 5 and 2 ≤ b ≤ 5:
//
// 2^2=4, 2^3=8, 2^4=16, 2^5=32
// 3^2=9, 3^3=27, 3^4=81, 3^5=243
// 4^2=16, 4^3=64, 4^4=256, 4^5=1024
// 5^2=25, 5^3=125, 5^4=625, 5^5=3125
//
// If they are then placed in numerical order, with any repeats removed, we get
// the following sequence of 15 distinct terms:
//
// 4, 8, 9, 16, 25, 27, 32, 64, 81, 125, 243, 256, 625, 1024, 3125
//
// How many distinct terms are in the sequence generated by a^b for 2 ≤ a ≤ 100
// and 2 ≤ b ≤ 100?
use std::collections::HashSet;
type Factor = (u32, u32);
fn main() {
    // Print the answer to Project Euler problem 29.
    let distinct_terms = solution();
    println!("{}", distinct_terms);
}
/// Count the distinct values of a^b for 2 <= a <= 100 and 2 <= b <= 100.
///
/// Each power is represented by its canonical prime factorization, so equal
/// powers (e.g. 2^4 and 4^2) collapse to the same `HashSet` entry without
/// ever computing the huge integers themselves.
fn solution() -> usize {
    let mut powers = HashSet::new();
    // Sufficient for factoring integers <= 100
    let primes = vec![2, 3, 5, 7];
    for a in 2..=100 {
        // The factorization of `a` is loop-invariant: compute it once
        // instead of once per exponent (was recomputed 99 times per base).
        let base_factors = factor(a, &primes);
        for b in 2..=100 {
            powers.insert(exponentiate(&base_factors, b));
        }
    }
    powers.len()
}
/// Prime-factorize `n` by trial division with the supplied primes.
///
/// Returns `(prime, multiplicity)` pairs in ascending prime order. Any
/// residue greater than 1 left after dividing out all trial primes is
/// appended with multiplicity 1 — it is itself prime provided `primes`
/// covers every prime <= sqrt(n). Takes a slice (`&[u32]`) rather than
/// `&Vec<u32>`; existing `&Vec` callers still work via deref coercion.
fn factor(n: u32, primes: &[u32]) -> Vec<(u32, u32)> {
    let mut factors = Vec::new();
    let mut current = n;
    for &p in primes {
        // Fully factored already (also handles n == 1 up front).
        if current == 1 {
            break;
        }
        if current % p == 0 {
            let mut multiplicity = 0;
            while current % p == 0 {
                multiplicity += 1;
                current /= p;
            }
            factors.push((p, multiplicity));
        }
    }
    if current > 1 {
        factors.push((current, 1));
    }
    factors
}
/// Raise a factorization to the `k`-th power by scaling every multiplicity:
/// (p^m)^k = p^(m*k). Takes a slice rather than `&Vec`; existing `&Vec`
/// callers still work via deref coercion.
fn exponentiate(factors: &[(u32, u32)], k: u32) -> Vec<(u32, u32)> {
    factors.iter().map(|&(p, m)| (p, m * k)).collect()
}
|
use crate::ci::GenerateCI;
use crate::BridgeModel;
use anyhow::{bail, Context, Result};
use console::style;
use dialoguer::{theme::ColorfulTheme, Select};
use fs_err as fs;
use minijinja::{context, Environment};
use std::path::Path;
/// Mixed Rust/Python project layout
#[derive(Debug, Clone, Copy)]
enum ProjectLayout {
    // `src: true` places the Python package under `src/` and the Rust crate
    // under `rust/`; `src: false` uses `python/` with Rust at the root.
    Mixed { src: bool },
    // Rust-only project generated at the project root.
    PureRust,
}
/// Renders the bundled minijinja templates into a new project tree.
struct ProjectGenerator<'a> {
    env: Environment<'a>, // minijinja environment with all templates registered
    project_name: String, // package name as given (may contain dashes)
    crate_name: String,   // `project_name` with dashes replaced by underscores
    bindings: String,     // one of: pyo3, rust-cpython, cffi, uniffi, bin
    layout: ProjectLayout,
    ci_config: String, // pre-rendered GitHub Actions workflow (CI.yml)
    overwrite: bool,   // overwrite existing files (`new`) vs keep them (`init`)
}
impl<'a> ProjectGenerator<'a> {
    /// Build a generator: derive the importable crate name, register every
    /// bundled template, and pre-render the GitHub Actions CI config.
    fn new(
        project_name: String,
        layout: ProjectLayout,
        bindings: String,
        overwrite: bool,
    ) -> Result<Self> {
        // Crate/module identifiers cannot contain dashes.
        let crate_name = project_name.replace('-', "_");
        let mut env = Environment::new();
        env.add_template(".gitignore", include_str!("templates/.gitignore.j2"))?;
        env.add_template("Cargo.toml", include_str!("templates/Cargo.toml.j2"))?;
        env.add_template(
            "pyproject.toml",
            include_str!("templates/pyproject.toml.j2"),
        )?;
        env.add_template("lib.rs", include_str!("templates/lib.rs.j2"))?;
        env.add_template("main.rs", include_str!("templates/main.rs.j2"))?;
        env.add_template("build.rs", include_str!("templates/build.rs.j2"))?;
        env.add_template("__init__.py", include_str!("templates/__init__.py.j2"))?;
        env.add_template("example.udl", include_str!("templates/example.udl.j2"))?;
        let bridge_model = match bindings.as_str() {
            "bin" => BridgeModel::Bin(None),
            "cffi" => BridgeModel::Cffi,
            "uniffi" => BridgeModel::UniFfi,
            // NOTE(review): the `7` is presumably a minimum Python 3.x minor
            // version for binding-based crates — confirm against BridgeModel.
            _ => BridgeModel::Bindings(bindings.clone(), 7),
        };
        let ci_config =
            GenerateCI::default().generate_github(&project_name, &bridge_model, true)?;
        Ok(Self {
            env,
            project_name,
            crate_name,
            bindings,
            layout,
            ci_config,
            overwrite,
        })
    }
    /// Write the whole project tree under `project_path`.
    ///
    /// Layouts: PureRust puts the crate at the root; Mixed adds a Python
    /// package under `src/` (src layout, crate under `rust/`) or `python/`
    /// (non-src layout, crate at the root).
    fn generate(&self, project_path: &Path) -> Result<()> {
        fs::create_dir_all(project_path)?;
        self.write_project_file(project_path, ".gitignore")?;
        self.write_project_file(project_path, "pyproject.toml")?;
        // CI configuration
        let gh_action_path = project_path.join(".github").join("workflows");
        fs::create_dir_all(&gh_action_path)?;
        self.write_content(&gh_action_path, "CI.yml", self.ci_config.as_bytes())?;
        // Resolve where the Rust crate lives; Mixed layouts also create the
        // Python package directory with its __init__.py.
        let rust_project = match self.layout {
            ProjectLayout::Mixed { src } => {
                let python_dir = if src {
                    project_path.join("src")
                } else {
                    project_path.join("python")
                };
                let python_project = python_dir.join(&self.crate_name);
                fs::create_dir_all(&python_project)?;
                self.write_project_file(&python_project, "__init__.py")?;
                if src {
                    project_path.join("rust")
                } else {
                    project_path.to_path_buf()
                }
            }
            ProjectLayout::PureRust => project_path.to_path_buf(),
        };
        let rust_src = rust_project.join("src");
        fs::create_dir_all(&rust_src)?;
        self.write_project_file(&rust_project, "Cargo.toml")?;
        if self.bindings == "bin" {
            self.write_project_file(&rust_src, "main.rs")?;
        } else {
            self.write_project_file(&rust_src, "lib.rs")?;
            // uniffi additionally needs a build script and a UDL interface.
            if self.bindings == "uniffi" {
                self.write_project_file(&rust_project, "build.rs")?;
                self.write_project_file(&rust_src, "example.udl")?;
            }
        }
        Ok(())
    }
    /// Render the named template with the project-wide context variables.
    fn render_template(&self, tmpl_name: &str) -> Result<String> {
        // Templates can pin the generated project to this maturin version.
        let version_major: usize = env!("CARGO_PKG_VERSION_MAJOR").parse().unwrap();
        let version_minor: usize = env!("CARGO_PKG_VERSION_MINOR").parse().unwrap();
        let tmpl = self.env.get_template(tmpl_name)?;
        let out = tmpl.render(context!(
            name => self.project_name,
            crate_name => self.crate_name,
            bindings => self.bindings,
            mixed_non_src => matches!(self.layout, ProjectLayout::Mixed { src: false }),
            version_major => version_major,
            version_minor => version_minor
        ))?;
        Ok(out)
    }
    /// Render the template named `file` and write it into `directory`.
    fn write_project_file(&self, directory: &Path, file: &str) -> Result<()> {
        let content = self.render_template(file)?;
        self.write_content(directory, file, content.as_bytes())
    }
    /// Write raw bytes, never clobbering existing files unless `overwrite`.
    fn write_content(&self, directory: &Path, file: &str, content: &[u8]) -> Result<()> {
        let path = directory.join(file);
        if self.overwrite || !path.exists() {
            fs::write(path, content)?;
        }
        Ok(())
    }
}
/// Options common to `maturin new` and `maturin init`.
#[derive(Debug, clap::Parser)]
pub struct GenerateProjectOptions {
    // NOTE: the `///` doc comments below double as the CLI `--help` text via
    // clap's derive, so they are user-facing strings — edit with care.
    /// Set the resulting package name, defaults to the directory name
    #[arg(long)]
    name: Option<String>,
    /// Use mixed Rust/Python project layout
    #[arg(long)]
    mixed: bool,
    /// Use Python first src layout for mixed Rust/Python project
    #[arg(long)]
    src: bool,
    /// Which kind of bindings to use
    #[arg(
        short,
        long,
        value_parser = ["pyo3", "rust-cpython", "cffi", "uniffi", "bin"]
    )]
    bindings: Option<String>,
}
/// Generate a new cargo project
pub fn new_project(path: String, options: GenerateProjectOptions) -> Result<()> {
    let project_path = Path::new(&path);
    // Refuse to touch anything that already exists.
    if project_path.exists() {
        bail!("destination `{}` already exists", project_path.display());
    }
    generate_project(project_path, options, true)?;
    let shown = project_path.display();
    eprintln!(
        " ✨ {} {} {}",
        style("Done!").bold().green(),
        style("New project created").bold(),
        style(&shown).underlined()
    );
    Ok(())
}
/// Generate a new cargo project in an existing directory
pub fn init_project(path: Option<String>, options: GenerateProjectOptions) -> Result<()> {
    // Default to the current working directory when no path is given.
    let project_path: std::path::PathBuf = match path {
        Some(p) => p.into(),
        None => std::env::current_dir()?,
    };
    let already_initialized = project_path.join("pyproject.toml").exists()
        || project_path.join("Cargo.toml").exists();
    if already_initialized {
        bail!("`maturin init` cannot be run on existing projects");
    }
    generate_project(&project_path, options, false)?;
    let shown = project_path.display();
    eprintln!(
        " ✨ {} {} {}",
        style("Done!").bold().green(),
        style("Initialized project").bold(),
        style(&shown).underlined()
    );
    Ok(())
}
/// Shared worker for `new_project`/`init_project`: resolve the project
/// name, bindings and layout, then render the project tree.
fn generate_project(
    project_path: &Path,
    options: GenerateProjectOptions,
    overwrite: bool,
) -> Result<()> {
    // Explicit --name wins; otherwise fall back to the directory name.
    let name = match options.name {
        Some(name) => name,
        None => project_path
            .file_name()
            .with_context(|| {
                format!("Failed to get name from path '{}'", project_path.display())
            })?
            .to_str()
            .context("Filename isn't valid Unicode")?
            .to_string(),
    };
    // "bin" produces no Python module, so it is not offered for the mixed
    // Rust/Python layout.
    let mut bindings_items = vec!["pyo3", "rust-cpython", "cffi", "uniffi"];
    if !options.mixed {
        bindings_items.push("bin");
    }
    let bindings = match options.bindings {
        Some(bindings) => bindings,
        None => {
            // No --bindings flag: ask interactively.
            let selection = Select::with_theme(&ColorfulTheme::default())
                .with_prompt(format!(
                    "🤷 {}\n 📖 {}",
                    style("Which kind of bindings to use?").bold(),
                    style("Documentation: https://maturin.rs/bindings.html").dim()
                ))
                .items(&bindings_items)
                .default(0)
                .interact()?;
            bindings_items[selection].to_string()
        }
    };
    let layout = if options.mixed {
        ProjectLayout::Mixed { src: options.src }
    } else {
        ProjectLayout::PureRust
    };
    ProjectGenerator::new(name, layout, bindings, overwrite)?.generate(project_path)
}
|
use nom::{branch::alt, bytes::complete::tag, multi::fold_many1, IResult};
use std::collections::{HashMap, HashSet};
use std::fs::File;
use std::io::prelude::*;
// A movement direction on the hex grid: one of a tile's six neighbours.
// NOTE(review): not referenced in the visible portion of this file;
// presumably consumed by the nom input parser further down — confirm.
#[derive(Clone, PartialEq)]
enum Direction {
    East,
    SouthEast,
    SouthWest,
    West,
    NorthWest,
    NorthEast,
}
// represents co-ordinates on a hex grid. The "x-axis" is a horizontal line,
// while the "y-axis" runs NW to SE (an axial coordinate scheme, so every
// tile has six neighbours at fixed (x, y) offsets).
#[derive(Clone, Copy)]
struct HexPosition {
    x: isize, // steps along the horizontal axis
    y: isize, // steps along the NW-SE axis
}
// Sparse map of the tile floor keyed by (x, y): tiles absent from the map
// are implicitly unflipped (white); `true` marks a flipped (black) tile.
struct Floor {
    flipped: HashMap<(isize, isize), bool>,
}
impl Floor {
    fn new() -> Floor {
        Floor {
            flipped: HashMap::new(),
        }
    }
    /// The six hexes adjacent to (x, y) in this axial coordinate scheme.
    /// Shared by `flipped_neighbours` and `evolve_once`, which previously
    /// duplicated the offset table.
    fn neighbour_coords(x: isize, y: isize) -> [(isize, isize); 6] {
        [
            (x - 1, y),
            (x, y - 1),
            (x + 1, y - 1),
            (x + 1, y),
            (x, y + 1),
            (x - 1, y + 1),
        ]
    }
    /// Toggle the tile at `pos` between flipped and unflipped.
    fn toggle_position(&mut self, pos: HexPosition) -> () {
        let already_flipped = self.flipped.get(&(pos.x, pos.y)).unwrap_or(&false);
        let toggled = !already_flipped;
        self.flipped.insert((pos.x, pos.y), toggled);
    }
    /// Build a floor by toggling each listed position in order; positions
    /// appearing an even number of times end up unflipped.
    fn from_vector(v: Vec<HexPosition>) -> Floor {
        let mut floor = Floor::new();
        for hex in v {
            floor.toggle_position(hex);
        }
        floor
    }
    /// Number of tiles currently flipped.
    fn count_flipped(&self) -> usize {
        self.flipped.values().filter(|&&flipped| flipped).count()
    }
    /// How many of `pos`'s six neighbours are flipped.
    fn flipped_neighbours(&self, pos: HexPosition) -> usize {
        Floor::neighbour_coords(pos.x, pos.y)
            .iter()
            .filter(|&&(x, y)| *self.flipped.get(&(x, y)).unwrap_or(&false))
            .count()
    }
    /// One generation of the automaton:
    /// 1) a flipped tile with a neighbour count other than 1 or 2 flips back;
    /// 2) an unflipped tile with exactly 2 flipped neighbours flips;
    /// 3) tiles not yet in the map count as unflipped, so every neighbour of
    ///    a tracked tile must be considered too.
    fn evolve_once(&mut self) -> () {
        use std::collections::HashSet;
        // Candidate set = every tracked key plus all of its neighbours.
        // A HashSet replaces the previous Vec + `contains` deduplication,
        // turning an O(n^2) scan into O(n).
        let mut candidates: HashSet<(isize, isize)> = HashSet::new();
        for &(x, y) in self.flipped.keys() {
            candidates.insert((x, y));
            for neighbour in Floor::neighbour_coords(x, y).iter() {
                candidates.insert(*neighbour);
            }
        }
        // Decide all flips against the current state first, then apply, so
        // toggles within a generation don't influence each other.
        let mut to_flip = vec![];
        for key in candidates {
            let is_flipped = *self.flipped.get(&key).unwrap_or(&false);
            let flipped_neighbours = self.flipped_neighbours(HexPosition { x: key.0, y: key.1 });
            if (is_flipped && flipped_neighbours != 1 && flipped_neighbours != 2)
                || (!is_flipped && flipped_neighbours == 2)
            {
                to_flip.push(key);
            }
        }
        for pos in to_flip {
            self.toggle_position(HexPosition { x: pos.0, y: pos.1 });
        }
    }
    /// Run `evolve_once` the given number of times.
    fn evolve(&mut self, times: usize) -> () {
        for _ in 0..times {
            self.evolve_once();
        }
    }
}
/// Return the position one step from `pos` in direction `dir`.
/// East/West move along x; the NW-SE axis is y; the remaining two
/// diagonals adjust both coordinates.
fn movement(pos: HexPosition, dir: Direction) -> HexPosition {
    let (dx, dy) = match dir {
        Direction::East => (1, 0),
        Direction::SouthEast => (0, 1),
        Direction::SouthWest => (-1, 1),
        Direction::West => (-1, 0),
        Direction::NorthWest => (0, -1),
        Direction::NorthEast => (1, -1),
    };
    HexPosition {
        x: pos.x + dx,
        y: pos.y + dy,
    }
}
/// Read the puzzle input and parse one hex position per line.
/// Panics (unwrap) if the file is missing or unreadable, as before.
fn read_file() -> Vec<HexPosition> {
    let contents = std::fs::read_to_string("./input/input24.txt").unwrap();
    contents.lines().map(parse_line).collect()
}
/// Parse one line of direction tokens ("e", "se", "sw", "w", "nw", "ne")
/// into the final hex position reached by walking them from the origin.
///
/// `fold_many1` applies the `alt` of tags repeatedly, folding each matched
/// token into the running position via `movement`. Trying "e" before "se"
/// etc. is safe because each tag must match at the current input position,
/// so "e" cannot accidentally consume the 'e' inside "se"/"ne".
fn parse_line(s: &str) -> HexPosition {
    let mut parser = fold_many1(
        alt((
            tag("e"),
            tag("se"),
            tag("sw"),
            tag("w"),
            tag("nw"),
            tag("ne"),
        )),
        // Fold starts at the origin.
        HexPosition { x: 0, y: 0 },
        |current, newdir| {
            if newdir == "e" {
                movement(current, Direction::East)
            } else if newdir == "se" {
                movement(current, Direction::SouthEast)
            } else if newdir == "sw" {
                movement(current, Direction::SouthWest)
            } else if newdir == "w" {
                movement(current, Direction::West)
            } else if newdir == "nw" {
                movement(current, Direction::NorthWest)
            } else if newdir == "ne" {
                movement(current, Direction::NorthEast)
            } else {
                // Unreachable: `alt` only yields one of the six tags above.
                panic!("this can't happen!")
            }
        },
    );
    let result: IResult<&str, HexPosition> = parser(s);
    // Panics on malformed input; puzzle input is assumed well-formed.
    result.unwrap().1
}
/// Part 1: toggle each listed tile once and count how many end up flipped.
fn solve_part_1(hexes_flipped: Vec<HexPosition>) -> usize {
    Floor::from_vector(hexes_flipped).count_flipped()
}
/// Entry point for part 1: parse the input file and solve.
pub fn part_1() -> usize {
    solve_part_1(read_file())
}
// ran for around 10 minutes via cargo run - but after compilation in release mode,
// went down to around 10 seconds!
/// Part 2: build the initial floor, run 100 automaton generations, and
/// count the flipped tiles.
fn solve_part_2(hexes_flipped: Vec<HexPosition>) -> usize {
    let mut tiles = Floor::from_vector(hexes_flipped);
    tiles.evolve(100);
    tiles.count_flipped()
}
/// Entry point for part 2: parse the input file and solve.
pub fn part_2() -> usize {
    solve_part_2(read_file())
}
|
#![cfg_attr(all(feature = "nightly", test), feature(test))]
#[macro_use]
extern crate log;
#[macro_use]
extern crate lazy_static;
extern crate chrono;
extern crate core;
extern crate libc;
extern crate pbr;
extern crate unix_daemonize;
extern crate byteorder;
extern crate udt;
extern crate ring;
extern crate colored;
extern crate hex;
// crates needed for unit tests
#[cfg(test)]
extern crate rand;
pub mod connection;
pub mod ssh;
pub mod file;
pub mod progress;
use byteorder::{ReadBytesExt, WriteBytesExt, LittleEndian};
use colored::*;
use connection::{PortRange, Transceiver};
use log::{Record, Level, Metadata};
use hex::ToHex;
use std::net::{SocketAddr, IpAddr};
use std::fs::File;
use std::io;
use std::io::{Cursor, SeekFrom, Write};
use std::path::{Path, PathBuf};
use std::{str, env, thread, fmt};
use std::str::FromStr;
use std::sync::mpsc;
use chrono::Utc;
use std::time::{Instant, Duration};
use progress::Progress;
use unix_daemonize::{daemonize_redirect, ChdirMode};
use file::ReadMsg;
// TODO config
// How long the server waits for the client's very first connection before
// the watchdog in `Server::start` kills the process.
const INITIAL_ACCEPT_TIMEOUT_SECONDS: u64 = 60;
// How long (6 hours) the server waits for the client to reconnect after a
// severed transfer.
const RECONNECT_ACCEPT_TIMEOUT_SECONDS: u64 = 21600;
/// Rewrite the current terminal line in place: prefixes the message with the
/// ANSI erase-line escape ("\x1b[2K") plus a carriage return, then flushes
/// stdout so the partial line shows immediately. When debug logging is
/// enabled the overprint is suppressed so it doesn't fight with log output.
macro_rules! overprint {
    ($fmt: expr) => {
        if log_enabled!(Level::Debug) {
            // println!($fmt);
        } else {
            print!(concat!("\x1b[2K\r", $fmt));
            std::io::stdout().flush().unwrap();
        }
    };
    // Variadic form: same behavior, with format arguments.
    ($fmt:expr, $($arg:tt)*) => {
        if log_enabled!(Level::Debug) {
            // println!($fmt, $($arg)*);
        } else {
            print!(concat!("\x1b[2K\r", $fmt), $($arg)*);
            std::io::stdout().flush().unwrap();
        }
    };
}
#[macro_export]
/// Log the message at error level, then panic with the same message.
/// Used for unrecoverable conditions where the process should stop.
macro_rules! die {
    ($fmt: expr) => {
        error!($fmt);
        panic!($fmt)
    };
    ($fmt:expr, $($arg:tt)*) => {
        error!($fmt, $($arg)*);
        panic!($fmt, $($arg)*)
    };
}
/// Server-side state for one transfer session.
pub struct Server {
    /// Address (third field of $SSH_CONNECTION) the client should dial.
    pub ip: String,
    /// Expanded (~-resolved) path of the file being served.
    filename: String,
    /// Listening connection wrapper.
    conn: connection::Server,
}
/// A file location on this machine.
pub type LocalTarget = PathBuf;
/// A file location on a remote host: (host, path).
pub type RemoteTarget = (String, PathBuf);
/// Either endpoint of a transfer, as parsed from the command line.
#[derive(Clone)]
pub enum Target {
    Local(LocalTarget),
    Remote(RemoteTarget),
}
/// Transfer direction with both endpoints resolved.
#[derive(Clone)]
enum TransferState {
    Send(LocalTarget, RemoteTarget),
    Receive(RemoteTarget, LocalTarget),
}
/// Client-side state: allowed port range plus what to transfer where.
pub struct Client {
    port_range: PortRange,
    transfer_state: TransferState,
}
/// Which role this process is running as.
#[derive(Clone, Copy)]
pub enum ShoopMode {
    Server,
    Client,
}
/// Logging verbosity requested by the user.
#[derive(Clone, Copy)]
pub enum LogVerbosity {
    Normal,
    Debug,
}
impl LogVerbosity {
fn to_log_level(self, mode: ShoopMode) -> Level {
match self {
LogVerbosity::Debug => Level::Debug,
LogVerbosity::Normal => {
match mode {
ShoopMode::Server => Level::Info,
ShoopMode::Client => Level::Error,
}
}
}
}
}
/// Direction of the file transfer.
#[derive(Clone, Copy)]
pub enum TransferMode {
    Send,
    Receive,
}
/// Server startup failures. The discriminant doubles as the numeric code
/// printed in the "shooperr <code>" handshake line (see `Display` below).
#[derive(Clone, Copy)]
pub enum ServerErr {
    SshEnv = 0,
    File,
}
impl fmt::Display for ServerErr {
    /// Render as "<numeric code> <message>"; the code is the enum
    /// discriminant, which the client parses from the handshake line.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let code = *self as i32;
        let message = match *self {
            ServerErr::SshEnv => "SSH_CONNECTION env variable unset but required.",
            ServerErr::File => "File doesn't exist, ya dingus.",
        };
        write!(f, "{} {}", code, message)
    }
}
/// How a transfer attempt ended abnormally.
#[allow(dead_code)]
enum ErrorKind {
    // Connection dropped; transfer may resume from `Error::finished`.
    Severed,
    // Unrecoverable; handlers call die!() on this.
    Fatal,
}
/// Transfer error carrying the number of bytes already completed, so a
/// reconnect can resume at the right offset.
struct Error {
    kind: ErrorKind,
    msg: Option<String>,
    finished: u64,
}
/// Logger printing to stdout. Server mode prefixes lines with a timestamp
/// and pid; client mode prints bare messages (see the `log::Log` impl).
pub struct ShoopLogger {
    pid: i32,
    mode: ShoopMode,
    log_level: Level,
}
impl log::Log for ShoopLogger {
    fn enabled(&self, metadata: &Metadata) -> bool {
        metadata.level() <= self.log_level
    }
    fn log(&self, record: &Record) {
        if self.enabled(record.metadata()) {
            // Single-letter colorized severity tag.
            let prefix_symbol = match record.level() {
                Level::Error => "E".red().bold(),
                Level::Warn => "W".yellow().bold(),
                Level::Info => "I".normal(),
                Level::Debug => "D".dimmed(),
                Level::Trace => "T".dimmed(),
            };
            // Server output goes to a log file (stdout is redirected by
            // daemonize), so include timestamp + pid; client output is the
            // user's terminal, so keep it clean.
            let pidinfo = match self.mode {
                ShoopMode::Server => format!("{} ({}) ",
                                             Utc::now().to_rfc2822(),
                                             self.pid),
                ShoopMode::Client => String::new(),
            };
            println!("{}[{}] {}", pidinfo, prefix_symbol, record.args());
        }
    }
    fn flush(&self) {}
}
impl ShoopLogger {
    /// Install a `ShoopLogger` as the global logger.
    ///
    /// Bug fix: `log::set_boxed_logger` alone leaves the crate's global max
    /// level at its default of `Off` (log 0.4 API, as used here), which
    /// filters out every record before it reaches the logger. Setting the
    /// max level to match the configured verbosity makes logging actually
    /// emit output.
    pub fn init(mode: ShoopMode, verbosity: LogVerbosity) -> Result<(), log::SetLoggerError> {
        let log_level = verbosity.to_log_level(mode);
        log::set_boxed_logger(Box::new(ShoopLogger {
            pid: unsafe { libc::getpid() },
            mode,
            log_level,
        }))?;
        log::set_max_level(log_level.to_level_filter());
        Ok(())
    }
}
impl Error {
    /// Build an error of the given kind with a message and the number of
    /// bytes already transferred (for resume).
    pub fn new(kind: ErrorKind, msg: &str, finished: u64) -> Error {
        Error {
            kind,
            msg: Some(msg.to_owned()),
            finished,
        }
    }
    /// Wrap an `io::Error` as a severed-connection error.
    #[allow(dead_code)]
    pub fn from(err: io::Error, finished: u64) -> Error {
        Error {
            kind: ErrorKind::Severed,
            msg: Some(format!("{:?}", err)),
            finished,
        }
    }
}
impl Target {
    /// Parse a CLI-style target string. "host:path" (with no '/' before the
    /// colon) becomes a remote target; everything else is a local path.
    ///
    /// Rewritten to slice the input directly instead of cloning the whole
    /// string (`s.to_owned()`) just to split it.
    pub fn from(s: String) -> Target {
        match s.find(':') {
            None => Target::Local(s.into()),
            Some(i) => {
                if s[..i].contains('/') {
                    // A slash before the colon means this is a local path
                    // that merely contains a ':'.
                    Target::Local(s.into())
                } else {
                    let host = s[..i].to_owned();
                    let path = PathBuf::from(&s[i + 1..]);
                    Target::Remote((host, path))
                }
            }
        }
    }
    pub fn is_local(&self) -> bool {
        match *self {
            Target::Local(_) => true,
            _ => false,
        }
    }
    pub fn is_remote(&self) -> bool {
        !self.is_local()
    }
    /// Borrow the path component of either variant. Replaces the previous
    /// `self.clone()` in the two helpers below.
    fn path_ref(&self) -> &Path {
        match *self {
            Target::Local(ref p) | Target::Remote((_, ref p)) => p,
        }
    }
    /// True if the path ends in a file-name component (i.e. isn't just a
    /// directory/root).
    fn looks_like_file_path(&self) -> bool {
        self.path_ref().file_name().is_some()
    }
    /// Owned copy of the path component.
    fn get_path(&self) -> PathBuf {
        self.path_ref().to_path_buf()
    }
}
impl fmt::Display for TransferState {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let target = self.clone();
let pretty = match target {
TransferState::Send(l, (rh, rp)) => {
format!("Local({}) -> Remote({}:{})",
l.display(), rh, rp.display())
}
TransferState::Receive((rh, rp), l) => {
format!("Remote({}:{}) -> Local({})",
rh, rp.display(), l.display())
}
};
write!(f, "{}", pretty)
}
}
impl Server {
    /// Detach from the terminal; stdout/stderr are redirected to
    /// ~/.shoop.log from here on.
    fn daemonize() {
        let stdout = Some(Path::new(&env::var("HOME").unwrap()).join(".shoop.log"));
        let stderr = stdout.clone();
        daemonize_redirect(stdout, stderr, ChdirMode::ChdirRoot).unwrap();
    }
    // TODO super basic
    /// Expand a leading "~/" to $HOME; anything else passes through.
    fn expand_filename(s: &str) -> String {
        if s.starts_with("~/") {
            Path::new(&env::var("HOME").unwrap()).join(&s[2..])
                .to_str().unwrap().into()
        } else {
            s.into()
        }
    }
    /// Validate the environment and requested file, print the handshake line
    /// ("shoop 0 <ip> <port> <hex key>") for the client to parse over ssh,
    /// then daemonize. On failure prints "shooperr <code>" instead and
    /// returns the error.
    pub fn new(port_range: PortRange, filename: &str) -> Result<Server, ServerErr> {
        let mut err: Option<ServerErr> = None;
        let sshconnstr = match env::var("SSH_CONNECTION") {
            Ok(s) => s.trim().to_owned(),
            Err(_) => {
                err = Some(ServerErr::SshEnv);
                String::new()
            }
        };
        let expanded_filename = Server::expand_filename(filename);
        if !Path::new(&expanded_filename).is_file() {
            err = Some(ServerErr::File);
        }
        match err {
            None => {
                // Third space-separated field of $SSH_CONNECTION is used as
                // the address to advertise back to the client.
                let sshconn: Vec<&str> = sshconnstr.split(' ').collect();
                let ip = sshconn[2].to_owned();
                let keybytes = connection::crypto::gen_key();
                let port = connection::Server::get_open_port(&port_range).unwrap();
                // The handshake line is printed *before* daemonizing so the
                // client sees it on the ssh channel.
                println!("shoop 0 {} {} {}", ip, port, keybytes.to_hex());
                Server::daemonize();
                info!("got request: serve \"{}\" on range {}", filename, port_range);
                info!("sent response: shoop 0 {} {} <key redacted>", ip, port);
                let conn = connection::Server::new(IpAddr::from_str(&ip).unwrap(), port, &keybytes);
                Ok(Server {
                    ip: ip,
                    conn: conn,
                    filename: expanded_filename,
                })
            }
            Some(e) => {
                // Still daemonize so the failure is logged the same way.
                println!("shooperr {}", e);
                Server::daemonize();
                info!("got request: serve \"{}\" on range {}", filename, port_range);
                error!("init error: {}", e);
                Err(e)
            }
        }
    }
    /// Accept loop: wait for the client (guarded by a watchdog timeout), run
    /// the transfer, and keep accepting reconnects after severed connections
    /// until the transfer completes.
    pub fn start(&mut self, mode: TransferMode) {
        self.conn.listen().unwrap();
        let mut connection_count: usize = 0;
        info!("listening...");
        loop {
            // Watchdog thread: exit the process if accept() doesn't succeed
            // within the timeout; the accept path signals success via `tx`.
            let (tx, rx) = mpsc::channel();
            thread::spawn(move || {
                let (timeout, err) = if connection_count == 0 {
                    (INITIAL_ACCEPT_TIMEOUT_SECONDS,
                     "initial connection")
                } else {
                    (RECONNECT_ACCEPT_TIMEOUT_SECONDS,
                     "reconnect")
                };
                thread::sleep(Duration::from_secs(timeout));
                if let Err(_) = rx.try_recv() {
                    error!("timed out waiting for {}. exiting.",
                           err);
                    std::process::exit(1);
                }
            });
            info!("waiting for connection...");
            let client = &mut match self.conn.accept() {
                Ok(client) => client,
                Err(e) => {
                    die!("unexpected error on sock accept() {:?}", e);
                }
            };
            connection_count += 1;
            tx.send(()).unwrap();
            info!("accepted connection with {:?}!", client.getpeer());
            match mode {
                TransferMode::Send => {
                    match self.send_file(client) {
                        Ok(_) => {
                            info!("done sending file");
                            let _ = client.close();
                            break;
                        }
                        // Severed: the client may reconnect and resume, so
                        // keep listening.
                        Err(Error { kind: ErrorKind::Severed, msg, finished }) => {
                            info!("connection severed, msg: {:?}, finished: {}", msg, finished);
                            let _ = client.close();
                            continue;
                        }
                        Err(Error { kind: ErrorKind::Fatal, msg, finished }) => {
                            die!("connection fatal, msg: {:?}, finished: {}", msg, finished);
                        }
                    }
                }
                TransferMode::Receive => {
                    die!("receive not supported yet");
                    // match recv_file(&self.conn, filesize.unwrap(), &local_path, offset) {
                    //     Ok(_) => {
                    //         info!("done sending file");
                    //         let _ = client.close();
                    //         break;
                    //     }
                    //     Err(Error { kind: ErrorKind::Severed, msg, finished }) => {
                    //         info!("connection severed, msg: {:?}, finished: {}", msg, finished);
                    //         let _ = client.close();
                    //         continue;
                    //     }
                    //     Err(Error { kind: ErrorKind::Fatal, msg, finished }) => {
                    //         die!("connection fatal, msg: {:?}, finished: {}", msg, finished);
                    //     }
                    // }
                }
            }
        }
        info!("stopped listening.");
    }
    /// Read the client's resume offset: the first 8 bytes of its first
    /// message, little-endian u64.
    fn recv_offset<T: Transceiver>(&mut self, client: &mut T) -> Result<u64, Error> {
        let mut buf = vec![0u8; 1024];
        match client.recv(&mut buf) {
            Ok(i) if i < 8 => return Err(Error::new(ErrorKind::Severed, &format!("msg too short"), 0)),
            Err(e) => return Err(Error::new(ErrorKind::Severed, &format!("0-length msg received. {:?}", e), 0)),
            _ => {}
        };
        let mut rdr = Cursor::new(buf);
        let offset = rdr.read_u64::<LittleEndian>().unwrap();
        Ok(offset)
    }
    /// Tell the client how many bytes remain (little-endian u64 padded into
    /// a 1 KiB message).
    fn send_remaining<T: Transceiver>(&mut self, client: &mut T, remaining: u64) -> Result<(), Error> {
        let mut buf = vec![0u8; 1024];
        let mut wtr = vec![];
        wtr.write_u64::<LittleEndian>(remaining).unwrap();
        buf[..wtr.len()].copy_from_slice(&wtr);
        client.send(&mut buf, wtr.len())
            .map_err(|e| Error::new(ErrorKind::Severed,
                &format!("failed to write filesize hdr. {:?}", e), remaining))
    }
    fn get_file_size(filename: &str) -> u64 {
        File::open(filename.to_owned()).unwrap()
            .metadata().unwrap()
            .len()
    }
    /// Stream the file from the client's offset to EOF, then wait for the
    /// client's confirmation message before closing.
    ///
    /// NOTE(review): severed errors here carry `remaining` (bytes left at
    /// start) as `finished`; resume still works because the client re-sends
    /// its own offset on reconnect — confirm before relying on this field.
    fn send_file<T: Transceiver>(&mut self, client: &mut T) -> Result<(), Error> {
        let mut buf = vec![0u8; connection::MAX_MESSAGE_SIZE];
        let offset = try!(self.recv_offset(client));
        info!("starting at offset {}", offset);
        let remaining = Server::get_file_size(&self.filename) - offset;
        info!("{} bytes remaining", remaining);
        try!(self.send_remaining(client, remaining));
        info!("sent remaining packet. sending file...");
        // Reader streams chunks from a background thread over a channel.
        let reader = file::Reader::new(&self.filename, offset);
        loop {
            match reader.rx.recv() {
                Ok(ReadMsg::Finish) => {
                    break;
                }
                Ok(ReadMsg::Read(payload)) => {
                    buf[..payload.len()].copy_from_slice(&payload);
                    if let Err(e) = client.send(&mut buf, payload.len()) {
                        return Err(Error::new(ErrorKind::Severed,
                                              &format!("{:?}", e),
                                              remaining));
                    }
                }
                Err(_) | Ok(ReadMsg::Error) => {
                    client.close().expect("Error closing stream");
                    error!("failed to read from file");
                    panic!("failed to read from file");
                }
            }
        }
        if let Err(e) = client.recv(&mut buf[..]) {
            warn!("finished sending, but failed getting client confirmation");
            return Err(Error::new(ErrorKind::Severed,
                                  &format!("{:?}", e),
                                  remaining))
        }
        info!("got client finish confirmation.");
        client.close().expect("Error closing stream.");
        Ok(())
    }
}
impl Client {
    /// Validate and normalize the two endpoints into a `TransferState`.
    ///
    /// Exactly one endpoint must be remote; a local destination that is an
    /// existing directory gets the source's file name appended.
    pub fn new(source: Target, dest: Target, port_range: PortRange)
               -> Result<Client, String> {
        if source.is_local() && dest.is_local() ||
           source.is_remote() && dest.is_remote() {
            return Err("source and dest can't both be local or remote".into());
        }
        if let Target::Local(path) = source.clone() {
            // Bug fix: this check was inverted — it used to return the error
            // when the path *was* an existing regular file. Error only when
            // no regular file exists, matching the message below.
            if !Path::new(&path).is_file() {
                return Err("local source file doesn't exist or is a directory".into());
            }
        }
        if source.is_remote() && !source.looks_like_file_path() ||
           dest.is_remote() && !dest.looks_like_file_path() {
            return Err("remote target doesn't look like a normal \
                        file path (folders not supported)".into());
        }
        let final_dest = match dest.clone() {
            Target::Local(path) => {
                let source_path = source.get_path();
                let dest_path = Path::new(&path);
                // Copying into an existing directory keeps the source's
                // file name.
                let final_dest_path = if dest_path.is_dir() {
                    dest_path.join(source_path.file_name().unwrap())
                } else {
                    dest_path.to_path_buf()
                };
                Target::Local(final_dest_path)
            }
            Target::Remote(_) => dest
        };
        let state = if let Target::Local(s) = source {
            if let Target::Remote(d) = final_dest {
                TransferState::Send(s, d)
            } else {
                return Err("source and dest can't both be local".into());
            }
        } else if let Target::Remote(s) = source {
            if let Target::Local(d) = final_dest {
                TransferState::Receive(s, d)
            } else {
                return Err("source and dest can't both be remote".into());
            }
        } else {
            panic!("something in the assertions are wrong.");
        };
        debug!("✍️ {}", state);
        Ok(Client {
            port_range: port_range,
            transfer_state: state
        })
    }
    /// Run the transfer: establish the ssh control channel, download the
    /// file, and print elapsed-time stats when done. Only receiving is
    /// supported; send states panic.
    pub fn start(&mut self, force_dl: bool) {
        let ssh = match self.transfer_state.clone() {
            TransferState::Send(..) => {
                panic!("sending unsupported");
            }
            TransferState::Receive((host, path), _) => {
                ssh::Connection::new(host, path, &self.port_range)
            }
        };
        overprint!(" - establishing SSH session...");
        let response = ssh.connect().unwrap_or_else(|e| {
            error!("ssh error: {}", e.msg);
            std::process::exit(1);
        });
        debug!("👈 init(version: {}, addr: {})",
               response.version, response.addr);
        let start_ts = Instant::now();
        let pb = Progress::new();
        match self.transfer_state.clone() {
            TransferState::Send(..) => {
                die!("send not supported");
            }
            TransferState::Receive(_, dest_path) => {
                self.receive(&dest_path,
                             force_dl,
                             response.addr,
                             &response.key,
                             &pb);
            }
        }
        // Human-readable elapsed time: s / m+s / h+m+s.
        let elapsed = start_ts.elapsed().as_secs();
        let fmt_time = if elapsed < 60 {
            format!("{}s", elapsed)
        } else if elapsed < 60 * 60 {
            format!("{}m{}s", elapsed / 60, elapsed % 60)
        } else {
            format!("{}h{}m{}s", elapsed / (60 * 60), elapsed / 60, elapsed % 60)
        };
        pb.finish(format!("shooped it all up in {}\n", fmt_time.green().bold()));
    }
    /// Interactive y/n prompt before clobbering an existing file.
    /// Ok(()) means proceed, Err(()) means the user declined.
    fn confirm_overwrite() -> Result<(),()> {
        loop {
            print!("\n{}[y/n] ",
                   "file exists. overwrite? ".yellow().bold());
            io::stdout().flush().expect("stdout flush fail");
            let mut input = String::new();
            io::stdin().read_line(&mut input).expect("stdio fail");
            let normalized = input.trim().to_lowercase();
            if normalized == "y" ||
               normalized == "yes" ||
               normalized == "yeah" ||
               normalized == "heck yes" {
                break;
            } else if normalized == "whatever" ||
                      normalized == "w/e" {
                println!("{}", "close enough.".green().bold());
                break;
            } else if normalized == "n" ||
                      normalized == "no" ||
                      normalized == "nah" ||
                      normalized == "heck naw" {
                return Err(())
            } else {
                println!("answer 'y' or 'n'.")
            }
        }
        Ok(())
    }
    /// Download loop: reconnects (resuming at `offset`) whenever the
    /// connection is severed, until the whole file has arrived.
    fn receive(&mut self,
               dest_path: &PathBuf,
               force_dl: bool,
               addr: SocketAddr,
               keybytes: &[u8],
               pb: &Progress) {
        let mut offset = 0u64;
        let mut filesize = None;
        let path = Path::new(dest_path);
        if path.is_file() && !force_dl && Client::confirm_overwrite().is_err() {
            error!("sheepishly avoiding overwriting your data. you're welcome, jeez.");
            std::process::exit(0);
        }
        loop {
            overprint!(" - opening UDT connection...");
            let mut conn = connection::Client::new(addr, &keybytes);
            match conn.connect() {
                Ok(()) => {
                    overprint!(" - connection opened, shakin' hands, makin' frands");
                    info!("👍 UDT connection established")
                }
                Err(e) => {
                    die!("errrrrrrr connecting to {}:{} - {:?}", addr.ip(), addr.port(), e);
                }
            }
            // Tell the server where to resume from (little-endian u64).
            let mut buf = vec![0u8; connection::MAX_MESSAGE_SIZE];
            let mut wtr = vec![];
            wtr.write_u64::<LittleEndian>(offset).unwrap();
            buf[..wtr.len()].copy_from_slice(&wtr);
            debug!("👉 offset({})", offset);
            if let Err(e) = conn.send(&mut buf, wtr.len()) {
                error!("{:?}", e);
                conn.close().unwrap();
                continue;
            }
            if let Ok(len) = conn.recv(&mut buf[..]) {
                if len != 8 {
                    error!("failed to get filesize from server, probable timeout.");
                    std::process::exit(1);
                }
                let mut rdr = Cursor::new(buf);
                let remaining = rdr.read_u64::<LittleEndian>().unwrap();
                debug!("👈 remaining({})", remaining);
                // The first response fixes the total size; every later
                // reconnect must be consistent with it.
                filesize = filesize.or_else(|| {
                    debug!("✍️ set total filesize to {}", remaining);
                    Some(remaining)
                });
                if filesize.unwrap() - remaining != offset {
                    error!("it seems the server filesize has changed. dying.");
                    std::process::exit(1);
                }
                buf = rdr.into_inner();
                pb.size(filesize.unwrap());
                pb.add(offset);
                pb.message(format!("{} ",
                                   dest_path.file_name().unwrap().to_string_lossy().blue()));
                match recv_file(&mut conn,
                                filesize.unwrap(),
                                path,
                                offset,
                                &pb) {
                    Ok(_) => {
                        debug!("👉 finish packet");
                        pb.message(format!("{} (done, sending confirmation) ",
                                           dest_path.file_name().unwrap().to_string_lossy().green()));
                        // One-byte confirmation so the server knows we're done.
                        buf[0] = 0;
                        if let Err(e) = conn.send(&mut buf, 1) {
                            warn!("failed to send close signal to server: {:?}", e);
                        }
                        pb.message(format!("{} ",
                                           dest_path.file_name().unwrap().to_string_lossy().green()));
                        let _ = conn.close();
                        break;
                    }
                    Err(Error { kind: ErrorKind::Severed, finished, .. }) => {
                        pb.message(format!("{}", "[[conn severed]] ".yellow().bold()));
                        // Resume from wherever the writer got to.
                        offset = finished;
                    }
                    Err(Error { kind: ErrorKind::Fatal, msg, .. }) => {
                        die!("{:?}", msg);
                    }
                }
            }
            let _ = conn.close();
        }
    }
}
/// Receive the rest of `filename` over `conn`, starting at byte `offset`,
/// feeding the progress bar as bytes arrive. A dropped connection yields
/// `ErrorKind::Severed` carrying the byte count reached, so the caller can
/// reconnect and resume from there.
fn recv_file<T: Transceiver>(conn: &mut T,
                             filesize: u64,
                             filename: &Path,
                             offset: u64,
                             pb: &Progress)
                             -> Result<(), Error> {
    // Writer runs on its own thread (see the "thread joined" debug below);
    // writes are handed off to it.
    let f = file::Writer::new(filename.to_path_buf());
    f.seek(SeekFrom::Start(offset));
    debug!("✍️ seeking to pos {} in {}", offset, filename.display());
    let mut total = offset;
    let buf = &mut [0u8; connection::MAX_MESSAGE_SIZE];
    loop {
        let len = match conn.recv(buf) {
            Ok(len) if len > 0 => {
                len
            }
            // An empty message means the peer went away mid-transfer.
            Ok(_) => {
                f.close();
                warn!("\n\nempty msg, severing\n");
                return Err(Error::new(ErrorKind::Severed,
                                      "empty msg", total))
            }
            Err(e) => {
                f.close();
                warn!("\n\nUDT err, severing");
                return Err(Error::new(ErrorKind::Severed,
                                      &format!("{:?}", e), total))
            }
        };
        total += len as u64;
        pb.add(len as u64);
        f.write_all(buf[..len].to_owned());
        if total >= filesize {
            break;
        }
    }
    f.close();
    debug!("");
    debug!("👾 file writing thread joined and closed");
    Ok(())
}
|
use std::convert::TryInto;
use std::future::Future;
use std::io::IoSlice;
use std::net::{Ipv4Addr, Ipv6Addr, SocketAddr, SocketAddrV4, SocketAddrV6};
use std::time::Duration;
use anyhow::Result;
use async_std::io::{ReadExt, WriteExt};
use async_std::net::{TcpListener, TcpStream};
use wasmtime::{Caller, FuncType, Linker, ValType};
use wasmtime::{Memory, Trap};
use crate::api::error::IntoTrap;
use crate::state::DnsIterator;
use crate::{api::get_memory, state::ProcessState};
use super::{
link_async2_if_match, link_async3_if_match, link_async4_if_match, link_async5_if_match,
link_async6_if_match, link_async7_if_match, link_if_match,
};
// Register the networking host APIs (DNS resolution + TCP) with the linker.
// Each call exposes one `lunatic::networking` function to guest modules,
// gated by `namespace_filter`.
pub(crate) fn register(
    linker: &mut Linker<ProcessState>,
    namespace_filter: &[String],
) -> Result<()> {
    link_async4_if_match(
        linker,
        "lunatic::networking",
        "resolve",
        FuncType::new(
            [ValType::I32, ValType::I32, ValType::I32, ValType::I32],
            [ValType::I32],
        ),
        resolve,
        namespace_filter,
    )?;
    link_if_match(
        linker,
        "lunatic::networking",
        "drop_dns_iterator",
        FuncType::new([ValType::I64], []),
        drop_dns_iterator,
        namespace_filter,
    )?;
    link_if_match(
        linker,
        "lunatic::networking",
        "resolve_next",
        FuncType::new(
            [
                ValType::I64,
                ValType::I32,
                ValType::I32,
                ValType::I32,
                ValType::I32,
                ValType::I32,
            ],
            [ValType::I32],
        ),
        resolve_next,
        namespace_filter,
    )?;
    link_async6_if_match(
        linker,
        "lunatic::networking",
        "tcp_bind",
        FuncType::new(
            [
                ValType::I32,
                ValType::I32,
                ValType::I32,
                ValType::I32,
                ValType::I32,
                ValType::I32,
            ],
            [ValType::I32],
        ),
        tcp_bind,
        namespace_filter,
    )?;
    link_if_match(
        linker,
        "lunatic::networking",
        "drop_tcp_listener",
        FuncType::new([ValType::I64], []),
        drop_tcp_listener,
        namespace_filter,
    )?;
    link_async3_if_match(
        linker,
        "lunatic::networking",
        "tcp_accept",
        FuncType::new([ValType::I64, ValType::I32, ValType::I32], [ValType::I32]),
        tcp_accept,
        namespace_filter,
    )?;
    link_async7_if_match(
        linker,
        "lunatic::networking",
        "tcp_connect",
        FuncType::new(
            [
                ValType::I32,
                ValType::I32,
                ValType::I32,
                ValType::I32,
                ValType::I32,
                ValType::I32,
                ValType::I32,
            ],
            [ValType::I32],
        ),
        tcp_connect,
        namespace_filter,
    )?;
    link_if_match(
        linker,
        "lunatic::networking",
        "drop_tcp_stream",
        FuncType::new([ValType::I64], []),
        drop_tcp_stream,
        namespace_filter,
    )?;
    link_if_match(
        linker,
        "lunatic::networking",
        "clone_tcp_stream",
        FuncType::new([ValType::I64], [ValType::I64]),
        clone_tcp_stream,
        namespace_filter,
    )?;
    link_async5_if_match(
        linker,
        "lunatic::networking",
        "tcp_write_vectored",
        FuncType::new(
            [
                ValType::I64,
                ValType::I32,
                ValType::I32,
                ValType::I32,
                ValType::I32,
            ],
            [ValType::I32],
        ),
        tcp_write_vectored,
        namespace_filter,
    )?;
    link_async5_if_match(
        linker,
        "lunatic::networking",
        "tcp_read",
        FuncType::new(
            [
                ValType::I64,
                ValType::I32,
                ValType::I32,
                ValType::I32,
                ValType::I32,
            ],
            [ValType::I32],
        ),
        tcp_read,
        namespace_filter,
    )?;
    link_async2_if_match(
        linker,
        "lunatic::networking",
        "tcp_flush",
        FuncType::new([ValType::I64, ValType::I32], [ValType::I32]),
        tcp_flush,
        namespace_filter,
    )?;
    Ok(())
}
//% lunatic::networking::resolve(
//%     name_str_ptr: u32,
//%     name_str_len: u32,
//%     timeout: u32,
//%     id_u64_ptr: u32,
//% ) -> u32
//%
//% Returns:
//% * 0 on success - The ID of the newly created DNS iterator is written to **id_u64_ptr**
//% * 1 on error   - The error ID is written to **id_u64_ptr**
//%
//% Performs a DNS resolution. The returned iterator may not actually yield any values
//% depending on the outcome of any resolution performed.
//%
//% Traps:
//% * If the name is not a valid utf8 string.
//% * If **name_str_ptr + name_str_len** is outside the memory.
//% * If **id_u64_ptr** is outside the memory.
fn resolve(
    mut caller: Caller<ProcessState>,
    name_str_ptr: u32,
    name_str_len: u32,
    timeout: u32,
    id_u64_ptr: u32,
) -> Box<dyn Future<Output = Result<u32, Trap>> + Send + '_> {
    Box::new(async move {
        // Copy the host name out of guest memory and require valid UTF-8.
        let mut buffer = vec![0; name_str_len as usize];
        let memory = get_memory(&mut caller)?;
        memory
            .read(&caller, name_str_ptr as usize, buffer.as_mut_slice())
            .or_trap("lunatic::network::resolve")?;
        let name = std::str::from_utf8(buffer.as_slice()).or_trap("lunatic::network::resolve")?;
        // Check for timeout during lookup. A `timeout` of 0 disables the
        // timer branch (the `if timeout != 0` guard), so the lookup always
        // runs to completion.
        let return_ = if let Some(result) = tokio::select! {
            _ = async_std::task::sleep(Duration::from_millis(timeout as u64)), if timeout != 0 => None,
            result = async_net::resolve(name) => Some(result)
        } {
            let (iter_or_error_id, result) = match result {
                Ok(sockets) => {
                    // This is a bug in clippy, this collect is not needless
                    #[allow(clippy::needless_collect)]
                    let id = caller
                        .data_mut()
                        .resources
                        .dns_iterators
                        .add(DnsIterator::new(sockets.into_iter()));
                    (id, 0)
                }
                Err(error) => {
                    let error_id = caller.data_mut().errors.add(error.into());
                    (error_id, 1)
                }
            };
            memory
                .write(
                    &mut caller,
                    id_u64_ptr as usize,
                    &iter_or_error_id.to_le_bytes(),
                )
                .or_trap("lunatic::networking::resolve")?;
            Ok(result)
        } else {
            // Call timed out
            let error = std::io::Error::new(std::io::ErrorKind::TimedOut, "Resolve call timed out");
            let error_id = caller.data_mut().errors.add(error.into());
            memory
                .write(&mut caller, id_u64_ptr as usize, &error_id.to_le_bytes())
                .or_trap("lunatic::networking::resolve")?;
            Ok(1)
        };
        return_
    })
}
//% lunatic::networking::drop_dns_iterator(dns_iter_id: u64)
//%
//% Drops the DNS iterator resource.
//%
//% Traps:
//% * If the DNS iterator ID doesn't exist.
fn drop_dns_iterator(mut caller: Caller<ProcessState>, dns_iter_id: u64) -> Result<(), Trap> {
    let resources = &mut caller.data_mut().resources;
    resources
        .dns_iterators
        .remove(dns_iter_id)
        .or_trap("lunatic::networking::drop_dns_iterator")?;
    Ok(())
}
//% lunatic::networking::resolve_next(
//%     dns_iter_id: u64,
//%     addr_type_u32_ptr: u32,
//%     addr_u8_ptr: u32,
//%     port_u16_ptr: u32,
//%     flow_info_u32_ptr: u32,
//%     scope_id_u32_ptr: u32,
//% ) -> u32
//%
//% Returns:
//% * 0 on success
//% * 1 on error   - There are no more addresses in this iterator
//%
//% Takes the next socket address from DNS iterator and writes it to the passed in pointers.
//% Addresses type is going to be a value of `4` or `6`, representing v4 or v6 addresses. The
//% caller needs to reserve enough space at `addr_u8_ptr` for both values to fit in (16 bytes).
//% `flow_info_u32_ptr` & `scope_id_u32_ptr` are only going to be used with version v6.
//%
//% Traps:
//% * If the DNS iterator ID doesn't exist.
//% * If **addr_type_u32_ptr** is outside the memory
//% * If **addr_u8_ptr** is outside the memory
//% * If **port_u16_ptr** is outside the memory
//% * If **flow_info_u32_ptr** is outside the memory
//% * If **scope_id_u32_ptr** is outside the memory
fn resolve_next(
    mut caller: Caller<ProcessState>,
    dns_iter_id: u64,
    addr_type_u32_ptr: u32,
    addr_u8_ptr: u32,
    port_u16_ptr: u32,
    flow_info_u32_ptr: u32,
    scope_id_u32_ptr: u32,
) -> Result<u32, Trap> {
    let memory = get_memory(&mut caller)?;
    let dns_iter = caller
        .data_mut()
        .resources
        .dns_iterators
        .get_mut(dns_iter_id)
        .or_trap("lunatic::networking::resolve_next")?;
    match dns_iter.next() {
        Some(socket_addr) => {
            match socket_addr {
                // IPv4: tag 4, then 4 address octets + port.
                SocketAddr::V4(v4) => {
                    memory
                        .write(&mut caller, addr_type_u32_ptr as usize, &4u32.to_le_bytes())
                        .or_trap("lunatic::networking::resolve_next")?;
                    memory
                        .write(&mut caller, addr_u8_ptr as usize, &v4.ip().octets())
                        .or_trap("lunatic::networking::resolve_next")?;
                    memory
                        .write(&mut caller, port_u16_ptr as usize, &v4.port().to_le_bytes())
                        .or_trap("lunatic::networking::resolve_next")?;
                }
                // IPv6: tag 6, 16 address octets + port, plus flow info and
                // scope id.
                SocketAddr::V6(v6) => {
                    memory
                        .write(&mut caller, addr_type_u32_ptr as usize, &6u32.to_le_bytes())
                        .or_trap("lunatic::networking::resolve_next")?;
                    memory
                        .write(&mut caller, addr_u8_ptr as usize, &v6.ip().octets())
                        .or_trap("lunatic::networking::resolve_next")?;
                    memory
                        .write(&mut caller, port_u16_ptr as usize, &v6.port().to_le_bytes())
                        .or_trap("lunatic::networking::resolve_next")?;
                    memory
                        .write(
                            &mut caller,
                            flow_info_u32_ptr as usize,
                            &v6.flowinfo().to_le_bytes(),
                        )
                        .or_trap("lunatic::networking::resolve_next")?;
                    memory
                        .write(
                            &mut caller,
                            scope_id_u32_ptr as usize,
                            &v6.scope_id().to_le_bytes(),
                        )
                        .or_trap("lunatic::networking::resolve_next")?;
                }
            }
            Ok(0)
        }
        None => Ok(1),
    }
}
//% lunatic::networking::tcp_bind(
//%     addr_type: u32,
//%     addr_u8_ptr: u32,
//%     port: u32,
//%     flow_info: u32,
//%     scope_id: u32,
//%     id_u64_ptr: u32
//% ) -> u32
//%
//% Returns:
//% * 0 on success - The ID of the newly created TCP listener is written to **id_u64_ptr**
//% * 1 on error   - The error ID is written to **id_u64_ptr**
//%
//% Creates a new TCP listener, which will be bound to the specified address. The returned listener
//% is ready for accepting connections.
//%
//% Binding with a port number of 0 will request that the OS assigns a port to this listener. The
//% port allocated can be queried via the `local_addr` (TODO) method.
//%
//% Traps:
//% * If **addr_type** is neither 4 or 6.
//% * If **addr_u8_ptr** is outside the memory
//% * If **id_u64_ptr** is outside the memory.
fn tcp_bind(
    mut caller: Caller<ProcessState>,
    addr_type: u32,
    addr_u8_ptr: u32,
    port: u32,
    flow_info: u32,
    scope_id: u32,
    id_u64_ptr: u32,
) -> Box<dyn Future<Output = Result<u32, Trap>> + Send + '_> {
    Box::new(async move {
        let memory = get_memory(&mut caller)?;
        // Decode the guest-provided address fields into a SocketAddr.
        let socket_addr = socket_address(
            &caller,
            &memory,
            addr_type,
            addr_u8_ptr,
            port,
            flow_info,
            scope_id,
        )?;
        let (tcp_listener_or_error_id, result) = match TcpListener::bind(socket_addr).await {
            Ok(listener) => (caller.data_mut().resources.tcp_listeners.add(listener), 0),
            Err(error) => (caller.data_mut().errors.add(error.into()), 1),
        };
        memory
            .write(
                &mut caller,
                id_u64_ptr as usize,
                &tcp_listener_or_error_id.to_le_bytes(),
            )
            // Bug fix: the trap was labeled "create_environment" (copy-paste
            // from another host call), misattributing failures from here.
            .or_trap("lunatic::networking::tcp_bind")?;
        Ok(result)
    })
}
//% lunatic::networking::drop_tcp_listener(tcp_listener_id: u64)
//%
//% Drops the TCP listener resource.
//%
//% Traps:
//% * If the TCP listener ID doesn't exist.
fn drop_tcp_listener(mut caller: Caller<ProcessState>, tcp_listener_id: u64) -> Result<(), Trap> {
    let resources = &mut caller.data_mut().resources;
    resources
        .tcp_listeners
        .remove(tcp_listener_id)
        .or_trap("lunatic::networking::drop_tcp_listener")?;
    Ok(())
}
//% lunatic::networking::tcp_accept(
//% listener_id: u64,
//% id_u64_ptr: u32,
//% peer_addr_dns_iter_id_u64_ptr: u32
//% ) -> u32
//%
//% Returns:
//% * 0 on success - The ID of the newly created TCP stream is written to **id_u64_ptr** and the
//% peer address is returned as an DNS iterator with just one element and written
//% to **peer_addr_dns_iter_id_u64_ptr**.
//% * 1 on error - The error ID is written to **id_u64_ptr**
//%
//% Traps:
//% * If the tcp listener ID doesn't exist.
//% * If **id_u64_ptr** is outside the memory.
//% * If **peer_socket_addr_id_ptr** is outside the memory.
fn tcp_accept(
    mut caller: Caller<ProcessState>,
    listener_id: u64,
    id_ptr: u32,
    socket_addr_id_ptr: u32,
) -> Box<dyn Future<Output = Result<u32, Trap>> + Send + '_> {
    Box::new(async move {
        // Look up the listener resource; traps if the ID is unknown.
        let tcp_listener = caller
            .data()
            .resources
            .tcp_listeners
            .get(listener_id)
            .or_trap("lunatic::network::tcp_accept")?;
        // On success register both the new stream and a one-element DNS
        // iterator holding the peer address; on error register the error
        // (the DNS-iterator slot is 0 in that case) and flag result = 1.
        let (tcp_stream_or_error_id, peer_addr_iter, result) = match tcp_listener.accept().await {
            Ok((stream, socket_addr)) => {
                let stream_id = caller.data_mut().resources.tcp_streams.add(stream);
                let dns_iter_id = caller
                    .data_mut()
                    .resources
                    .dns_iterators
                    .add(DnsIterator::new(vec![socket_addr].into_iter()));
                (stream_id, dns_iter_id, 0)
            }
            Err(error) => (caller.data_mut().errors.add(error.into()), 0, 1),
        };
        // Write both IDs back into guest memory; traps on bad pointers.
        let memory = get_memory(&mut caller)?;
        memory
            .write(
                &mut caller,
                id_ptr as usize,
                &tcp_stream_or_error_id.to_le_bytes(),
            )
            .or_trap("lunatic::networking::tcp_accept")?;
        memory
            .write(
                &mut caller,
                socket_addr_id_ptr as usize,
                &peer_addr_iter.to_le_bytes(),
            )
            .or_trap("lunatic::networking::tcp_accept")?;
        Ok(result)
    })
}
//% lunatic::networking::tcp_connect(
//% addr_type: u32,
//% addr_u8_ptr: u32,
//% port: u32,
//% flow_info: u32,
//% scope_id: u32,
//% timeout: u32,
//% id_u64_ptr: u32,
//% ) -> u32
//%
//% Returns:
//% * 0 on success - The ID of the newly created TCP stream is written to **id_ptr**.
//% * 1 on error - The error ID is written to **id_ptr**
//%
//% Traps:
//% * If **addr_type** is neither 4 or 6.
//% * If **addr_u8_ptr** is outside the memory
//% * If **id_u64_ptr** is outside the memory.
#[allow(clippy::too_many_arguments)]
fn tcp_connect(
    mut caller: Caller<ProcessState>,
    addr_type: u32,
    addr_u8_ptr: u32,
    port: u32,
    flow_info: u32,
    scope_id: u32,
    timeout: u32,
    id_u64_ptr: u32,
) -> Box<dyn Future<Output = Result<u32, Trap>> + Send + '_> {
    Box::new(async move {
        let memory = get_memory(&mut caller)?;
        // Decode the guest-provided address bytes; traps on bad input.
        let socket_addr = socket_address(
            &caller,
            &memory,
            addr_type,
            addr_u8_ptr,
            port,
            flow_info,
            scope_id,
        )?;
        // Race the connect against a timer. A `timeout` of 0 disables the
        // timer branch entirely, so the connect may block indefinitely.
        if let Some(result) = tokio::select! {
            _ = async_std::task::sleep(Duration::from_millis(timeout as u64)), if timeout != 0 => None,
            result = TcpStream::connect(socket_addr) => Some(result)
        } {
            // Register either the connected stream or the error and write
            // the corresponding ID to guest memory.
            let (stream_or_error_id, result) = match result {
                Ok(stream) => (caller.data_mut().resources.tcp_streams.add(stream), 0),
                Err(error) => (caller.data_mut().errors.add(error.into()), 1),
            };
            memory
                .write(
                    &mut caller,
                    id_u64_ptr as usize,
                    &stream_or_error_id.to_le_bytes(),
                )
                .or_trap("lunatic::networking::tcp_connect")?;
            Ok(result)
        } else {
            // Call timed out: surface a TimedOut error ID to the guest.
            let error = std::io::Error::new(std::io::ErrorKind::TimedOut, "Connect timed out");
            let error_id = caller.data_mut().errors.add(error.into());
            memory
                .write(&mut caller, id_u64_ptr as usize, &error_id.to_le_bytes())
                .or_trap("lunatic::networking::tcp_connect")?;
            Ok(1)
        }
    })
}
//% lunatic::networking::drop_tcp_stream(tcp_stream_id: u64)
//%
//% Drops the TCP stream resource.
//%
//% Traps:
//% * If the TCP stream ID doesn't exist.
/// Removes the stream from the resource table, trapping when the ID is
/// unknown.
fn drop_tcp_stream(mut caller: Caller<ProcessState>, tcp_stream_id: u64) -> Result<(), Trap> {
    caller
        .data_mut()
        .resources
        .tcp_streams
        .remove(tcp_stream_id)
        .or_trap("lunatic::networking::drop_tcp_stream")
        // The removed stream itself is not needed; only success matters.
        .map(|_| ())
}
//% lunatic::networking::clone_tcp_stream(tcp_stream_id: u64) -> u64
//%
//% Clones a TCP stream returning the ID of the clone.
//%
//% Traps:
//% * If the stream ID doesn't exist.
fn clone_tcp_stream(mut caller: Caller<ProcessState>, tcp_stream_id: u64) -> Result<u64, Trap> {
    // Clone the stream handle and register the clone under a new ID.
    let stream = caller
        .data()
        .resources
        .tcp_streams
        .get(tcp_stream_id)
        // Fixed trap label: was "lunatic::networking::clone_process",
        // copied from the process-cloning host function.
        .or_trap("lunatic::networking::clone_tcp_stream")?
        .clone();
    let id = caller.data_mut().resources.tcp_streams.add(stream);
    Ok(id)
}
//% lunatic::networking::tcp_write_vectored(
//% stream_id: u64,
//% ciovec_array_ptr: u32,
//% ciovec_array_len: u32,
//% timeout: u32,
//% i64_opaque_ptr: u32,
//% ) -> u32
//%
//% Returns:
//% * 0 on success - The number of bytes written is written to **opaque_ptr**
//% * 1 on error - The error ID is written to **opaque_ptr**
//%
//% Gathers data from the vector buffers and writes them to the stream. **ciovec_array_ptr** points
//% to an array of (ciovec_ptr, ciovec_len) pairs where each pair represents a buffer to be written.
//%
//% Traps:
//% * If the stream ID doesn't exist.
//% * If **ciovec_array_ptr + (ciovec_array_len * 8)** is outside the memory, or any of the sub
//% ciovecs point outside of the memory.
//% * If **i64_opaque_ptr** is outside the memory.
fn tcp_write_vectored(
    mut caller: Caller<ProcessState>,
    stream_id: u64,
    ciovec_array_ptr: u32,
    ciovec_array_len: u32,
    timeout: u32,
    opaque_ptr: u32,
) -> Box<dyn Future<Output = Result<u32, Trap>> + Send + '_> {
    Box::new(async move {
        let memory = get_memory(&mut caller)?;
        // The ciovec array lives in guest memory; traps if out of bounds.
        let buffer = memory
            .data(&caller)
            .get(ciovec_array_ptr as usize..(ciovec_array_ptr + ciovec_array_len * 8) as usize)
            .or_trap("lunatic::networking::tcp_write_vectored")?;
        // Ciovecs consist of 32bit ptr + 32bit len = 8 bytes.
        let vec_slices: Result<Vec<_>> = buffer
            .chunks_exact(8)
            .map(|ciovec| {
                let ciovec_ptr =
                    u32::from_le_bytes(ciovec[0..4].try_into().expect("works")) as usize;
                let ciovec_len =
                    u32::from_le_bytes(ciovec[4..8].try_into().expect("works")) as usize;
                // Each sub-buffer must itself lie inside guest memory.
                let slice = memory
                    .data(&caller)
                    .get(ciovec_ptr..(ciovec_ptr + ciovec_len))
                    .or_trap("lunatic::networking::tcp_write_vectored")?;
                Ok(IoSlice::new(slice))
            })
            .collect();
        let vec_slices = vec_slices?;
        // Operate on a clone of the stream handle; the resource table
        // keeps its entry. Traps if the stream ID is unknown.
        let mut stream = caller
            .data()
            .resources
            .tcp_streams
            .get(stream_id)
            .or_trap("lunatic::network::tcp_write_vectored")?
            .clone();
        // Check for timeout: a `timeout` of 0 disables the timer branch.
        if let Some(result) = tokio::select! {
            _ = async_std::task::sleep(Duration::from_millis(timeout as u64)), if timeout != 0 => None,
            result = stream.write_vectored(vec_slices.as_slice()) => Some(result)
        } {
            // Success: write the byte count; failure: write the error ID.
            let (opaque, return_) = match result {
                Ok(bytes) => (bytes as u64, 0),
                Err(error) => (caller.data_mut().errors.add(error.into()), 1),
            };
            let memory = get_memory(&mut caller)?;
            memory
                .write(&mut caller, opaque_ptr as usize, &opaque.to_le_bytes())
                .or_trap("lunatic::networking::tcp_write_vectored")?;
            Ok(return_)
        } else {
            // Call timed out: surface a TimedOut error ID to the guest.
            let error = std::io::Error::new(std::io::ErrorKind::TimedOut, "Write call timed out");
            let error_id = caller.data_mut().errors.add(error.into());
            memory
                .write(&mut caller, opaque_ptr as usize, &error_id.to_le_bytes())
                .or_trap("lunatic::networking::tcp_write_vectored")?;
            Ok(1)
        }
    })
}
//% lunatic::networking::tcp_read(
//% stream_id: u64,
//% buffer_ptr: u32,
//% buffer_len: u32,
//% timeout: u32,
//% i64_opaque_ptr: u32,
//% ) -> i32
//%
//% Returns:
//% * 0 on success - The number of bytes read is written to **opaque_ptr**
//% * 1 on error - The error ID is written to **opaque_ptr**
//%
//% Reads data from TCP stream and writes it to the buffer.
//%
//% Traps:
//% * If the stream ID doesn't exist.
//% * If **buffer_ptr + buffer_len** is outside the memory.
//% * If **i64_opaque_ptr** is outside the memory.
fn tcp_read(
    mut caller: Caller<ProcessState>,
    stream_id: u64,
    buffer_ptr: u32,
    buffer_len: u32,
    timeout: u32,
    opaque_ptr: u32,
) -> Box<dyn Future<Output = Result<u32, Trap>> + Send + '_> {
    Box::new(async move {
        // Operate on a clone of the stream handle; the resource table
        // keeps its entry. Traps if the stream ID is unknown.
        let mut stream = caller
            .data()
            .resources
            .tcp_streams
            .get(stream_id)
            .or_trap("lunatic::network::tcp_read")?
            .clone();
        let memory = get_memory(&mut caller)?;
        // Destination buffer lives in guest memory; traps if out of bounds.
        let buffer = memory
            .data_mut(&mut caller)
            .get_mut(buffer_ptr as usize..(buffer_ptr + buffer_len) as usize)
            .or_trap("lunatic::networking::tcp_read")?;
        // Check for timeout first: a `timeout` of 0 disables the timer branch.
        if let Some(result) = tokio::select! {
            _ = async_std::task::sleep(Duration::from_millis(timeout as u64)), if timeout != 0 => None,
            result = stream.read(buffer) => Some(result)
        } {
            // Success: write the byte count; failure: write the error ID.
            let (opaque, return_) = match result {
                Ok(bytes) => (bytes as u64, 0),
                Err(error) => (caller.data_mut().errors.add(error.into()), 1),
            };
            let memory = get_memory(&mut caller)?;
            memory
                .write(&mut caller, opaque_ptr as usize, &opaque.to_le_bytes())
                .or_trap("lunatic::networking::tcp_read")?;
            Ok(return_)
        } else {
            // Call timed out: surface a TimedOut error ID to the guest.
            let error = std::io::Error::new(std::io::ErrorKind::TimedOut, "Read call timed out");
            let error_id = caller.data_mut().errors.add(error.into());
            memory
                .write(&mut caller, opaque_ptr as usize, &error_id.to_le_bytes())
                .or_trap("lunatic::networking::tcp_read")?;
            Ok(1)
        }
    })
}
//% lunatic::networking::tcp_flush(stream_id: u64, error_id_ptr: u32) -> u32
//%
//% Returns:
//% * 0 on success
//% * 1 on error - The error ID is written to **error_id_ptr**
//%
//% Flushes this output stream, ensuring that all intermediately buffered contents reach their
//% destination.
//%
//% Traps:
//% * If the stream ID doesn't exist.
//% * If **error_id_ptr** is outside the memory.
fn tcp_flush(
    mut caller: Caller<ProcessState>,
    stream_id: u64,
    error_id_ptr: u32,
) -> Box<dyn Future<Output = Result<u32, Trap>> + Send + '_> {
    Box::new(async move {
        // Operate on a clone of the stream handle; the resource table
        // keeps its entry. Traps if the stream ID is unknown.
        let mut stream = caller
            .data()
            .resources
            .tcp_streams
            .get(stream_id)
            .or_trap("lunatic::network::tcp_flush")?
            .clone();
        // On success an error ID of 0 is written; otherwise the real error ID.
        let (error_id, result) = match stream.flush().await {
            Ok(()) => (0, 0),
            Err(error) => (caller.data_mut().errors.add(error.into()), 1),
        };
        let memory = get_memory(&mut caller)?;
        memory
            .write(&mut caller, error_id_ptr as usize, &error_id.to_le_bytes())
            .or_trap("lunatic::networking::tcp_flush")?;
        Ok(result)
    })
}
/// Decodes a socket address from guest memory.
///
/// `addr_type` selects between an IPv4 (4 octets) and an IPv6 (16 octets)
/// encoding at `addr_u8_ptr`; any other value traps. `flow_info` and
/// `scope_id` are only used for IPv6 addresses.
fn socket_address(
    caller: &Caller<ProcessState>,
    memory: &Memory,
    addr_type: u32,
    addr_u8_ptr: u32,
    port: u32,
    flow_info: u32,
    scope_id: u32,
) -> Result<SocketAddr, Trap> {
    match addr_type {
        4 => {
            let octets = memory
                .data(&caller)
                .get(addr_u8_ptr as usize..(addr_u8_ptr + 4) as usize)
                .or_trap("lunatic::network::socket_address*")?;
            let bytes: [u8; 4] = octets.try_into().expect("exactly 4 bytes");
            Ok(SocketAddrV4::new(Ipv4Addr::from(bytes), port as u16).into())
        }
        6 => {
            let octets = memory
                .data(&caller)
                .get(addr_u8_ptr as usize..(addr_u8_ptr + 16) as usize)
                .or_trap("lunatic::network::socket_address*")?;
            let bytes: [u8; 16] = octets.try_into().expect("exactly 16 bytes");
            Ok(SocketAddrV6::new(Ipv6Addr::from(bytes), port as u16, flow_info, scope_id).into())
        }
        _ => Err(Trap::new("Unsupported address type in socket_address*")),
    }
}
|
// Copyright 2020 IOTA Stiftung
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
// the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
// an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and limitations under the License.
use crate::address::{Address, Port};
use serde::Deserialize;
use std::net::{IpAddr, Ipv4Addr};
const DEFAULT_BINDING_PORT: u16 = 15600;
const DEFAULT_BINDING_ADDR: IpAddr = IpAddr::V4(Ipv4Addr::new(0, 0, 0, 0));
/// Network configuration builder.
#[derive(Default, Deserialize)]
pub struct NetworkConfigBuilder {
    // TCP port to bind to; `finish` falls back to DEFAULT_BINDING_PORT when unset.
    binding_port: Option<u16>,
    // IP address to bind to; `finish` falls back to DEFAULT_BINDING_ADDR when unset.
    binding_addr: Option<IpAddr>,
}
impl NetworkConfigBuilder {
    /// Creates a new config builder.
    pub fn new() -> Self {
        Self::default()
    }

    /// Sets the binding port for the network.
    pub fn binding_port(mut self, port: u16) -> Self {
        self.binding_port = Some(port);
        self
    }

    /// Sets the binding address for the network.
    ///
    /// Panics when `addr` is not a parsable IP address.
    pub fn binding_addr(mut self, addr: &str) -> Self {
        let parsed = addr
            .parse()
            .unwrap_or_else(|e| panic!("Error parsing address: {:?}", e));
        self.binding_addr = Some(parsed);
        self
    }

    /// Builds the network config, filling unset fields with the defaults.
    pub fn finish(self) -> NetworkConfig {
        let binding_port = self.binding_port.unwrap_or(DEFAULT_BINDING_PORT);
        let binding_addr = self.binding_addr.unwrap_or(DEFAULT_BINDING_ADDR);
        NetworkConfig {
            binding_port,
            binding_addr,
        }
    }
}
/// Network configuration.
#[derive(Clone, Copy, Debug)]
pub struct NetworkConfig {
    // Port the network layer binds to.
    pub(crate) binding_port: u16,
    // Address the network layer binds to (V4 or V6).
    pub(crate) binding_addr: IpAddr,
}
impl NetworkConfig {
    /// Returns a builder for this config.
    pub fn build() -> NetworkConfigBuilder {
        NetworkConfigBuilder::new()
    }
    /// Combines the binding address and port into the crate's `Address` type.
    pub(crate) fn socket_addr(&self) -> Address {
        match self.binding_addr {
            IpAddr::V4(addr) => Address::from_v4_addr_and_port(addr, Port(self.binding_port)),
            IpAddr::V6(addr) => Address::from_v6_addr_and_port(addr, Port(self.binding_port)),
        }
    }
}
|
/// Entry point: prints a greeting to stdout.
fn main() {
    let greeting = "Hello, world!";
    println!("{}", greeting);
}
#[cfg(test)]
mod tests {
    /// Sanity check that the test harness runs.
    #[test]
    fn it_works() {
        assert_eq!(2 + 2, 4);
    }

    /// Sums a Vec<f64> with the `Iterator::sum` terminal adaptor.
    /// Replaced the push-by-push construction with a `vec!` literal and
    /// removed the stale commented-out fixture.
    #[test]
    fn summing_up() {
        let numbers: Vec<f64> = vec![1.0, 5.0, 4.0];
        let result: f64 = numbers.iter().sum();
        assert_eq!(result, 10.0)
    }
}
|
use serde::{Deserialize, Serialize};
/// A to-do item row (Diesel `Queryable`, serde-serializable for the API).
#[derive(Serialize, Deserialize, Queryable)]
pub struct Task {
    // Primary key.
    pub id: i32,
    // Human-readable description of the task.
    pub description: String,
    // Whether the task has been completed.
    pub completed: bool,
    // Creation timestamp; naive (no timezone) — presumably stored as UTC,
    // TODO confirm against the schema.
    pub created_at: chrono::NaiveDateTime,
}
|
use crate::{IntegerMachineType, RealMachineType};
use std::fmt::{Display, Formatter};
use std::ops::{Add, Mul, Neg, Sub};
/// A numeric value that is either an integer or a real, mirroring the
/// machine's two numeric representations.
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum NumericType {
    /// Integer-valued number.
    Integer(IntegerMachineType),
    /// Real-valued (floating-point) number.
    Real(RealMachineType),
}
impl NumericType {
    /// Widens the value to the real machine type; integers are converted
    /// with an `as` cast.
    pub(super) fn as_real(&self) -> RealMachineType {
        match self {
            NumericType::Integer(i) => *i as RealMachineType,
            NumericType::Real(r) => *r,
        }
    }
    /// Narrows the value to the integer machine type; reals are converted
    /// with an `as` cast, which truncates toward zero.
    pub(super) fn as_int(&self) -> IntegerMachineType {
        match self {
            NumericType::Integer(i) => *i,
            NumericType::Real(r) => *r as IntegerMachineType,
        }
    }
}
impl Display for NumericType {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
match self {
NumericType::Integer(i) => Display::fmt(&i, f),
NumericType::Real(r) => Display::fmt(&r, f),
}
}
}
impl Add for NumericType {
type Output = NumericType;
fn add(self, rhs: Self) -> Self::Output {
if let (NumericType::Integer(i1), NumericType::Integer(i2)) = (self, rhs) {
NumericType::Integer(i1 + i2)
} else {
NumericType::Real(self.as_real() + rhs.as_real())
}
}
}
impl Sub for NumericType {
type Output = NumericType;
fn sub(self, rhs: Self) -> Self::Output {
if let (NumericType::Integer(i1), NumericType::Integer(i2)) = (self, rhs) {
NumericType::Integer(i1 - i2)
} else {
NumericType::Real(self.as_real() - rhs.as_real())
}
}
}
impl Mul for NumericType {
type Output = NumericType;
fn mul(self, rhs: Self) -> Self::Output {
if let (NumericType::Integer(i1), NumericType::Integer(i2)) = (self, rhs) {
NumericType::Integer(i1 * i2)
} else {
NumericType::Real(self.as_real() * rhs.as_real())
}
}
}
impl Neg for NumericType {
type Output = NumericType;
fn neg(self) -> Self::Output {
match self {
NumericType::Integer(i) => NumericType::Integer(-i),
NumericType::Real(r) => NumericType::Real(-r),
}
}
}
|
use notify::{Watcher, watcher, DebouncedEvent, RecursiveMode, RecommendedWatcher};
use std::sync::mpsc::channel;
use std::time::Duration;
use crate::Error;
use std::thread;
use std::path::PathBuf;
/// Handle returned by `create_watcher`; holds the underlying filesystem
/// watcher so it stays alive for as long as the handle does.
pub struct WatcherHandle {
    // Kept only to tie the watcher's lifetime to this handle — the field
    // is never read after construction.
    watcher: RecommendedWatcher,
}
/// Watches `path` recursively and reruns wasm-pack whenever a `.rs` file
/// is written, created or removed. Events are debounced by `debounce`
/// milliseconds. The event loop runs on a detached background thread.
pub fn create_watcher(path: &PathBuf, debounce: u64) -> Result<WatcherHandle, Error> {
    let (tx, rx) = channel();
    let mut watcher = watcher(tx, Duration::from_millis(debounce))?;
    watcher.watch(path, RecursiveMode::Recursive)?;
    thread::spawn(move || loop {
        match rx.recv() {
            Ok(event) => match event {
                // Write / Create / Remove previously had three identical
                // copies of this body; an or-pattern collapses them.
                DebouncedEvent::Write(path)
                | DebouncedEvent::Create(path)
                | DebouncedEvent::Remove(path) => {
                    if let Some(extension) = path.extension() {
                        if extension.to_string_lossy() == "rs" {
                            crate::wasm_pack::run_wasm_pack().unwrap()
                        }
                    }
                }
                // Renames and all other event kinds are ignored (the old
                // Rename arm was empty and left unused bindings).
                _ => {}
            },
            Err(e) => {
                dbg!(e);
            }
        }
    });
    Ok(WatcherHandle { watcher })
}
|
// Reader types for the individual single-bit fields of the DONE register;
// `R` wraps the raw 32-bit register value.
#[doc = "Reader of register DONE"]
pub type R = crate::R<u32, super::DONE>;
#[doc = "Reader of field `proc1`"]
pub type PROC1_R = crate::R<bool, bool>;
#[doc = "Reader of field `proc0`"]
pub type PROC0_R = crate::R<bool, bool>;
#[doc = "Reader of field `sio`"]
pub type SIO_R = crate::R<bool, bool>;
#[doc = "Reader of field `vreg_and_chip_reset`"]
pub type VREG_AND_CHIP_RESET_R = crate::R<bool, bool>;
#[doc = "Reader of field `xip`"]
pub type XIP_R = crate::R<bool, bool>;
#[doc = "Reader of field `sram5`"]
pub type SRAM5_R = crate::R<bool, bool>;
#[doc = "Reader of field `sram4`"]
pub type SRAM4_R = crate::R<bool, bool>;
#[doc = "Reader of field `sram3`"]
pub type SRAM3_R = crate::R<bool, bool>;
#[doc = "Reader of field `sram2`"]
pub type SRAM2_R = crate::R<bool, bool>;
#[doc = "Reader of field `sram1`"]
pub type SRAM1_R = crate::R<bool, bool>;
#[doc = "Reader of field `sram0`"]
pub type SRAM0_R = crate::R<bool, bool>;
#[doc = "Reader of field `rom`"]
pub type ROM_R = crate::R<bool, bool>;
#[doc = "Reader of field `busfabric`"]
pub type BUSFABRIC_R = crate::R<bool, bool>;
#[doc = "Reader of field `resets`"]
pub type RESETS_R = crate::R<bool, bool>;
#[doc = "Reader of field `clocks`"]
pub type CLOCKS_R = crate::R<bool, bool>;
#[doc = "Reader of field `xosc`"]
pub type XOSC_R = crate::R<bool, bool>;
#[doc = "Reader of field `rosc`"]
pub type ROSC_R = crate::R<bool, bool>;
impl R {
    // Each accessor extracts one bit of the DONE register: shift the raw
    // value right to the field's bit position and mask the low bit.
    #[doc = "Bit 16"]
    #[inline(always)]
    pub fn proc1(&self) -> PROC1_R {
        PROC1_R::new(((self.bits >> 16) & 0x01) != 0)
    }
    #[doc = "Bit 15"]
    #[inline(always)]
    pub fn proc0(&self) -> PROC0_R {
        PROC0_R::new(((self.bits >> 15) & 0x01) != 0)
    }
    #[doc = "Bit 14"]
    #[inline(always)]
    pub fn sio(&self) -> SIO_R {
        SIO_R::new(((self.bits >> 14) & 0x01) != 0)
    }
    #[doc = "Bit 13"]
    #[inline(always)]
    pub fn vreg_and_chip_reset(&self) -> VREG_AND_CHIP_RESET_R {
        VREG_AND_CHIP_RESET_R::new(((self.bits >> 13) & 0x01) != 0)
    }
    #[doc = "Bit 12"]
    #[inline(always)]
    pub fn xip(&self) -> XIP_R {
        XIP_R::new(((self.bits >> 12) & 0x01) != 0)
    }
    #[doc = "Bit 11"]
    #[inline(always)]
    pub fn sram5(&self) -> SRAM5_R {
        SRAM5_R::new(((self.bits >> 11) & 0x01) != 0)
    }
    #[doc = "Bit 10"]
    #[inline(always)]
    pub fn sram4(&self) -> SRAM4_R {
        SRAM4_R::new(((self.bits >> 10) & 0x01) != 0)
    }
    #[doc = "Bit 9"]
    #[inline(always)]
    pub fn sram3(&self) -> SRAM3_R {
        SRAM3_R::new(((self.bits >> 9) & 0x01) != 0)
    }
    #[doc = "Bit 8"]
    #[inline(always)]
    pub fn sram2(&self) -> SRAM2_R {
        SRAM2_R::new(((self.bits >> 8) & 0x01) != 0)
    }
    #[doc = "Bit 7"]
    #[inline(always)]
    pub fn sram1(&self) -> SRAM1_R {
        SRAM1_R::new(((self.bits >> 7) & 0x01) != 0)
    }
    #[doc = "Bit 6"]
    #[inline(always)]
    pub fn sram0(&self) -> SRAM0_R {
        SRAM0_R::new(((self.bits >> 6) & 0x01) != 0)
    }
    #[doc = "Bit 5"]
    #[inline(always)]
    pub fn rom(&self) -> ROM_R {
        ROM_R::new(((self.bits >> 5) & 0x01) != 0)
    }
    #[doc = "Bit 4"]
    #[inline(always)]
    pub fn busfabric(&self) -> BUSFABRIC_R {
        BUSFABRIC_R::new(((self.bits >> 4) & 0x01) != 0)
    }
    #[doc = "Bit 3"]
    #[inline(always)]
    pub fn resets(&self) -> RESETS_R {
        RESETS_R::new(((self.bits >> 3) & 0x01) != 0)
    }
    #[doc = "Bit 2"]
    #[inline(always)]
    pub fn clocks(&self) -> CLOCKS_R {
        CLOCKS_R::new(((self.bits >> 2) & 0x01) != 0)
    }
    #[doc = "Bit 1"]
    #[inline(always)]
    pub fn xosc(&self) -> XOSC_R {
        XOSC_R::new(((self.bits >> 1) & 0x01) != 0)
    }
    #[doc = "Bit 0"]
    #[inline(always)]
    pub fn rosc(&self) -> ROSC_R {
        ROSC_R::new((self.bits & 0x01) != 0)
    }
}
|
// Copyright 2021 The MWC Developers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Generic implementation of owner API eth functions
use crate::grin_keychain::Keychain;
use crate::grin_util::Mutex;
use crate::types::NodeClient;
use crate::{wallet_lock, WalletInst, WalletLCProvider};
use grin_wallet_util::grin_core::global;
use std::sync::Arc;
use crate::swap::ethereum::InfuraNodeClient;
use crate::swap::ethereum::*;
use crate::swap::trades;
use crate::swap::types::Currency;
use crate::swap::ErrorKind;
use crate::Error;
/// Show Wallet Info: returns the wallet's ethereum address, the current
/// chain height and the balance for `currency`, all as strings.
pub fn info<'a, L, C, K>(
    wallet_inst: Arc<Mutex<Box<dyn WalletInst<'a, L, C, K>>>>,
    currency: Currency,
) -> Result<(String, String, String), Error>
where
    L: WalletLCProvider<'a, C, K>,
    C: NodeClient + 'a,
    K: Keychain + 'a,
{
    wallet_lock!(wallet_inst, w);
    let ethereum_wallet = w.get_ethereum_wallet()?;
    // NOTE(review): the Infura project id is always looked up for Ether,
    // even when `currency` is another token — confirm this is intended.
    let eth_infura_project_id = trades::get_eth_infura_projectid(&Currency::Ether, &None).unwrap();
    // Chain name passed to Infura: ethereum mainnet when the node runs on
    // mainnet, otherwise the ropsten test network.
    let chain = if global::is_mainnet() {
        "mainnet".to_string()
    } else {
        "ropsten".to_string()
    };
    let eth_node_client = InfuraNodeClient::new(
        eth_infura_project_id,
        chain,
        ethereum_wallet.clone(),
        "".to_string(),
        "".to_string(),
    )?;
    let height = eth_node_client.height()?;
    let balance = eth_node_client.balance(currency)?;
    Ok((
        ethereum_wallet.address.clone().unwrap(),
        format!("{}", height),
        balance.0,
    ))
}
/// get eth balance
///
/// Queries Infura for the Ether balance of `ethereum_wallet` and returns
/// the numeric (second) element of the node client's balance tuple.
pub fn get_eth_balance(ethereum_wallet: EthereumWallet) -> Result<u64, Error> {
    let eth_infura_project_id = trades::get_eth_infura_projectid(&Currency::Ether, &None).unwrap();
    // Chain name passed to Infura: mainnet or the ropsten test network.
    let chain = if global::is_mainnet() {
        "mainnet".to_string()
    } else {
        "ropsten".to_string()
    };
    let eth_node_client = InfuraNodeClient::new(
        eth_infura_project_id,
        chain,
        ethereum_wallet.clone(),
        "".to_string(),
        "".to_string(),
    )?;
    let balance = eth_node_client.balance(Currency::Ether)?;
    Ok(balance.1)
}
/// Transfer ethereum coins out of the wallet to `dest`.
///
/// # Panics
/// Panics when `dest` or `amount` is `None`, or when the normalized
/// amount does not parse as `u64` (behavior inherited from the original
/// implementation, which unwrapped the same values).
pub fn transfer<'a, L, C, K>(
    wallet_inst: Arc<Mutex<Box<dyn WalletInst<'a, L, C, K>>>>,
    currency: Currency,
    dest: Option<String>,
    amount: Option<String>,
) -> Result<(), ErrorKind>
where
    L: WalletLCProvider<'a, C, K>,
    C: NodeClient + 'a,
    K: Keychain + 'a,
{
    wallet_lock!(wallet_inst, w);
    let ethereum_wallet = w.get_ethereum_wallet()?;
    // NOTE(review): the Infura project id is always looked up for Ether,
    // even when `currency` is another token — confirm this is intended.
    let eth_infura_project_id = trades::get_eth_infura_projectid(&Currency::Ether, &None).unwrap();
    let chain = if global::is_mainnet() {
        "mainnet".to_string()
    } else {
        "ropsten".to_string()
    };
    let eth_node_client = InfuraNodeClient::new(
        eth_infura_project_id,
        chain,
        ethereum_wallet.clone(),
        "".to_string(),
        "".to_string(),
    )?;
    let to = to_eth_address(dest.unwrap())?;
    let amounts = to_gnorm(amount.unwrap().as_str(), "1");
    // Parse once up front; the previous version unwrapped a clone inside
    // the log call and then unwrapped the same result a second time.
    let amounts_u64 = amounts.parse::<u64>().unwrap();
    info!(
        "currency: {}, to: {}, amounts: {}, amounts_u64: {}",
        currency, to, amounts, amounts_u64
    );
    // `map` replaces the Ok/Err re-wrapping match.
    eth_node_client
        .transfer(currency, to, amounts_u64)
        .map(|_tx_hash| ())
}
|
use std::io;
fn main() {
    // Prompt, read one line from stdin and parse it as f64; any parse
    // failure is mapped to 0.0 and treated as invalid input below.
    println!("Insert a number to be converted into Euro currency!");
    let mut number_input = String::new();
    io::stdin()
        .read_line(&mut number_input)
        .expect("Insert floating number!");
    let number_to_convert: f64 = number_input.trim().parse().unwrap_or(0.0);
    // NOTE(review): a literal input of "0" is indistinguishable from a
    // parse failure here and is also rejected.
    if number_to_convert == 0.0 {
        println!("Please insert a (floating) number!");
    } else {
        let currency_as_string = convert_into_euro(number_to_convert);
        println!("{}", currency_as_string);
    }
}
/// Breaks `number_to_convert` euros down into the smallest number of
/// banknotes and coins (greedy, largest denomination first) and returns a
/// human-readable summary string.
///
/// After every subtraction the remainder is rounded to two decimal places
/// to keep floating-point drift from accumulating.
///
/// Fixes an infinite loop in the previous version: a residual amount
/// strictly between 0 and 0.01 (e.g. an input of 0.005) matched no
/// denomination and the loop never terminated. Such sub-cent residues are
/// now discarded.
fn convert_into_euro(mut number_to_convert: f64) -> String {
    // Denominations from largest to smallest, paired with their output labels.
    const DENOMINATIONS: [(f64, &str); 15] = [
        (500.0, "500 Euro notes"),
        (200.0, "200 Euro notes"),
        (100.0, "100 Euro notes"),
        (50.0, "50 Euro notes"),
        (20.0, "20 Euro notes"),
        (10.0, "10 Euro notes"),
        (5.0, "5 Euro notes"),
        (2.0, "2 Euro coins"),
        (1.0, "1 Euro coins"),
        (0.5, "50 cent coins"),
        (0.2, "20 cent coins"),
        (0.1, "10 cent coins"),
        (0.05, "5 cent coins"),
        (0.02, "2 cent coins"),
        (0.01, "1 cent coins"),
    ];

    // Subtract `denomination` and round the remainder to two decimals
    // (same arithmetic as subtract_and_round_to_two_decimal_places).
    fn step_down(amount: f64, denomination: f64) -> f64 {
        ((amount - denomination) * 100.0).round() / 100.0
    }

    let mut counts = [0u32; 15];
    let mut conversion_result = String::new();
    conversion_result += &format!("\nEuro conversion for number: {}\n\n", number_to_convert);
    while number_to_convert > 0.0 {
        // Greedily take the largest denomination that still fits.
        match DENOMINATIONS
            .iter()
            .position(|&(value, _)| number_to_convert >= value)
        {
            Some(i) => {
                number_to_convert = step_down(number_to_convert, DENOMINATIONS[i].0);
                counts[i] += 1;
            }
            // Residue smaller than one cent: nothing left to pay out.
            None => break,
        }
    }
    // Report only the denominations that were actually used, in order.
    for (&(_, label), &count) in DENOMINATIONS.iter().zip(counts.iter()) {
        if count > 0 {
            conversion_result += &format!("{}: {}\n", label, count);
        }
    }
    conversion_result
}
/// Subtracts `subtraction_value` from `number_to_subtract`, then rounds
/// the difference to two decimal places to suppress floating-point drift.
fn subtract_and_round_to_two_decimal_places(
    number_to_subtract: f64,
    subtraction_value: f64,
) -> f64 {
    round_to_two_decimal_places(number_to_subtract - subtraction_value)
}
/// Rounds `number_to_round` to two decimal places (half-away-from-zero,
/// as implemented by `f64::round`).
fn round_to_two_decimal_places(number_to_round: f64) -> f64 {
    let scaled = number_to_round * 100.0;
    scaled.round() / 100.0
}
|
extern crate csv;
#[macro_use]
extern crate serde_derive;
use std::env;
use std::error::Error;
use std::ffi::OsString;
use std::fs::File;
use std::process;
/// One CSV record of the Japanese postal-code data set, borrowed from the
/// parsed record. Deserialized positionally (`deserialize(None)` in `run`),
/// so the field order must match the CSV column order.
#[derive(Deserialize, Debug)]
struct Row<'a> {
    address_code: &'a str,
    pref_code: &'a str,
    city_code: &'a str,
    area_code: &'a str,
    zip_code: &'a str,
    // "1" marks a company entry; such rows are skipped by `run`.
    company_flag: &'a str,
    stop_flag: &'a str,
    pref_name: &'a str,
    pref_name_kana: &'a str,
    city_name: &'a str,
    city_name_kana: &'a str,
    town_area: &'a str,
    town_area_kana: &'a str,
    town_area_supplement: &'a str,
    // Kyoto street naming; appended to street_name by `collect_towns`.
    kyoto_street_name: &'a str,
    street_name: &'a str,
    street_name_kana: &'a str,
    supplement: &'a str,
    company_name: &'a str,
    company_name_kana: &'a str,
    company_address: &'a str,
    new_address_code: &'a str,
}
/// A prefecture row for the generated `prefs` SQL table.
#[derive(Debug, PartialEq)]
struct Pref {
    // Parsed from the CSV `pref_code` column.
    id: u8,
    name: String,
}
/// A city row for the generated `cities` SQL table.
#[derive(Debug, PartialEq)]
struct City {
    // CSV `city_code`, kept as a string (may have leading zeros).
    code: String,
    name: String,
    // Foreign key to `Pref::id`.
    pref_id: u8,
}
/// A town row for the generated `towns` SQL table.
#[derive(Debug, PartialEq)]
struct Town {
    // Sequential ID assigned while collecting (towns.len() + 1).
    id: u32,
    zip_code: String,
    area_name: String,
    // street_name concatenated with kyoto_street_name from the CSV row.
    street_name: String,
    // Foreign key to `City::code`.
    city_code: String,
}
/// Reads the CSV named by the first CLI argument and prints SQL INSERT
/// statements for the distinct prefectures, cities and towns it contains.
fn run() -> Result<(), Box<Error>> {
    let file_path = get_first_args()?;
    let file = File::open(file_path)?;
    let mut rdr = csv::Reader::from_reader(file);
    let mut prefs: Vec<Pref> = vec![];
    let mut cities: Vec<City> = vec![];
    let mut towns: Vec<Town> = vec![];
    for result in rdr.records() {
        // Propagate malformed records instead of panicking (was `unwrap`
        // inside a function that already returns Result).
        let temp = result?;
        let record: Row = temp.deserialize(None)?;
        let is_company: u8 = record.company_flag.parse()?;
        // Company rows are not part of the address tables.
        if is_company == 1 {
            continue;
        }
        let pref = collect_prefs(&record);
        let city = collect_cities(&record);
        let town = collect_towns(&record, &city, &towns);
        // Deduplicate by full value equality before pushing.
        if !prefs.contains(&pref) {
            prefs.push(pref);
        }
        if !cities.contains(&city) {
            cities.push(city);
        }
        if !towns.contains(&town) {
            towns.push(town);
        }
    }
    create_prefs_sql(&prefs);
    create_cities_sql(&cities);
    create_towns_sql(&towns);
    Ok(())
}
/// Returns the program's first CLI argument, or an error when none was given.
fn get_first_args() -> Result<OsString, Box<Error>> {
    env::args_os()
        .nth(1)
        .ok_or_else(|| From::from("expected 1 argument, but got none"))
}
/// Builds a `Pref` from the current CSV row.
/// Panics if `pref_code` is not a valid u8 (matches the file's style).
fn collect_prefs(record: &Row) -> Pref {
    Pref {
        id: record.pref_code.parse().unwrap(),
        name: record.pref_name.to_string(),
    }
}
/// Builds a `City` from the current CSV row.
/// Panics if `pref_code` is not a valid u8 (matches the file's style).
fn collect_cities(record: &Row) -> City {
    City {
        code: record.city_code.to_string(),
        name: record.city_name.to_string(),
        pref_id: record.pref_code.parse().unwrap(),
    }
}
/// Builds a `Town` from the current CSV row; the new town's ID is one
/// past the number of towns collected so far.
fn collect_towns(record: &Row, city: &City, towns: &Vec<Town>) -> Town {
    Town {
        id: towns.len() as u32 + 1,
        zip_code: record.zip_code.to_string(),
        area_name: record.town_area.to_string(),
        // `format!` stringifies `&str` arguments itself; the intermediate
        // `to_string()` allocation was redundant (clippy unnecessary_to_owned).
        street_name: format!("{}{}", record.street_name, record.kyoto_street_name),
        city_code: city.code.to_string(),
    }
}
/// Prints an `INSERT INTO prefs` statement for all prefectures to stdout.
/// The last tuple (compared by value, as before) is terminated with `;`.
fn create_prefs_sql(prefs: &Vec<Pref>) {
    println!("INSERT INTO prefs(id, name) VALUES");
    for pref in prefs {
        print!("({}, '{}')", pref.id, pref.name);
        let terminator = if prefs.last().unwrap() == pref {
            ";\n\n"
        } else {
            ",\n"
        };
        print!("{}", terminator);
    }
}
/// Prints an `INSERT INTO cities` statement for all cities to stdout.
fn create_cities_sql(cities: &Vec<City>) {
    // Fixed the column list: it used to name four columns
    // (id, pref_id, code, name) while each tuple supplies only three
    // values (code, pref_id, name), producing invalid SQL.
    println!("INSERT INTO cities(code, pref_id, name) VALUES");
    for city in cities {
        print!("('{}', {}, '{}')", city.code, city.pref_id, city.name);
        if cities.last().unwrap() == city {
            print!(";\n\n");
        } else {
            print!(",\n");
        }
    }
}
/// Prints an `INSERT INTO towns` statement for all towns to stdout.
fn create_towns_sql(towns: &Vec<Town>) {
    // Added the missing `VALUES` keyword — without it the generated SQL
    // is syntactically invalid.
    println!("INSERT INTO towns(id, city_id, zip_code, area_name, street_name) VALUES");
    for town in towns {
        print!("({}, '{}', '{}', '{}', '{}')", town.id, town.city_code, town.zip_code, town.area_name, town.street_name);
        if towns.last().unwrap() == town {
            print!(";\n\n");
        } else {
            print!(",\n");
        }
    }
}
/// Entry point: runs the CSV-to-SQL conversion and exits with status 1 on failure.
fn main() {
    match run() {
        Ok(_) => {}
        Err(err) => {
            println!("{}", err);
            process::exit(1);
        }
    }
}
|
// Copyright 2015 The GeoRust Developers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::collections::BTreeMap;
use rustc_serialize::json::{self, ToJson};
use ::{Error, FromObject};
/// Coordinate Reference System Objects
///
/// [GeoJSON Format Specification § 3]
/// (http://geojson.org/geojson-spec.html#coordinate-reference-system-objects)
#[derive(Clone, Debug, PartialEq)]
pub enum Crs {
    /// Named CRS
    ///
    /// [GeoJSON Format Specification § 3.1]
    /// (http://geojson.org/geojson-spec.html#named-crs)
    Named {
        /// Name identifying the coordinate reference system.
        name: String,
    },
    /// Linked CRS
    ///
    /// [GeoJSON Format Specification § 3.2]
    /// (http://geojson.org/geojson-spec.html#linked-crs)
    Linked {
        /// URI pointing at the CRS definition.
        href: String,
        /// Optional hint about the format of the linked CRS definition.
        type_: Option<String>,
    },
}
impl<'a> From<&'a Crs> for json::Object {
    /// Serializes a CRS into the two-level GeoJSON layout: a `"type"` tag
    /// plus a nested `"properties"` map.
    fn from(crs: &'a Crs) -> json::Object {
        let mut crs_map = BTreeMap::new();
        let mut properties_map = BTreeMap::new();
        match *crs {
            Crs::Named { ref name } => {
                crs_map.insert(String::from("type"), "name".to_json());
                properties_map.insert(String::from("name"), name.to_json());
            }
            Crs::Linked { ref href, ref type_ } => {
                crs_map.insert(String::from("type"), "link".to_json());
                properties_map.insert(String::from("href"), href.to_json());
                // "type" is optional for linked CRS objects; omitted when absent.
                if let Some(ref type_) = *type_ {
                    properties_map.insert(String::from("type"), type_.to_json());
                }
            }
        }
        crs_map.insert(String::from("properties"), properties_map.to_json());
        // Tail expression instead of the previous explicit `return`.
        crs_map
    }
}
impl FromObject for Crs {
    /// Deserializes a CRS from its JSON object representation.
    ///
    /// Dispatches on the `"type"` member (`"name"` or `"link"`) and reads the
    /// corresponding fields out of the mandatory `"properties"` object.
    /// NOTE(review): the `expect_*` macros appear to early-return an `Err`
    /// on missing/mistyped members — confirm against their definitions.
    fn from_object(object: &json::Object) -> Result<Self, Error> {
        let type_ = expect_type!(object);
        let properties = expect_object!(expect_property!(object, "properties", "Encountered CRS object type with no properties"));
        return Ok(match type_ {
            "name" => {
                let name = expect_string!(expect_property!(properties, "name", "Encountered Named CRS object with no name"));
                Crs::Named {name: String::from(name)}
            },
            "link" => {
                let href = expect_string!(expect_property!(properties, "href", "Encountered Linked CRS object with no link")).to_string();
                // "type" is optional; absent simply maps to None.
                let type_ = match properties.get("type") {
                    Some(type_) => Some(expect_string!(type_).to_string()),
                    None => None,
                };
                Crs::Linked {type_: type_, href: href}
            },
            _ => return Err(Error::new("Encountered unknown CRS type")),
        });
    }
}
impl ToJson for Crs {
    /// Converts the CRS to a `json::Json` value via the `From<&Crs>` impl.
    fn to_json(&self) -> json::Json {
        // Tail expression instead of the previous explicit `return`.
        json::Json::Object(self.into())
    }
}
|
use super::*;
mod deserialize;
mod serialize;
mod structure;
mod value_deserializer;
mod value_serializer;
|
// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// https://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// https://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Math helper functions
#[cfg(feature="simd_support")]
use core::simd::*;
/// Multiplication producing the full double-width product.
pub trait WideningMultiply<RHS = Self> {
    type Output;
    /// Multiplies `self` by `x`, returning the (high, low) halves of the product.
    fn wmul(self, x: RHS) -> Self::Output;
}
// Implements WideningMultiply for $ty by computing the product in the
// double-width type $wide and splitting it at $shift bits.
macro_rules! wmul_impl {
    ($ty:ty, $wide:ty, $shift:expr) => {
        impl WideningMultiply for $ty {
            type Output = ($ty, $ty);
            #[inline(always)]
            fn wmul(self, x: $ty) -> Self::Output {
                let tmp = (self as $wide) * (x as $wide);
                // (high half, low half)
                ((tmp >> $shift) as $ty, tmp as $ty)
            }
        }
    }
}
// Widening multiply via the next-larger primitive type.
wmul_impl! { u8, u16, 8 }
wmul_impl! { u16, u32, 16 }
wmul_impl! { u32, u64, 32 }
// u64 can use u128 only when 128-bit integer support is enabled.
#[cfg(feature = "i128_support")]
wmul_impl! { u64, u128, 64 }
// This code is a translation of the __mulddi3 function in LLVM's
// compiler-rt. It is an optimised variant of the common method
// `(a + b) * (c + d) = ac + ad + bc + bd`.
//
// For some reason LLVM can optimise the C version very well, but
// keeps shuffling registers in this Rust translation.
// Widening multiply without a wider primitive: schoolbook multiplication on
// $half-bit digits (self = a1·2^half + a0, b = b1·2^half + b0).
macro_rules! wmul_impl_large {
    ($ty:ty, $half:expr) => {
        impl WideningMultiply for $ty {
            type Output = ($ty, $ty);
            #[inline(always)]
            fn wmul(self, b: $ty) -> Self::Output {
                const LOWER_MASK: $ty = !0 >> $half;
                // a0 * b0
                let mut low = (self & LOWER_MASK).wrapping_mul(b & LOWER_MASK);
                let mut t = low >> $half;
                low &= LOWER_MASK;
                // carry + a1 * b0
                t += (self >> $half).wrapping_mul(b & LOWER_MASK);
                low += (t & LOWER_MASK) << $half;
                let mut high = t >> $half;
                t = low >> $half;
                low &= LOWER_MASK;
                // carry + a0 * b1
                t += (b >> $half).wrapping_mul(self & LOWER_MASK);
                low += (t & LOWER_MASK) << $half;
                high += t >> $half;
                // a1 * b1
                high += (self >> $half).wrapping_mul(b >> $half);
                (high, low)
            }
        }
    }
}
// Without native u128 support, u64 needs the half-word fallback; with it,
// u128 itself needs the fallback since no wider primitive exists.
#[cfg(not(feature = "i128_support"))]
wmul_impl_large! { u64, 32 }
#[cfg(feature = "i128_support")]
wmul_impl_large! { u128, 64 }
// Forwards usize widening multiplication to the same-width fixed type.
macro_rules! wmul_impl_usize {
    ($ty:ty) => {
        impl WideningMultiply for usize {
            type Output = (usize, usize);
            #[inline(always)]
            fn wmul(self, x: usize) -> Self::Output {
                let (high, low) = (self as $ty).wmul(x as $ty);
                (high as usize, low as usize)
            }
        }
    }
}
// usize maps to u32 or u64 depending on the target pointer width.
#[cfg(target_pointer_width = "32")]
wmul_impl_usize! { u32 }
#[cfg(target_pointer_width = "64")]
wmul_impl_usize! { u64 }
/// Conversion from an unsigned integer to a floating-point value, done with
/// a plain `as` cast for the scalar implementations below.
pub trait CastFromInt<T> {
    fn cast_from_int(i: T) -> Self;
}
impl CastFromInt<u32> for f32 {
    fn cast_from_int(i: u32) -> Self {
        let value = i as f32;
        value
    }
}
impl CastFromInt<u64> for f64 {
    fn cast_from_int(i: u64) -> Self {
        let value = i as f64;
        value
    }
}
// SIMD vector equivalents of CastFromInt: lane-wise integer-to-float
// conversion via `From` (only built with the simd_support feature).
#[cfg(feature="simd_support")]
macro_rules! simd_float_from_int {
    ($ty:ident, $uty:ident) => {
        impl CastFromInt<$uty> for $ty {
            fn cast_from_int(i: $uty) -> Self { $ty::from(i) }
        }
    }
}
#[cfg(feature="simd_support")] simd_float_from_int! { f32x2, u32x2 }
#[cfg(feature="simd_support")] simd_float_from_int! { f32x4, u32x4 }
#[cfg(feature="simd_support")] simd_float_from_int! { f32x8, u32x8 }
#[cfg(feature="simd_support")] simd_float_from_int! { f32x16, u32x16 }
#[cfg(feature="simd_support")] simd_float_from_int! { f64x2, u64x2 }
#[cfg(feature="simd_support")] simd_float_from_int! { f64x4, u64x4 }
#[cfg(feature="simd_support")] simd_float_from_int! { f64x8, u64x8 }
/// Lane-wise comparison helper.
///
/// `PartialOrd` on SIMD vectors compares lexicographically, but we need
/// "every individual lane satisfies the comparison" (something like
/// `a.lt(b).all()`). Expressing that through a trait lets `f32`/`f64` and
/// vector code share the same source. Only the comparisons we need exist.
pub trait CompareAll {
    fn all_lt(self, other: Self) -> bool;
    fn all_le(self, other: Self) -> bool;
}
impl CompareAll for f32 {
    fn all_lt(self, other: Self) -> bool {
        other > self
    }
    fn all_le(self, other: Self) -> bool {
        other >= self
    }
}
impl CompareAll for f64 {
    fn all_lt(self, other: Self) -> bool {
        other > self
    }
    fn all_le(self, other: Self) -> bool {
        other >= self
    }
}
// Lane-wise comparisons for SIMD vectors: true only when every lane
// satisfies the comparison. Macro renamed from the typo "simd_less_then";
// it is private to this file and all invocations are updated here.
#[cfg(feature="simd_support")]
macro_rules! simd_less_than {
    ($ty:ident) => {
        impl CompareAll for $ty {
            fn all_lt(self, other: Self) -> bool { self.lt(other).all() }
            fn all_le(self, other: Self) -> bool { self.le(other).all() }
        }
    }
}
#[cfg(feature="simd_support")] simd_less_than! { f32x2 }
#[cfg(feature="simd_support")] simd_less_than! { f32x4 }
#[cfg(feature="simd_support")] simd_less_than! { f32x8 }
#[cfg(feature="simd_support")] simd_less_than! { f32x16 }
#[cfg(feature="simd_support")] simd_less_than! { f64x2 }
#[cfg(feature="simd_support")] simd_less_than! { f64x4 }
#[cfg(feature="simd_support")] simd_less_than! { f64x8 }
|
use std::net::TcpListener;
use khang_first_project::run;
/// Binds a TCP listener on localhost:8080 and runs the server on the
/// actix runtime until it terminates.
#[actix_web::main]
async fn main() -> std::io::Result<()> {
    let address = "127.0.0.1:8080";
    let listener = TcpListener::bind(address).expect("Failed to bind to address");
    run(listener)?.await
}
// #[cfg(test)]
// mod tests {
// use crate::health_check;
// #[actix_rt::test]
// async fn health_check_succeeds() {
// let response = health_check().await;
// assert!(response.status().is_success())
// }
// }
|
extern crate dotenv;
//use diesel::prelude::*;
use crate::models::character::Character;
use crate::models::movie::Movie;
use crate::schema::movie_characters;
/// Join-table record linking a `Movie` to a `Character` (many-to-many).
#[derive(Identifiable, Queryable, Associations, Debug)]
#[table_name = "movie_characters"]
#[belongs_to(Movie)]
#[belongs_to(Character)]
pub struct MovieCharacter {
    // Primary key of the join row.
    pub id: i32,
    // Foreign key referencing the associated Movie.
    pub movie_id: i32,
    // Foreign key referencing the associated Character.
    pub character_id: i32,
}
|
//! Implementation for simple format strings using curly braces.
//!
//! See [`SimpleCurlyFormat`] for more information.
//!
//! [`SimpleCurlyFormat`]: struct.SimpleCurlyFormat.html
use regex::{CaptureMatches, Captures, Regex};
use crate::{ArgumentResult, ArgumentSpec, Error, Format, Position};
lazy_static::lazy_static! {
    /// The regular expression used for parsing simple curly format strings.
    /// Matches `{}` or `{key}` where `key` is one or more word characters.
    static ref PYTHON_RE: Regex = Regex::new(r"\{(?P<key>\w+)?\}").unwrap();
}
/// Interprets a placeholder key as a numeric index when it parses as one,
/// otherwise as a named argument.
fn parse_position(key: &str) -> Position<'_> {
    match key.parse() {
        Ok(index) => Position::Index(index),
        Err(_) => Position::Key(key),
    }
}
/// Converts one regex capture into an argument spec spanning the whole match.
///
/// A missing `key` group (a bare `{}`) yields an automatically numbered
/// position.
fn parse_next(captures: Captures<'_>) -> ArgumentSpec<'_> {
    let position = captures
        .name("key")
        .map(|m| parse_position(m.as_str()))
        // `Position::Auto` is a unit variant and free to construct, so the
        // eager `unwrap_or` replaces the needless `unwrap_or_else` closure.
        .unwrap_or(Position::Auto);
    // Group 0 is the entire match and always exists.
    let group = captures.get(0).unwrap();
    ArgumentSpec::new(group.start(), group.end()).with_position(position)
}
/// Format argument iterator for [`SimpleCurlyFormat`].
///
/// [`SimpleCurlyFormat`]: struct.SimpleCurlyFormat.html
#[derive(Debug)]
pub struct SimpleCurlyIter<'f> {
    // Matches of PYTHON_RE over the format string, yielded lazily.
    captures: CaptureMatches<'static, 'f>,
}
impl<'f> SimpleCurlyIter<'f> {
    /// Creates an iterator over every placeholder match in `format`.
    fn new(format: &'f str) -> Self {
        Self {
            captures: PYTHON_RE.captures_iter(format),
        }
    }
}
impl<'f> Iterator for SimpleCurlyIter<'f> {
    type Item = ArgumentResult<'f>;
    /// Yields the next placeholder as a successfully parsed argument spec.
    fn next(&mut self) -> Option<Self::Item> {
        let capture = self.captures.next()?;
        Some(Ok(parse_next(capture)))
    }
}
/// Format implementation for simple curly brace based format strings.
///
/// This syntax is a subset of what Python 3, Rust, .NET and many logging libraries use. Each
/// argument is formatted in display mode.
///
/// 1. `{}`: Refers to the next positional argument.
/// 2. `{0}`: Refers to the argument at index `0`.
/// 3. `{name}`: Refers to the named argument with key `"name"`.
///
/// # Example
///
/// ```rust
/// use dynfmt::{Format, SimpleCurlyFormat};
///
/// let formatted = SimpleCurlyFormat.format("hello, {}", &["world"]);
/// assert_eq!("hello, world", formatted.expect("formatting failed"));
/// ```
#[derive(Debug)]
pub struct SimpleCurlyFormat;
impl<'f> Format<'f> for SimpleCurlyFormat {
    type Iter = SimpleCurlyIter<'f>;
    /// Building the iterator cannot fail, so this always returns `Ok`.
    fn iter_args(&self, format: &'f str) -> Result<Self::Iter, Error<'f>> {
        let iter = SimpleCurlyIter::new(format);
        Ok(iter)
    }
}
|
use crate::internal::messaging::Msg;
use crate::types::Endpoint;
use futures::future;
use futures::prelude::{ Future, Stream, Sink };
use futures::sync::mpsc;
use tokio::spawn;
/// Asks the system, via `sender`, to establish a connection to `endpoint`
/// each time the discovery `consumer` stream yields an item.
///
/// NOTE(review): the `Option<Endpoint>` received from `consumer` is ignored
/// and the captured `endpoint` is always re-sent — confirm this is intended.
pub(crate) fn discover(consumer: mpsc::Receiver<Option<Endpoint>>, sender: mpsc::Sender<Msg>, endpoint: Endpoint)
    -> impl Future<Item=(), Error=()>
{
    // State threaded through the fold so the sender and endpoint survive
    // across stream iterations.
    struct State {
        sender: mpsc::Sender<Msg>,
        endpoint: Endpoint,
    }
    let initial =
        State {
            sender,
            endpoint,
        };
    consumer.fold(initial, |state, _|
    {
        let send_endpoint =
            state.sender
                .clone()
                .send(Msg::Establish(state.endpoint))
                .then(|_| Ok(()));
        // Fire-and-forget: any error from the send is discarded by `then`.
        spawn(send_endpoint);
        future::ok(state)
    }).map(|_| ())
}
|
use core::cell::{Cell, RefCell};
use stm32f4::stm32f411 as stm32;
use stm32::{interrupt};
use cortex_m::interrupt::{free, Mutex};
use heapless::String;
use heapless::spsc::{Queue};
use heapless::consts::{U256, U16};
use core::borrow::{BorrowMut, Borrow};
use core::ops::DerefMut;
use crate::command_parser::{CommandParser};
use crate::config;
// Pending DMA write requests (heapless queue, capacity `U256`, u8 indices).
// NOTE(review): accessed from both thread and interrupt context — appears to
// be protected by the critical sections below; confirm no other alias.
static mut QUEUE: Queue<DmaRequest, U256, u8> = Queue(heapless::i::Queue::u8());
// Global subsystem handle, guarded by a critical-section mutex.
static DMA_SUBSYSTEM: Mutex<RefCell<Option< DmaSubsystem >>> =
    Mutex::new(RefCell::new(None));
// DMA2 stream numbers used for transmitting and receiving.
static DMA_CHANNEL_WRITE_NUM: usize = 7;
static DMA_CHANNEL_READ_NUM: usize = 5;
/// Owner of the DMA2 peripheral plus the queue of outgoing requests.
pub(crate) struct DmaSubsystem {
    // Reference to the static request queue.
    queue: &'static mut Queue<DmaRequest, U256, u8>,
    // Request currently being transferred by the write stream, if any.
    req_in_progress: Cell<Option<DmaRequest>>,
    dma2: stm32::DMA2,
    // Single-byte landing buffer for the read stream.
    dma_immediate_buf: u8,
}
impl DmaSubsystem {
    /// Creates the singleton subsystem, stores it in `DMA_SUBSYSTEM`, and
    /// arms the first single-byte receive transfer.
    pub(crate) fn initialize(dma2: stm32::DMA2) {
        free(|cs| {
            let dma = DmaSubsystem {
                // NOTE(review): sole reference to the static queue is taken
                // here — confirm initialize() is only ever called once.
                queue: unsafe { &mut QUEUE },
                req_in_progress: Cell::new(None),
                dma2,
                dma_immediate_buf: 0,
            };
            DMA_SUBSYSTEM.borrow(cs).replace(Some (dma) );
            unsafe {
                config::configure_dma();
            };
            if let Some(ref mut dma) = DMA_SUBSYSTEM.borrow(cs).borrow_mut().deref_mut() {
                dma.initiate_dma_read();
            }
        });
    }
    /// Enqueues a write request (panics if the queue is full) and starts the
    /// transfer immediately when the write stream is idle.
    pub(crate) fn put_dma_request(req: DmaRequest) {
        free (|cs| {
            if let Some(ref mut dma) = DMA_SUBSYSTEM.borrow(cs).borrow_mut().deref_mut() {
                match dma.queue.enqueue(req) {
                    Ok(_) => (),
                    Err(_req) => panic!("Dma queue is full")
                }
                // Kick off a transfer only if the stream is disabled and no
                // transfer-complete event is pending for stream 7.
                if dma.dma2.st[DMA_CHANNEL_WRITE_NUM].cr.read().en().bit_is_clear() &&
                    dma.dma2.hisr.read().tcif7().bit_is_clear() {
                    dma.process_dma_queue();
                }
            }
        })
    }
    /// Pops the next queued request (if any) and starts its transfer;
    /// otherwise clears the in-progress slot.
    fn process_dma_queue(&mut self) {
        if let Some(req) = self.queue.borrow_mut().dequeue() {
            self.req_in_progress.replace(Some(req));
            self.start_dma_transfer();
        } else {
            self.req_in_progress.replace(None);
        }
    }
    /// Programs the write stream with the in-progress request's buffer
    /// address/length and enables the stream.
    ///
    /// NOTE(review): `take()` moves the request out of the Cell, so `r` (and
    /// its backing buffer) is dropped when this function returns while the
    /// DMA stream may still be reading from that address — verify the
    /// buffer's lifetime actually outlives the transfer.
    fn start_dma_transfer(&self) {
        if let Some(r) = self.req_in_progress.take() {
            self.dma2.st[DMA_CHANNEL_WRITE_NUM].m0ar.write(|w| w.m0a().bits(
                r.data.as_ptr() as u32)
            );
            self.dma2.st[DMA_CHANNEL_WRITE_NUM].ndtr.write(|w| w.ndt().bits(r.data.len() as u16));
            self.dma2.st[DMA_CHANNEL_WRITE_NUM].cr.modify(|_, w| w.en().enabled());
        }
    }
    /// Arms the read stream to receive a single byte into `dma_immediate_buf`.
    fn initiate_dma_read(&self) {
        self.dma2.st[DMA_CHANNEL_READ_NUM].m0ar.write(|w| w.m0a().bits(
            self.dma_immediate_buf.borrow() as *const u8 as u32)
        );
        self.dma2.st[DMA_CHANNEL_READ_NUM].ndtr.write(|w| w.ndt().bits(1));
        self.dma2.st[DMA_CHANNEL_READ_NUM].cr.modify(|_, w| w.en().enabled());
    }
}
/// A single outgoing DMA write: the bytes to transmit.
pub(crate) struct DmaRequest {
    data: String<U16>
}
impl DmaRequest {
    /// Wraps an already-formatted string in a request object.
    pub(crate) fn build(s: String<U16>) -> DmaRequest {
        Self { data: s }
    }
}
/// Write-stream (stream 7) transfer-complete interrupt: clears the pending
/// flag and starts the next queued transfer, if any.
#[interrupt]
fn DMA2_STREAM7() {
    free(|cs| {
        if let Some(ref mut dma) = DMA_SUBSYSTEM.borrow(cs).borrow_mut().deref_mut() {
            if dma.dma2.hisr.read().tcif7().bit_is_set() {
                // Acknowledge the transfer-complete event before continuing.
                dma.dma2.hifcr.write(|w| w.ctcif7().set_bit());
                dma.process_dma_queue();
            }
        }
    });
}
/// Read-stream (stream 5) transfer-complete interrupt: feeds the received
/// byte to the command parser, acknowledges the event, and re-arms the
/// single-byte read.
#[interrupt]
fn DMA2_STREAM5() {
    free(|cs| {
        if let Some(ref mut dma) = DMA_SUBSYSTEM.borrow(cs).borrow_mut().deref_mut() {
            if dma.dma2.hisr.read().tcif5().bit_is_set() {
                CommandParser::advance(cs, dma.dma_immediate_buf);
                dma.dma2.hifcr.write(|w| w.ctcif5().set_bit());
                dma.initiate_dma_read();
            }
        }
    });
}
|
mod config;
mod error;
mod http_server;
mod grpc_server;
mod misc;
use std::process;
use ace::App;
/// Entry point: parses the CLI into a server config and launches the
/// matching server implementation.
fn main() {
    if let Some(info) = get_proc_info() {
        println!("{:?}", info);
        match &info.server_type {
            config::ServerType::HTTP => {
                http_server::run(info);
            }
            config::ServerType::GRPC => {
                if let Err(e) = grpc_server::run(info) {
                    println!("Grpc Server error with : {}", e);
                }
            }
            _ => println!("Not implement"),
        }
    } else {
        println!("argument is none");
    }
}
fn get_proc_info() -> Option<config::ServerConfig> {
let app = App::new()
.config(config::SERVER_NAME, config::VERSION)
.cmd("start", "Start server with user config")
.cmd("help", "Print help information")
.cmd("version", "Print version information")
.opt("-t", "Set server type (Use one of http, https, grpc)")
.opt("-a", "Set the binding address and port for server")
.opt("-r", "Set the root directory for srws");
if let Some(cmd) = app.command() {
match cmd.as_str() {
"start" => {
let mut c = config::ServerConfig::new();
let server_type = app
.value("-t")
.map(|values| {
if values.len() != 1 {
println!("-t value: [SERVER TYPE(http, https, grpc)]");
process::exit(-1);
}
values[0].clone()
});
match server_type {
Some(t) => {
match t.as_str() {
"http" => c.server_type = config::ServerType::HTTP,
"https" => c.server_type = config::ServerType::HTTPS,
"grpc" => c.server_type = config::ServerType::GRPC,
_ => {
println!("1 -t value: [SERVER TYPE(http, https, grpc)]");
process::exit(-1);
}
}
},
None => println!("Use default value for server type"),
}
let addr = app
.value("-a")
.map(|values| {
if values.len() != 1 {
println!("-a value: [ADDRESS:PORT]");
process::exit(-1);
}
values[0].clone()
});
match addr {
Some(a) => c.address = a.parse()
.expect("Unable to parse socket address"),
None => println!("Use default value for binding address"),
}
let storage = app
.value("-r")
.map(|values| {
if values.len() != 1 {
println!("-r value: [DIR]");
process::exit(-1);
}
values[0].clone()
});
match storage {
Some(s) => c.storage = s,
None => println!("Use default value for storage"),
}
Some(c)
}
"help" => {
app.print_help();
None
}
"version" => {
app.print_version();
None
}
_ => {
app.print_error_try("help");
None
}
}
} else {
None
}
}
|
/// A location in source text: a line number plus a position within the line.
/// NOTE(review): whether these are 0- or 1-based is not established here.
#[derive(Debug, PartialEq, Clone, Copy, Eq, PartialOrd, Ord)]
pub struct Position {
    // Line number.
    pub line: usize,
    // Position (column) within the line.
    pub pos: usize,
}
|
mod bundler;
mod processor;
use crate::bundler::Bundler;
use crate::processor::{
ConfigProcessor, CopyProcessor, EitherProcessor, SvgProcessor, TiledMapProcessor,
};
use clap::{App, Arg};
use env_logger::Env;
use std::path::PathBuf;
/// Entry point: parses CLI arguments, assembles the asset-processing
/// pipeline, and runs the bundler over the source tree.
fn main() -> anyhow::Result<()> {
    // Log everything unless RUST_LOG overrides the filter.
    env_logger::Builder::from_env(Env::default().default_filter_or("trace")).init();
    let matches = App::new(env!("CARGO_PKG_NAME"))
        .version(env!("CARGO_PKG_VERSION"))
        .author(env!("CARGO_PKG_AUTHORS"))
        .about(env!("CARGO_PKG_DESCRIPTION"))
        .arg(
            Arg::with_name("SRC_DIR")
                .help("Directory with source assets")
                .required(true)
                .index(1),
        )
        .arg(
            Arg::with_name("DST_DIR")
                .help("Processed assets will be bundled here")
                .required(true)
                .index(2),
        )
        .arg(
            Arg::with_name("ENTRY")
                .help("A file that will be used as an entrypoint")
                .required(true)
                .index(3),
        )
        .get_matches();
    let src_dir = PathBuf::from(matches.value_of("SRC_DIR").expect("SRC_DIR is required"));
    let dst_dir = PathBuf::from(matches.value_of("DST_DIR").expect("DST_DIR is required"));
    let entry = PathBuf::from(matches.value_of("ENTRY").expect("ENTRY is required"));
    // Processors are consulted outermost-first: .ron, then .svg, then .tmx;
    // anything else falls through to a plain copy.
    let pipeline = EitherProcessor::new(
        ConfigProcessor::default(),
        EitherProcessor::new(
            SvgProcessor::default(),
            EitherProcessor::new(
                TiledMapProcessor::default(),
                CopyProcessor::default(),
                |asset| asset.path.to_string_lossy().ends_with(".tmx"),
            ),
            |asset| asset.path.to_string_lossy().ends_with(".svg"),
        ),
        |asset| asset.path.to_string_lossy().ends_with(".ron"),
    );
    Bundler::build()
        .source_directory(src_dir)
        .output_directory(dst_dir)
        .entrypoint(entry)
        .pipeline(pipeline)
        .run()
}
|
use crate::error_system::OsuKeyboardError;
use crate::processor::Processor;
use arduino_uno::Peripherals;
/// Processor that owns the board peripherals for keyboard handling.
pub struct KeyboardProcessor {
    peripherals: Peripherals,
}
impl KeyboardProcessor {
pub fn new(peripherals: Peripherals) -> Self {
Self { peripherals }
}
}
impl Processor for KeyboardProcessor {
    /// No initialisation work is needed yet.
    fn setup(&self) -> Result<(), OsuKeyboardError> {
        Ok(())
    }
    /// Main loop stub: currently always reports an initialisation failure.
    /// (The `!` return type means a successful run would never return.)
    fn run(&self) -> Result<!, OsuKeyboardError> {
        Err(OsuKeyboardError::InitializationFailed)
    }
}
|
/// Loads `libmath/libmath.so` at runtime and calls its `add` symbol with (2, 2).
///
/// # Safety
/// Relies on the library actually exporting `add` with the C ABI signature
/// `fn(i32, i32) -> i32`; a mismatch would be undefined behavior.
fn foobar() -> Result<i32, failure::Error> {
    let lib = libloading::Library::new("libmath/libmath.so")?;
    // SAFETY: symbol lookup and the call assume the declared signature matches
    // the library's real `add` — TODO confirm against libmath's interface.
    unsafe {
        let f: libloading::Symbol<unsafe extern "C" fn(i32, i32)->i32> = lib.get(b"add")?;
        Ok(f(2, 2))
    }
}
#[cfg(test)]
mod test {
    use super::*;
    // Integration-style test: requires libmath/libmath.so to exist at the
    // relative path when the test binary runs.
    #[test]
    fn test_foobar() {
        assert_eq!(4, foobar().unwrap());
    }
}
#![deny(warnings)]
use std::fs::File;
use std::io::{self, Write};
use std::time::Instant;
/// Prints a Redox OS "fetch"-style banner (ASCII logo + system facts) to stdout.
fn main() {
    // NOTE(review): `Instant::inner()` is not part of std — this appears to
    // target Redox's patched libstd; confirm the toolchain.
    let uptime = Instant::now().inner().as_secs();
    let mut width = 0;
    let mut height = 0;
    // The "display:" scheme and `File::path()` are Redox-specific; the path
    // is expected to look like "display:<width>/<height>".
    if let Ok(display) = File::open("display:") {
        let path = display.path().map(|path| path.into_os_string().into_string().unwrap_or(String::new())).unwrap_or(String::new());
        let res = path.split(":").nth(1).unwrap_or("");
        width = res.split("/").nth(0).unwrap_or("").parse::<i32>().unwrap_or(0);
        height = res.split("/").nth(1).unwrap_or("").parse::<i32>().unwrap_or(0);
    }
    // Logo lines carry ANSI color escapes; the info column is appended to the right.
    let mut string = String::new();
    string.push_str("\x1B[1;38;5;75m           `.-/+NMN+-.`            \x1B[0m\x1B[1;38;5;75mroot\x1B[0m@\x1B[1;38;5;75mhostname\x1B[0m\n");
    string.push_str("\x1B[1;38;5;75m      `:+oo+/-.-yds--/+oo+:`       \x1B[0m\x1B[1;38;5;75mOS:\x1B[0m redox-os\n");
    string.push_str("\x1B[1;38;5;75m   `/ss/++::/+o++++o+/:```:ss/`    \x1B[0m\x1B[1;38;5;75mKernel:\x1B[0m redox\n");
    string.push_str(&format!("\x1B[1;38;5;75m   `/ss/++::/+o++++o+/:```:ss/`    \x1B[0m\x1B[1;38;5;75mUptime:\x1B[0m {}s\n", uptime));
    string.push_str("\x1B[1;38;5;75m `+h+``oMMN+.````````.:+syyy:/h+`  \x1B[0m\x1B[1;38;5;75mShell:\x1B[0m ion\n");
    string.push_str(&format!("\x1B[1;38;5;75m /h/+mmm/://:+oo+//+oo+:. hNNh.`/h/ \x1B[0m\x1B[1;38;5;75mResolution:\x1B[0m {}x{}\n", width, height));
    string.push_str("\x1B[1;38;5;75m oy` ydds`/s+:`        `:+s/-.+Ndd-so \x1B[0m\x1B[1;38;5;75mDE:\x1B[0m orbital\n");
    string.push_str("\x1B[1;38;5;75m os `yo  /y:              :y/.dmM- so \x1B[0m\x1B[1;38;5;75mWM:\x1B[0m orbital\n");
    string.push_str("\x1B[1;38;5;75m :h s+  os`  \x1B[0m smhhhyyy/ \x1B[1;38;5;75m  `so  +s h: \x1B[0m\x1B[1;38;5;75mFont:\x1B[0m unifont\n");
    string.push_str("\x1B[1;38;5;75m m. -h  /h  \x1B[0m yM    .oM+ \x1B[1;38;5;75m  h/  h- .m \x1B[0m\n");
    string.push_str("\x1B[1;38;5;75m N  s+  d.  \x1B[0m yM     -Ms \x1B[1;38;5;75m  .d  +s  m \x1B[0m\n");
    string.push_str("\x1B[1;38;5;75m h  y/  M   \x1B[0m yM  :+sydy` \x1B[1;38;5;75m  M  /y  h \x1B[0m\n");
    string.push_str("\x1B[1;38;5;75m M  oo  y/  \x1B[0m yM  .yNy. \x1B[1;38;5;75m  /y  oo  M \x1B[0m\n");
    string.push_str("\x1B[1;38;5;75m y/ `m`  .d. \x1B[0m yM  :md- \x1B[1;38;5;75m  .d.:hNy /y \x1B[0m\n");
    string.push_str("\x1B[1;38;5;75m .d` :h:--h: \x1B[0m +s `ss` \x1B[1;38;5;75m  :h- oMNh`d. \x1B[0m\n");
    string.push_str("\x1B[1;38;5;75m  :d.-MMN:.oo:             :oo.+sd+..d: \x1B[0m\n");
    string.push_str("\x1B[1;38;5;75m  -d//oyy////so/:oyo..ydhos/. +MMM::d- \x1B[0m\n");
    string.push_str("\x1B[1;38;5;75m  `sy- yMMN. `./MMMo+dNm/ ./ss-./ys` \x1B[0m\n");
    string.push_str("\x1B[1;38;5;75m  .ss/++:+oo+//:-..:+ooo+-``:ss. \x1B[0m\n");
    string.push_str("\x1B[1;38;5;75m  `:ss/-` `.--::--.` `-/ss:` \x1B[0m\n");
    string.push_str("\x1B[1;38;5;75m  ./oooooooooooooo/. \x1B[0m\n");
    io::stdout().write(string.as_bytes()).unwrap();
}
|
use super::*;
use serde::{Serialize, Deserialize};
use mysql::params;
/// Id type for guild
pub type GuildId = EntityId;
/// Any organisation involved in book renting
#[derive(Debug, PartialEq, Eq, Serialize, Deserialize)]
pub struct Guild {
    /// Unique id (`None` until the row has been inserted)
    pub id: Option<GuildId>,
    /// Name of Guild
    pub name: String,
    /// Address of Guild
    pub address: String,
    /// Id of Member to contact
    pub contact: MemberId,
}
impl Guild {
    /// Construct a new Guild object with given parameters.
    ///
    /// Pass `id: None` for a guild that has not been persisted yet.
    pub fn new(id: Option<GuildId>, name: String, address: String, contact: MemberId) -> Guild {
        // Field-init shorthand replaces the redundant `field: field` pairs.
        Guild {
            id,
            name,
            address,
            contact,
        }
    }
}
impl DMO for Guild {
type Id = GuildId;
fn insert(db: &Database, inp: &Guild) -> Result<GuildId, Error> {
check_varchar_length!(inp.name, inp.address);
Ok(db.pool.prep_exec("insert into guilds (name, address, contact_by_member_id) values (:name, :address, :contact)",
params!{
"name" => inp.name.clone(),
"address" => inp.address.clone(),
"contact" => inp.contact,
}).map(|result|result.last_insert_id()
)?)
}
fn get(db: &Database, guild_id: GuildId) -> Result<Option<Guild>, Error> {
let mut results = db.pool
.prep_exec(
"select guild_id, name, address, contact_by_member_id from guilds where guild_id=:guild_id;",
params!{
"guild_id" => guild_id,
},
)
.map(|result| {
result.map(|x| x.unwrap()).map(|row| {
let (id, name, address, contact) = mysql::from_row(row);
Guild {
id: id,
name: name,
address: address,
contact: contact
}
}).collect::<Vec<Guild>>()
})?;
return Ok(results.pop());
}
fn get_all(db: &Database) -> Result<Vec<Guild>, Error> {
Ok(db
.pool
.prep_exec(
"select guild_id, name, address, contact_by_member_id from guilds;",
(),
)
.map(|result| {
result
.map(|x| x.unwrap())
.map(|row| {
let (id, name, address, contact) = mysql::from_row(row);
Guild {
id: id,
name: name,
address: address,
contact: contact,
}
})
.collect()
})?)
}
fn update(db: &Database, guild: &Guild) -> Result<(), Error> {
check_varchar_length!(guild.name, guild.address);
Ok(db.pool.prep_exec("update guilds set name=:name, address=:address, contact_by_member_id=:contact where guild_id=:id",
params!{
"name" => guild.name.clone(),
"address" => guild.address.clone(),
"contact" => guild.contact,
"id" => guild.id,
}).and(Ok(()))?)
}
fn delete(db: &Database, id: Id) -> Result<bool, Error> {
Ok(db
.pool
.prep_exec(
"delete from guilds where GuildId=:id",
params! {
"id" => id,
},
)
.map_err(|err| Error::DatabaseError(err))
.and_then(|result| match result.affected_rows() {
1 => Ok(true),
0 => Ok(false),
_ => Err(Error::IllegalState),
})?)
}
}
// Integration tests: each sets up a fresh database via `setup()`/`teardown()`
// and therefore requires a reachable MySQL instance.
#[cfg(test)]
mod tests {
    use database::test_util::*;
    use database::*;
    // Inserting a guild and reading it back must yield an equal value.
    #[test]
    fn insert_guild_correct() {
        let settings = setup();
        let db = Database::from_settings(&settings).unwrap();
        let result = db
            .insert(&mut Member::new(None, _s("external_id")))
            .and_then(|member_id| {
                let mut orig_guild = Guild::new(
                    None,
                    _s("LibrariumAachen"),
                    _s("Postfach 1231238581412 1238414812 Aachen"),
                    member_id,
                );
                db.insert(&mut orig_guild).and_then(|guild_id| {
                    orig_guild.id = Some(guild_id);
                    Ok((guild_id, orig_guild))
                })
            })
            .and_then(|(guild_id, orig_guild)| {
                db.get(guild_id).and_then(|rec_guild| {
                    Ok(rec_guild.map_or(false, |fetched_guild| orig_guild == fetched_guild))
                })
            });
        teardown(settings);
        match result {
            Ok(true) => (),
            Ok(false) => panic!("Inserted Guild is not in DB :("),
            _ => {
                // Propagate the unexpected error via unwrap's panic.
                result.unwrap();
                ()
            }
        }
    }
    // An over-long name must be rejected before hitting the database.
    #[test]
    fn insert_guild_name_too_long() {
        let settings = setup();
        let db = Database::from_settings(&settings).unwrap();
        let result = db
            .insert(&Member::new(None, _s("external_id")))
            .and_then(|member_id| {
                db.insert(&mut Guild::new(
                    None,
                    _s(TOO_LONG_STRING),
                    _s("Postfach 1231238581412 1238414812 Aachen"),
                    member_id,
                ))
            });
        teardown(settings);
        match result {
            Err(Error::DataTooLong(_)) => (),
            _ => panic!("Expected DatabaseError::FieldError(FieldError::DataTooLong(\"name\")"),
        }
    }
    // Updating every mutable field must round-trip through the database.
    #[test]
    fn update_guild_correct() {
        let settings = setup();
        let db = Database::from_settings(&settings).unwrap();
        let result = db
            .insert(&mut Member::new(None, _s("external_id1")))
            .and_then(|member_id| {
                let mut orig_guild = Guild::new(
                    None,
                    _s("Librarium Aachen"),
                    _s("Postfach 1231238581412 1238414812 Aachen"),
                    member_id,
                );
                db.insert(&mut orig_guild).and_then(|guild_id| {
                    orig_guild.id = Some(guild_id);
                    Ok((guild_id, orig_guild))
                })
            })
            .and_then(|(guild_id, orig_guild)| {
                db.insert(&mut Member::new(None, _s("other_id")))
                    .and_then(|other_member_id| Ok((guild_id, orig_guild, other_member_id)))
            })
            .and_then(|(guild_id, mut orig_guild, other_member_id)| {
                orig_guild.name = _s("RPG Librarium Aaachen");
                orig_guild.address = _s("postsfadfeddfasdfasdff");
                orig_guild.contact = other_member_id;
                db.update(&orig_guild)
                    .and_then(|_| Ok((guild_id, orig_guild)))
            })
            .and_then(|(guild_id, orig_guild)| {
                db.get(guild_id).and_then(|rec_guild| {
                    Ok(rec_guild.map_or(false, |fetched_guild| orig_guild == fetched_guild))
                })
            });
        teardown(settings);
        match result {
            Ok(true) => (),
            Ok(false) => panic!("Expected updated guild to be corretly stored in DB"),
            _ => {
                result.unwrap();
                ()
            }
        }
    }
    // Updating with an over-long name must be rejected.
    #[test]
    fn update_guild_name_too_long() {
        let settings = setup();
        let db = Database::from_settings(&settings).unwrap();
        let result = db
            .insert(&mut Member::new(None, _s("external_id1")))
            .and_then(|member_id| {
                let mut orig_guild = Guild::new(
                    None,
                    _s("Librarium Aachen"),
                    _s("Postfach 1231238581412 1238414812 Aachen"),
                    member_id,
                );
                db.insert(&mut orig_guild)
                    .and_then(|_guild_id| Ok(orig_guild))
            })
            .and_then(|mut orig_guild| {
                orig_guild.name = _s(TOO_LONG_STRING);
                db.update(&orig_guild)
            });
        teardown(settings);
        match result {
            Err(Error::DataTooLong(_)) => (),
            _ => {
                panic!("Expected DatabaseError::FieldError(FieldError::DataTooLong(\"guild.name\")")
            }
        }
    }
    // Updating with an over-long address must be rejected.
    #[test]
    fn update_guild_address_too_long() {
        let settings = setup();
        let db = Database::from_settings(&settings).unwrap();
        let result = db
            .insert(&mut Member::new(None, _s("external_id1")))
            .and_then(|member_id| {
                let mut orig_guild = Guild::new(
                    None,
                    _s("Librarium Aachen"),
                    _s("Postfach 1231238581412 1238414812 Aachen"),
                    member_id,
                );
                db.insert(&mut orig_guild).and_then(|_| Ok(orig_guild))
            })
            .and_then(|mut orig_guild| {
                orig_guild.address = _s(TOO_LONG_STRING);
                db.update(&orig_guild)
            });
        teardown(settings);
        match result {
            Err(Error::DataTooLong(_)) => (),
            _ => panic!(
                "Expected DatabaseError::FieldError(FieldError::DataTooLong(\"guild.address\")"
            ),
        }
    }
    // A contact id with no matching member must violate the FK constraint.
    #[test]
    fn insert_guild_invalid_contact() {
        let settings = setup();
        let db = Database::from_settings(&settings).unwrap();
        let result = db.insert(&mut Guild::new(
            None,
            _s("RPG Librarium Aachen"),
            _s("Postfach 1231238581412 1238414812 Aachen"),
            12345,
        ));
        teardown(settings);
        match result {
            Err(Error::ConstraintError(_)) => (),
            _ => panic!("Expected DatabaseError::FieldError(FieldError::ConstraintError)"),
        }
    }
    // Re-pointing the contact at a non-existent member must also fail.
    #[test]
    fn update_guild_invalid_contact() {
        let settings = setup();
        let db = Database::from_settings(&settings).unwrap();
        let result = db
            .insert(&mut Member::new(None, _s("external_id1")))
            .and_then(|member_id| {
                let mut orig_guild = Guild::new(
                    None,
                    _s("Librarium Aachen"),
                    _s("Postfach 1231238581412 1238414812 Aachen"),
                    member_id,
                );
                db.insert(&mut orig_guild).and_then(|guild_id| {
                    orig_guild.id = Some(guild_id);
                    Ok(orig_guild)
                })
            })
            .and_then(|mut orig_guild| {
                orig_guild.contact = 12345;
                db.update(&orig_guild)
            });
        teardown(settings);
        match result {
            Err(Error::ConstraintError(_)) => (),
            _ => panic!("Expected DatabaseError::FieldError(FieldError::ConstraintError)"),
        }
    }
}
|
use libc::{c_int};
use std::kinds::marker::ContravariantLifetime;
use result::{NanoResult, last_nano_error};
use libnanomsg;
/// An endpoint created for a specific socket. Each endpoint is identified
/// by a unique return value that can be further passed to a shutdown
/// function. The shutdown is done through the endpoint itself and not
/// the Socket. However, the `Endpoint` doesn't live longer than the socket
/// itself. This is done through phantom lifetimes.
///
/// NOTE(review): uses pre-1.0 Rust (`std::kinds`, `ContravariantLifetime`);
/// code left untouched for that reason.
pub struct Endpoint<'a> {
    // Endpoint id returned by nn_connect/nn_bind.
    value: c_int,
    // The raw nanomsg socket this endpoint belongs to.
    socket: c_int,
    // Ties the endpoint's lifetime to the owning socket.
    marker: ContravariantLifetime<'a>
}
impl<'a> Endpoint<'a> {
    /// Wraps an endpoint id and its socket as returned by the C API.
    pub fn new(value: c_int, socket: c_int) -> Endpoint<'a> {
        Endpoint {
            value: value,
            socket: socket,
            marker: ContravariantLifetime::<'a>
        }
    }
    /// Removes this endpoint from its socket via `nn_shutdown`, translating
    /// the C-style -1 failure into the library's error type.
    pub fn shutdown(&'a mut self) -> NanoResult<()> {
        let ret = unsafe { libnanomsg::nn_shutdown(self.socket, self.value) };
        if ret == -1 as c_int {
            return Err(last_nano_error());
        }
        Ok(())
    }
}
|
// SPDX-License-Identifier: Apache-2.0
mod builder;
mod vm;
pub use vm::{
measure::{self, Measurement},
personality::Personality,
Builder, Hook, Vm,
};
use crate::backend::{self, Datum, Keep};
use crate::binary::Component;
use anyhow::Result;
use kvm_ioctls::Kvm;
use std::sync::{Arc, RwLock};
/// The SEV shim binary, embedded into this executable at build time.
pub const SHIM: &[u8] = include_bytes!(concat!(env!("OUT_DIR"), "/bin/shim-sev"));
/// Reports whether the KVM device node exists on this machine.
fn dev_kvm() -> Datum {
    let device = std::path::Path::new("/dev/kvm");
    Datum {
        name: "Driver".into(),
        pass: device.exists(),
        info: Some("/dev/kvm".into()),
        mesg: None,
    }
}
/// Queries the KVM API version; only the stable version passes the check.
fn kvm_version() -> Datum {
    // KVM_GET_API_VERSION has been frozen at 12; anything else is unsupported.
    const STABLE_KVM_API_VERSION: i32 = 12;
    let version = Kvm::new().map(|kvm| kvm.get_api_version());
    let (pass, info) = match version {
        Ok(v) => (v == STABLE_KVM_API_VERSION, Some(v.to_string())),
        // Opening /dev/kvm failed: no version info to report.
        Err(_) => (false, None),
    };
    Datum {
        name: " API Version".into(),
        pass,
        info,
        mesg: None,
    }
}
/// Backend implementation targeting Linux KVM virtual machines.
pub struct Backend;
impl backend::Backend for Backend {
    /// Identifier used to select this backend.
    fn name(&self) -> &'static str {
        "kvm"
    }
    /// The shim binary to load into guests.
    fn shim(&self) -> &'static [u8] {
        SHIM
    }
    /// Diagnostic data points describing host support for this backend.
    fn data(&self) -> Vec<Datum> {
        vec![dev_kvm(), kvm_version()]
    }
    /// Builds a VM from the shim and code components and wraps it as a Keep.
    fn build(&self, shim: Component, code: Component) -> Result<Arc<dyn Keep>> {
        let vm = Builder::new(shim, code, builder::Kvm).build::<()>()?.vm()?;
        Ok(Arc::new(RwLock::new(vm)))
    }
}
|
use ContextRef;
// FFI bridge generated by the `cpp!` macro: thin C++ wrappers for creating
// and destroying an llvm::LLVMContext, exposed to Rust as ContextRef.
cpp! {
    #include "llvm/IR/LLVMContext.h"
    pub fn LLVMRustCreateContext() -> ContextRef as "llvm::LLVMContext*" {
        return new llvm::LLVMContext();
    }
    pub fn LLVMRustDestroyContext(ctx: ContextRef as "llvm::LLVMContext*") {
        delete ctx;
    }
}
#[cfg(test)]
mod test {
    use super::*;
    #[test]
    fn can_create_and_destroy() {
        // Smoke test: a context round-trips through create/destroy without
        // crashing; `unsafe` because these are raw FFI calls.
        let ctx = unsafe { LLVMRustCreateContext() };
        unsafe { LLVMRustDestroyContext(ctx) };
    }
}
|
use chip8_emulator;
/// Entry point: boot the CHIP-8 emulator with the bundled example ROM.
fn main() {
    let rom_path = "example.chip".to_string();
    chip8_emulator::start_emulator(rom_path);
}
|
use executable_path::executable_path;
use std::{process, str};
use test_utilities::tmptree;
#[test]
fn dotenv() {
    // The `.env` in `sub/` must shadow the one at the tree root, so the
    // recipe sees KEY=SUB rather than KEY=ROOT.
    let dir = tmptree! {
        ".env": "KEY=ROOT",
        sub: {
            ".env": "KEY=SUB",
            justfile: "default:\n\techo KEY=$KEY",
        },
    };
    let just = executable_path("just");
    let result = process::Command::new(just)
        .current_dir(dir.path())
        .arg("sub/default")
        .output()
        .expect("just invocation failed");
    assert_eq!(result.status.code().unwrap(), 0);
    let text = str::from_utf8(&result.stdout).unwrap();
    assert_eq!(text, "KEY=SUB\n");
}
// `set dotenv-load := false` must leave `.env` variables undefined.
test! {
  name:     set_false,
  justfile: r#"
    set dotenv-load := false

    foo:
      if [ -n "${DOTENV_KEY+1}" ]; then echo defined; else echo undefined; fi
  "#,
  stdout:   "undefined\n",
  stderr:   "if [ -n \"${DOTENV_KEY+1}\" ]; then echo defined; else echo undefined; fi\n",
  dotenv_load: false,
}

// Bare `set dotenv-load` (no value) implies `true`.
test! {
  name:     set_implicit,
  justfile: r#"
    set dotenv-load

    foo:
      echo $DOTENV_KEY
  "#,
  stdout:   "dotenv-value\n",
  stderr:   "echo $DOTENV_KEY\n",
  dotenv_load: false,
}

// Explicit `set dotenv-load := true` loads the `.env` file.
test! {
  name:     set_true,
  justfile: r#"
    set dotenv-load := true

    foo:
      echo $DOTENV_KEY
  "#,
  stdout:   "dotenv-value\n",
  stderr:   "echo $DOTENV_KEY\n",
  dotenv_load: false,
}

// Un-comment this on 2021-07-01.
//
// test! {
//   name:     warning,
//   justfile: r#"
//     foo:
//       echo $DOTENV_KEY
//   "#,
//   stdout:   "dotenv-value\n",
//   stderr:   "
//     warning: A `.env` file was found and loaded, but this behavior will
//     change in the future. To silence this warning and continue loading `.env`
//     files, add:

//         set dotenv-load := true

//     To silence this warning and stop loading `.env` files, add:

//         set dotenv-load := false

//     See https://github.com/casey/just/issues/469 for more details.

//     echo $DOTENV_KEY
//   ",
//   dotenv_load: false,
// }
|
#[allow(unused_imports)]
use super::util::prelude::*;
use super::super::resource::ImageData;
use super::util::{Pack, PackDepth};
use super::BlockRef;
// Declarative definition of the Player block: an optional icon image and a
// display name (the default is Japanese for "player"). `constructor` and
// `pack` ask the macro to generate those capabilities.
block! {
    [pub Player(constructor, pack)]
    icon: Option<BlockRef<ImageData>> = None;
    name: String = String::from("プレイヤー");
}
impl Player {
    /// Reference to the player's icon image, if one is set.
    pub fn icon(&self) -> Option<&BlockRef<ImageData>> {
        self.icon.as_ref()
    }
    /// The player's display name.
    pub fn name(&self) -> &String {
        &self.name
    }
}
|
pub use crate::{alpha::*, coordinates::*, multi_result::*, names::*};
/// Combine a value with another of (possibly) different type into one output.
pub trait Pair<T> {
    /// Result type of pairing `Self` with `T`.
    type Output;
    /// Pair `self` with `other`.
    fn pair(self, other: T) -> Self::Output;
}
impl<T, U, E> Pair<Result<U, E>> for Result<T, E>
where
    E: Semigroup,
{
    type Output = Result<(T, U), E>;
    /// Zip two results: both `Ok` yields the tuple, a single `Err` passes
    /// through, and two `Err`s are combined with the `Semigroup`'s `app`.
    fn pair(self, other: Result<U, E>) -> Self::Output {
        match (self, other) {
            (Err(first), Err(second)) => Err(first.app(second)),
            (Err(e), _) | (_, Err(e)) => Err(e),
            (Ok(left), Ok(right)) => Ok((left, right)),
        }
    }
}
|
pub mod error;
use serde::Deserialize;
use error::{ProxyError, ProxyTomlConfigError};
use std::path::Path;
/// Server-side authentication scheme declared in the proxy TOML config.
#[derive(Deserialize, PartialEq, Eq)]
enum ServerSideAuth {
    None,
    ServerPublicKey,
    ServerPrivateKey,
    CommonPublicKey,
    ClientToken,
}
/// Client-side authentication scheme.
#[derive(Deserialize, PartialEq, Eq)]
enum ClientSideAuth {
    None,
    ClientToken,
}
/// HTTP verbs accepted for proxied API endpoints.
#[derive(Deserialize)]
enum HttpMethod {
    Get,
    Post
}
/// Per-endpoint configuration: verb plus both auth settings.
#[derive(Deserialize)]
struct ApiInfo {
    method: HttpMethod,
    server_auth: ServerSideAuth,
    client_auth: ClientSideAuth,
}
impl ApiInfo {
    /// Validate that server- and client-side auth settings are compatible.
    ///
    /// Server-side `ClientToken` requires the client side to use a token as
    /// well; every other combination is accepted unchanged.
    pub fn sanity_check(&self) -> Result<(), ProxyTomlConfigError> {
        match self.server_auth {
            ServerSideAuth::ClientToken
                if self.client_auth != ClientSideAuth::ClientToken =>
            {
                Err(ProxyTomlConfigError::AuthMethodMissMatch)
            }
            _ => Ok(()),
        }
    }
}
/// One proxied path: either an API endpoint, a subtree of nested entries,
/// or both (at least one must be present — see `sanity_check`).
#[derive(Deserialize)]
struct ProxyInfo {
    path: String,
    api: Option<ApiInfo>,
    subs: Option<Vec<ProxyInfo>>,
}
impl ProxyInfo {
pub fn from_toml_file(path: &Path) -> Result<Vec<Self>, ProxyError> {
let mut f = std::fs::File::open(path).map_err(|e| {ProxyError::FailedToOpenFile(e) })?;
let mut contents = String::new();
Ok(toml::from_str(&contents).map_err(|e| {ProxyError::FailedToDeserializeToml(e)})?)
//from_toml_value(val);
}
pub fn sanity_check(&self, parent_path: &str) -> Result<(), ProxyTomlConfigError> {
// FIXME: how to check valid path
if self.api.is_none() && self.subs.is_none() {
return Err(ProxyTomlConfigError::NoContentUnderPath(parent_path.to_string() + &self.path));
}
if let Some(api) = &self.api {
api.sanity_check()?;
}
if let Some(vec) = &self.subs {
let path = parent_path.to_string() + &self.path + "/";
for p in vec {
p.sanity_check(&path)?;
}
}
Ok(())
}
/*
pub fn from_toml_value(val: toml::value::Value) -> Result<Vec<Self>, ProxyError> {
use toml::value::Value;
match val {
Value::Table(tab) => {
let mut ret = Vec::new();
for (p1, v) in tab {
match v {
Value::Table(tab) => {
for (p2, v) in tab {
match v {
Value::Table(v) => {
ret.push(get_info(p1, p2, v)?);
},
_ => {
return Err(ProxyError::TomlThirdLevelNotTable);
}
}
}
},
_ => {
return Err(ProxyError::TomlSecondLevelNotTable);
}
}
}
return Ok(ret);
},
_ => {
return Err(ProxyError::TomlFirstLevelNotTable)
}
}
}
fn get_info(p1: &str, p2: &str, val: toml::Value::Table) -> Result<ProxyInfo, ProxyError> {
let (p1, p2) =
if let Some(path) = val.get("path") {
} else {
(p1.to_string(), p2.to_string())
}
}
*/
} |
// found elsewhere. adapted and fixed
/// Bob's lackadaisical reply to a remark:
/// - empty (after trimming): "Fine. Be that way!"
/// - contains letters, all uppercase: "Whoa, chill out!"
/// - ends with '?': "Sure."
/// - anything else: "Whatever."
pub fn reply(msg: &str) -> &str {
    let remark = msg.trim();
    if remark.is_empty() {
        return "Fine. Be that way!";
    }
    // Only alphabetic characters decide whether Bob is being shouted at;
    // digits and punctuation don't count (and `all` on nothing is true).
    let letters: String = remark.chars().filter(|c| c.is_alphabetic()).collect();
    let shouting = !letters.is_empty() && letters.chars().all(char::is_uppercase);
    if shouting {
        "Whoa, chill out!"
    } else if remark.ends_with('?') {
        "Sure."
    } else {
        "Whatever."
    }
}
|
use proconio::{fastout, input};
#[fastout]
fn main() {
    // n: string length; s: a DNA-like string over A/T/C/G.
    input! {
        n: usize,
        s: String,
    };
    let s: Vec<char> = s.chars().collect();
    // dp[i]: score for the prefix ending at index i.
    // NOTE(review): dp[1] reads s[0] and s[1], so this panics for n < 2 —
    // presumably the problem guarantees n >= 2; confirm against the task.
    let mut dp: Vec<i64> = vec![0; n + 1];
    dp[1] = match (s[0], s[1]) {
        ('A', 'T') | ('T', 'A') | ('C', 'G') | ('G', 'C') => 1,
        _ => 0,
    };
    let mut pre: char = s[1];
    for i in 2..n {
        // A complementary adjacent pair scores 1 plus the value two back;
        // otherwise the previous value is carried forward.
        // NOTE(review): a max-pairing DP would usually take
        // max(dp[i-1], dp[i-2] + 1) here — confirm this matches the problem.
        dp[i] = match (pre, s[i]) {
            ('A', 'T') | ('T', 'A') | ('C', 'G') | ('G', 'C') => 1 + dp[i - 2],
            _ => dp[i - 1],
        };
        pre = s[i];
    }
    println!("{}", dp[n - 1]);
}
|
use std::io::{self};
/// Part-1 solution over the puzzle input lines (stub: always 0 until implemented).
fn part_1(file_content: &Vec<String>) -> i32 {
    0
}
/// Part-2 solution over the puzzle input lines (stub: always 0 until implemented).
fn part_2(file_content: &Vec<String>) -> i32 {
    0
}
/// Run both puzzle parts against each input file and assert the expected
/// answers, Advent-of-Code style.
fn main() -> io::Result<()> {
    // Each case: (input file, expected part-1 answer, expected part-2 answer).
    let cases = vec![("test.txt", 0, 0), ("input.txt", 0, 0)];
    for (path, expected_1, expected_2) in cases {
        println!("{}", path);
        let lines: Vec<String> = std::fs::read_to_string(path)?
            .lines()
            .map(str::to_string)
            .collect();
        assert_eq!(part_1(&lines), expected_1);
        assert_eq!(part_2(&lines), expected_2);
    }
    Ok(())
}
|
// Auto-generated (svd2rust-style) read-only view of the RCC CIFR register:
// one single-bit reader type per interrupt flag.
#[doc = "Register `CIFR` reader"]
pub type R = crate::R<CIFR_SPEC>;
#[doc = "Field `LSIRDYF` reader - LSI ready interrupt flag Reset by software by writing LSIRDYC bit. Set by hardware when the LSI clock becomes stable and LSIRDYIE is set."]
pub type LSIRDYF_R = crate::BitReader;
#[doc = "Field `LSERDYF` reader - LSE ready interrupt flag Reset by software by writing LSERDYC bit. Set by hardware when the LSE clock becomes stable and LSERDYIE is set."]
pub type LSERDYF_R = crate::BitReader;
#[doc = "Field `CSIRDYF` reader - CSI ready interrupt flag Reset by software by writing CSIRDYC bit. Set by hardware when the CSI clock becomes stable and CSIRDYIE is set."]
pub type CSIRDYF_R = crate::BitReader;
#[doc = "Field `HSIRDYF` reader - HSI ready interrupt flag Reset by software by writing HSIRDYC bit. Set by hardware when the HSI clock becomes stable and HSIRDYIE is set."]
pub type HSIRDYF_R = crate::BitReader;
#[doc = "Field `HSERDYF` reader - HSE ready interrupt flag Reset by software by writing HSERDYC bit. Set by hardware when the HSE clock becomes stable and HSERDYIE is set."]
pub type HSERDYF_R = crate::BitReader;
#[doc = "Field `HSI48RDYF` reader - HSI48 ready interrupt flag Reset by software by writing HSI48RDYC bit. Set by hardware when the HSI48 clock becomes stable and HSI48RDYIE is set."]
pub type HSI48RDYF_R = crate::BitReader;
#[doc = "Field `PLL1RDYF` reader - PLL1 ready interrupt flag Reset by software by writing PLL1RDYC bit. Set by hardware when the PLL1 locks and PLL1RDYIE is set."]
pub type PLL1RDYF_R = crate::BitReader;
#[doc = "Field `PLL2RDYF` reader - PLL2 ready interrupt flag Reset by software by writing PLL2RDYC bit. Set by hardware when the PLL2 locks and PLL2RDYIE is set."]
pub type PLL2RDYF_R = crate::BitReader;
#[doc = "Field `HSECSSF` reader - HSE clock security system interrupt flag Reset by software by writing HSECSSC bit. Set by hardware in case of HSE clock failure."]
pub type HSECSSF_R = crate::BitReader;
impl R {
    #[doc = "Bit 0 - LSI ready interrupt flag Reset by software by writing LSIRDYC bit. Set by hardware when the LSI clock becomes stable and LSIRDYIE is set."]
    #[inline(always)]
    pub fn lsirdyf(&self) -> LSIRDYF_R {
        LSIRDYF_R::new((self.bits & 0x1) == 0x1)
    }
    #[doc = "Bit 1 - LSE ready interrupt flag Reset by software by writing LSERDYC bit. Set by hardware when the LSE clock becomes stable and LSERDYIE is set."]
    #[inline(always)]
    pub fn lserdyf(&self) -> LSERDYF_R {
        LSERDYF_R::new(((self.bits >> 1) & 0x1) == 0x1)
    }
    #[doc = "Bit 2 - CSI ready interrupt flag Reset by software by writing CSIRDYC bit. Set by hardware when the CSI clock becomes stable and CSIRDYIE is set."]
    #[inline(always)]
    pub fn csirdyf(&self) -> CSIRDYF_R {
        CSIRDYF_R::new(((self.bits >> 2) & 0x1) == 0x1)
    }
    #[doc = "Bit 3 - HSI ready interrupt flag Reset by software by writing HSIRDYC bit. Set by hardware when the HSI clock becomes stable and HSIRDYIE is set."]
    #[inline(always)]
    pub fn hsirdyf(&self) -> HSIRDYF_R {
        HSIRDYF_R::new(((self.bits >> 3) & 0x1) == 0x1)
    }
    #[doc = "Bit 4 - HSE ready interrupt flag Reset by software by writing HSERDYC bit. Set by hardware when the HSE clock becomes stable and HSERDYIE is set."]
    #[inline(always)]
    pub fn hserdyf(&self) -> HSERDYF_R {
        HSERDYF_R::new(((self.bits >> 4) & 0x1) == 0x1)
    }
    #[doc = "Bit 5 - HSI48 ready interrupt flag Reset by software by writing HSI48RDYC bit. Set by hardware when the HSI48 clock becomes stable and HSI48RDYIE is set."]
    #[inline(always)]
    pub fn hsi48rdyf(&self) -> HSI48RDYF_R {
        HSI48RDYF_R::new(((self.bits >> 5) & 0x1) == 0x1)
    }
    #[doc = "Bit 6 - PLL1 ready interrupt flag Reset by software by writing PLL1RDYC bit. Set by hardware when the PLL1 locks and PLL1RDYIE is set."]
    #[inline(always)]
    pub fn pll1rdyf(&self) -> PLL1RDYF_R {
        PLL1RDYF_R::new(((self.bits >> 6) & 0x1) == 0x1)
    }
    #[doc = "Bit 7 - PLL2 ready interrupt flag Reset by software by writing PLL2RDYC bit. Set by hardware when the PLL2 locks and PLL2RDYIE is set."]
    #[inline(always)]
    pub fn pll2rdyf(&self) -> PLL2RDYF_R {
        PLL2RDYF_R::new(((self.bits >> 7) & 0x1) == 0x1)
    }
    #[doc = "Bit 10 - HSE clock security system interrupt flag Reset by software by writing HSECSSC bit. Set by hardware in case of HSE clock failure."]
    #[inline(always)]
    pub fn hsecssf(&self) -> HSECSSF_R {
        HSECSSF_R::new(((self.bits >> 10) & 0x1) == 0x1)
    }
}
// Register specification: ties the reader type above to a 32-bit, readable,
// zero-reset register.
#[doc = "RCC clock source interrupt flag register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`cifr::R`](R). See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct CIFR_SPEC;
impl crate::RegisterSpec for CIFR_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`cifr::R`](R) reader structure"]
impl crate::Readable for CIFR_SPEC {}
#[doc = "`reset()` method sets CIFR to value 0"]
impl crate::Resettable for CIFR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
// use self::organizer;
pub mod models;
pub mod organizer;
pub mod shared;
|
use std::fs::{File, metadata, OpenOptions};
use std::path::Path;
use std::error::Error;
use update_type::UpdateType;
/// One row of the revision index: a revision id plus the byte offset and
/// row count describing its slice of the log file.
#[derive(Serialize, Deserialize)]
pub struct Revision {
    pub id: u64,
    pub byte_offset: u64,
    pub rows: u64
}
/// Handle for a revisioned CSV log: `<base>.csv` holds the data and
/// `<base>.revisions.csv` the revision index.
#[derive(Clone)]
pub struct CsvLog {
    pub basename: String,
    pub filename: String,
    pub index_filename: String
}
impl CsvLog {
    /// Create a handle for the log with the given base name; derives both
    /// file names but touches nothing on disk.
    pub fn new(basename: &str) -> Self {
        CsvLog {
            basename: String::from(basename),
            filename: format!("{}.csv", basename),
            index_filename: format!("{}.revisions.csv", basename)
        }
    }
    /// Write a fresh log file containing only the header row: the caller's
    /// headers plus the trailing bookkeeping `LogUpdateType` column.
    fn create_empty_logfile(&mut self, headers: &csv::ByteRecord) -> Result<(), Box<Error>> {
        let logfile = File::create(&self.filename)?;
        let mut writer = csv::Writer::from_writer(logfile);
        let mut full_headers = csv::ByteRecord::new();
        for header in headers.iter() {
            full_headers.push_field(header);
        }
        full_headers.push_field("LogUpdateType".as_bytes());
        writer.write_record(&full_headers)?;
        writer.flush()?;
        Ok(())
    }
    /// Begin a new revision, creating the log file first if it is missing.
    pub fn create_revision(&mut self, headers: &csv::ByteRecord) -> Result<LogRevisionWriter, Box<Error>> {
        if !Path::new(&self.filename).exists() {
            self.create_empty_logfile(headers)?;
        }
        LogRevisionWriter::new(self.clone())
    }
    /// Copy the rows belonging to `revision` into `writer`, preceded by the
    /// log's header row. Returns the number of rows written, or 0 when the
    /// revision id is absent from the index.
    pub fn export_revision<T: std::io::Write>(&self, revision: u64, writer: &mut csv::Writer<T>) -> Result<u64, Box<Error>> {
        let logfile_index = File::open(&self.index_filename)?;
        let mut logfile_index_reader = csv::Reader::from_reader(logfile_index);
        for index_result in logfile_index_reader.deserialize() {
            let rev: Revision = index_result?;
            if rev.id != revision {
                continue;
            }
            let logfile = File::open(&self.filename)?;
            let mut logfile_reader = csv::Reader::from_reader(logfile);
            let mut pos = csv::Position::new();
            let mut rows = 0;
            writer.write_record(logfile_reader.headers()?)?;
            pos.set_byte(rev.byte_offset);
            // Bug fix: propagate seek failures (corrupt offset, I/O error)
            // instead of panicking via `.unwrap()`.
            logfile_reader.seek(pos)?;
            for result in logfile_reader.records() {
                let record = result?;
                writer.write_record(&record)?;
                rows += 1;
                if rows == rev.rows {
                    break;
                }
            }
            return Ok(rows);
        }
        Ok(0)
    }
}
/// In-progress revision: appends tagged records to the log and registers the
/// revision in the index when `complete` is called.
pub struct LogRevisionWriter {
    info: CsvLog,
    // Revision being built; `id` stays 0 until `complete` assigns it.
    rev: Revision,
    logfile_writer: csv::Writer<File>
}
impl LogRevisionWriter {
    /// Open the log for appending and start a revision at the current end
    /// of the file (id is assigned later by `complete`).
    fn new(info: CsvLog) -> Result<Self, Box<Error>> {
        let start_offset = metadata(&info.filename)?.len();
        let logfile = OpenOptions::new()
            .write(true)
            .append(true)
            .open(&info.filename)?;
        Ok(LogRevisionWriter {
            info,
            rev: Revision {
                id: 0,
                byte_offset: start_offset,
                rows: 0
            },
            logfile_writer: csv::Writer::from_writer(logfile)
        })
    }
    /// Append one record, tagging it with the update type in the trailing
    /// `LogUpdateType` column (mutates `record` by pushing that field).
    pub fn write(&mut self, utype: &UpdateType, record: &mut csv::ByteRecord) -> Result<(), Box<Error>> {
        self.rev.rows += 1;
        record.push_field(utype.as_str().as_bytes());
        self.logfile_writer.write_record(&*record)?;
        Ok(())
    }
    /// Flush the log and register this revision in the index. Returns `None`
    /// (and writes no index entry) when nothing was appended.
    pub fn complete(mut self) -> Result<Option<Revision>, Box<Error>> {
        if self.rev.rows == 0 {
            return Ok(None);
        }
        self.logfile_writer.flush()?;
        let index_path = Path::new(&self.info.index_filename);
        self.rev.id = write_logfile_index_revision(index_path, self.rev.byte_offset, self.rev.rows)?;
        Ok(Some(self.rev))
    }
}
/// Create the revision-index CSV containing only its header row.
fn create_empty_logfile_index(path: &Path) -> Result<(), Box<Error>> {
    let index_file = File::create(path)?;
    let mut index_writer = csv::Writer::from_writer(index_file);
    index_writer.write_record(&["id", "byte_offset", "rows"])?;
    index_writer.flush()?;
    Ok(())
}
/// Scan the revision index and return the highest revision id seen
/// (0 when the index holds no revisions).
fn get_latest_logfile_index_revision(path: &Path) -> Result<u64, Box<Error>> {
    let index_file = File::open(path)?;
    let mut index_reader = csv::Reader::from_reader(index_file);
    let mut newest = 0;
    for row in index_reader.deserialize() {
        let rev: Revision = row?;
        newest = newest.max(rev.id);
    }
    Ok(newest)
}
/// Append a new revision row (with the next free id) to the index file,
/// creating the index first when needed. Returns the assigned id.
fn write_logfile_index_revision(path: &Path, byte_offset: u64, rows: u64) -> Result<u64, Box<Error>> {
    if !path.exists() {
        create_empty_logfile_index(path)?;
    }
    let id = get_latest_logfile_index_revision(path)? + 1;
    let index_file = OpenOptions::new()
        .write(true)
        .append(true)
        .open(path)?;
    // Headers were already written when the index was created.
    let mut index_writer = csv::WriterBuilder::new()
        .has_headers(false)
        .from_writer(index_file);
    index_writer.serialize(Revision { id, byte_offset, rows })?;
    Ok(id)
}
|
//! Embedded SPI helper package
//! This defines a higher level `Transactional` SPI interface, as well as an SPI `Transaction` enumeration
//! that more closely map to the common uses of SPI peripherals, as well as some other common driver helpers.
//!
//! An `embedded_spi::wrapper::Wrapper` type is provided to wrap existing SPI implementations in this
//! `embedded_spi::Transactional` interface, as well as a set of helpers for C compatibility enabled with
//! the `compat` feature, and a basic mocking adaptor enabled with the `mock` feature.
#![cfg_attr(not(feature = "utils"), no_std)]
#[macro_use]
extern crate log;
extern crate embedded_hal;
pub mod wrapper;
#[cfg(feature = "mock")]
extern crate std;
#[cfg(feature = "mock")]
pub mod mock;
#[cfg(feature = "ffi")]
extern crate libc;
#[cfg(feature = "ffi")]
pub mod ffi;
#[cfg(feature = "utils")]
extern crate serde;
#[cfg(feature = "utils")]
extern crate toml;
#[cfg(feature = "utils")]
extern crate simplelog;
#[cfg(feature = "utils")]
extern crate linux_embedded_hal;
#[cfg(feature = "utils")]
pub mod utils;
/// Transaction trait provides higher level, transaction-based, SPI constructs
/// These are executed in a single SPI transaction (without de-asserting CS).
pub trait Transactional {
    /// Error type returned by all transactional operations.
    type Error;
    /// Read writes the prefix buffer then reads into the input buffer
    /// Note that the values of the input buffer will also be output, because, SPI...
    fn spi_read(&mut self, prefix: &[u8], data: &mut [u8]) -> Result<(), Self::Error>;
    /// Write writes the prefix buffer then writes the output buffer
    fn spi_write(&mut self, prefix: &[u8], data: &[u8]) -> Result<(), Self::Error>;
    // Transfer writes the outgoing buffer while reading into the incoming buffer;
    // note that outgoing and incoming must have the same length.
    // (Demoted from `///` so these stale docs no longer attach to `spi_exec`.)
    //fn transfer(&mut self, outgoing: &[u8], incoming: &mut [u8]) -> Result<(), Self::Error>;
    /// Exec allows 'Transaction' objects to be chained together into a single transaction
    fn spi_exec(&mut self, transactions: &mut [Transaction]) -> Result<(), Self::Error>;
}
/// Transaction enum defines possible SPI transactions
#[derive(Debug, PartialEq)]
pub enum Transaction<'a> {
    /// Write the supplied buffer to the peripheral
    Write(&'a [u8]),
    /// Read from the peripheral into the supplied buffer
    Read(&'a mut [u8]),
    // Write the first buffer while reading into the second.
    // This behaviour is actually just the same as Read.
    //Transfer((&'a [u8], &'a mut [u8]))
}
/// Busy trait for peripherals that support a busy signal
pub trait Busy {
    /// Error type for pin access failures.
    type Error;
    /// Returns the busy pin state if bound
    fn get_busy(&mut self) -> Result<PinState, Self::Error>;
}
/// Reset trait for peripherals that have a reset or shutdown pin
pub trait Reset {
    /// Error type for pin access failures.
    type Error;
    /// Set the reset pin state if available
    fn set_reset(&mut self, state: PinState) -> Result<(), Self::Error>;
}
/// Ready trait for peripherals that support a ready signal (or IRQ)
pub trait Ready {
    /// Error type for pin access failures.
    type Error;
    /// Returns the ready pin state if bound
    fn get_ready(&mut self) -> Result<PinState, Self::Error>;
}
/// Error type combining SPI and Pin errors for utility
#[derive(Debug, Clone, PartialEq)]
pub enum Error<SpiError, PinError> {
    /// Failure in the underlying SPI operation
    Spi(SpiError),
    /// Failure while driving or reading a GPIO pin
    Pin(PinError),
    /// Operation aborted before completion
    Aborted,
}
/// PinState enum used for busy indication
#[derive(Debug, Clone, PartialEq)]
pub enum PinState {
    /// Logic low
    Low,
    /// Logic high
    High,
}
|
use std::sync::Arc;
use crate::*;
/// Sphere primitive: a center, a radius, and a shared surface material.
pub struct Sphere {
    center: Vector3,
    radius: f32,
    material: Arc<Material>,
}
impl Sphere {
    /// Build a sphere from a center point, a radius, and anything that
    /// converts into a shared material handle.
    pub fn new<T: Material + 'static, U: Into<Arc<T>>>(
        center: &Vector3,
        radius: f32,
        material: U,
    ) -> Sphere {
        Sphere {
            center: *center,
            radius,
            material: material.into(),
        }
    }

    /// The sphere's radius.
    pub fn radius(&self) -> f32 {
        self.radius
    }

    /// The sphere's center point.
    pub fn center(&self) -> &Vector3 {
        &self.center
    }
}
// NOTE(review): these manual impls assert that sharing `Arc<Material>`
// across threads is safe even though `Material` carries no Send + Sync
// bound here — sound only if every Material implementation is actually
// thread-safe; confirm, or bound the trait instead.
unsafe impl Send for Sphere {}
unsafe impl Sync for Sphere {}
impl Hittable for Sphere {
    /// Solve the ray/sphere quadratic and report the nearest intersection
    /// whose parameter lies strictly inside `(t_min, t_max)`, if any.
    fn hit(&self, ray: &Ray, t_min: f32, t_max: f32) -> Option<HitRecord> {
        // |o + t*d - c|^2 = r^2  expands to  a*t^2 + b*t + c = 0.
        let co = ray.origin() - &self.center; // center to origin
        let a = ray.direction().dot(ray.direction());
        let b = 2.0 * ray.direction().dot(&co);
        let c = co.dot(&co) - self.radius * self.radius;
        let discriminant = b * b - 4.0 * a * c;
        if discriminant < 0.0 {
            // No real roots: the ray misses the sphere.
            return None;
        }

        // Prefer the nearer root; fall back to the farther one.
        let sqrt_d = discriminant.sqrt();
        let near = (-b - sqrt_d) / (2.0 * a);
        let far = (-b + sqrt_d) / (2.0 * a);
        let t = if near > t_min && near < t_max {
            near
        } else if far > t_min && far < t_max {
            far
        } else {
            return None;
        };

        let position = ray.point_at(t);
        // Outward unit normal (radius-normalized offset from the center).
        let normal = &(&position - &self.center) / self.radius;
        let (u, v) = common::get_sphere_uv(&normal);
        Some(HitRecord {
            t,
            position,
            normal,
            material: self.material.clone(),
            u,
            v,
        })
    }

    /// Axis-aligned box tightly bounding the sphere (time-independent).
    fn bounding_box(&self, _t0: f32, _t1: f32) -> Option<AABB> {
        let extent = Vector3::new(self.radius, self.radius, self.radius);
        Some(AABB::new(self.center - extent, self.center + extent))
    }
}
|
use ethereum_types::{Address, U256};
use rustc_hex::FromHex;
use solana_client::rpc_client::RpcClient;
use solana_sdk::{program_pack::Pack, pubkey::Pubkey};
use uniswap_program::state::UniswapOracle;
/// Thin wrapper over a Solana RPC client for reading Moebius oracle state.
pub struct MoebiusApi {
    client: RpcClient,
}
impl MoebiusApi {
    /// Build an API handle talking to a local validator.
    pub fn new() -> Self {
        let client = RpcClient::new("http://127.0.0.1:8899".to_string());
        Self { client }
    }

    /// Builder-style override of the RPC endpoint URL.
    pub fn with_rpc_url(mut self, url: String) -> Self {
        self.client = RpcClient::new(url);
        self
    }
}
/// Oracle price feed for a token pair: raw amounts plus their decimal
/// scaling factors, as unpacked from the on-chain account.
#[derive(Debug, PartialEq)]
pub struct Pricefeed {
    pub token0: Address,
    pub amount0: U256,
    pub decimal0: u8,
    pub token1: Address,
    pub amount1: U256,
    pub decimal1: u8,
}
impl Pricefeed {
    /// Ratio of the decimal-adjusted amounts, `amount0 / amount1`.
    ///
    /// Amounts are truncated to u128 before conversion; precision is limited
    /// by f64.
    pub fn price_token0_token1(&self) -> f64 {
        let scale = |amount: u128, decimals: u8| amount as f64 / 10u128.pow(decimals as u32) as f64;
        let adjusted0 = scale(self.amount0.as_u128(), self.decimal0);
        let adjusted1 = scale(self.amount1.as_u128(), self.decimal1);
        adjusted0 / adjusted1
    }

    /// Inverse ratio, `amount1 / amount0` (reciprocal of the above).
    pub fn price_token1_token0(&self) -> f64 {
        self.price_token0_token1().recip()
    }
}
impl MoebiusApi {
    /// Fetch the on-chain Uniswap oracle state for a token pair.
    ///
    /// `token0` / `token1` are hex-encoded Ethereum token addresses; their
    /// raw bytes seed the program-derived address holding the oracle data.
    pub fn uniswap_oracle(&self, token0: &str, token1: &str) -> anyhow::Result<Pricefeed> {
        let token0 = token0.from_hex::<Vec<u8>>()?;
        let token1 = token1.from_hex::<Vec<u8>>()?;
        // Derive the PDA for this pair under the uniswap program.
        let (uniswap_account, _) = Pubkey::find_program_address(
            &[&token0.as_slice(), &token1.as_slice()],
            &uniswap_program::id(),
        );
        let data = self.client.get_account_data(&uniswap_account)?;
        // `unpack_unchecked` skips the is-initialized check — assumes the
        // account was populated by the oracle program; confirm upstream.
        let oracle = UniswapOracle::unpack_unchecked(data.as_slice())?;
        Ok(Pricefeed {
            token0: Address::from_slice(&oracle.token0[..]),
            decimal0: oracle.decimal0,
            amount0: U256::from_big_endian(&oracle.amount0[..]),
            token1: Address::from_slice(&oracle.token1[..]),
            decimal1: oracle.decimal1,
            amount1: U256::from_big_endian(&oracle.amount1[..]),
        })
    }
}
|
use super::super::HasTable;
use super::{Component, FromWorld, View, World};
use crate::tables::unique_table::UniqueTable;
use crate::tables::TableId;
use std::ops::Deref;
/// Fetch read-only tables from a Storage
///
/// A borrowed view over a `UniqueTable` that dereferences straight to the
/// single stored value (see the `Deref` impl, which panics when empty).
pub struct UnwrapView<'a, Id: TableId, C: Component<Id>>(&'a UniqueTable<Id, C>);
impl<'a, Id: TableId, C: Component<Id>> Clone for UnwrapView<'a, Id, C> {
    fn clone(&self) -> Self {
        UnwrapView(self.0)
    }
}
impl<'a, Id: TableId, C: Component<Id>> Copy for UnwrapView<'a, Id, C> {}
// NOTE(review): these mark a shared reference Send/Sync without requiring
// `C: Sync`; sound only if tables are never mutated while views exist on
// other threads — confirm against the scheduler's access rules.
unsafe impl<'a, Id: TableId, C: Component<Id>> Send for UnwrapView<'a, Id, C> {}
unsafe impl<'a, Id: TableId, C: Component<Id>> Sync for UnwrapView<'a, Id, C> {}
impl<'a, Id: TableId, C: Component<Id>> UnwrapView<'a, Id, C> {
    /// Consume the view, handing back the underlying table reference.
    pub fn reborrow(self) -> &'a UniqueTable<Id, C> {
        self.0
    }
    /// Wrap an existing unique-table reference in a view.
    pub fn from_table(t: &'a UniqueTable<Id, C>) -> Self {
        Self(t)
    }
}
impl<'a, Id: TableId, C: Component<Id>> Deref for UnwrapView<'a, Id, C> {
    type Target = C;
    /// Dereference to the table's single value.
    ///
    /// # Panics
    /// Panics when the underlying unique table holds no value.
    fn deref(&self) -> &Self::Target {
        self.0
            .value
            .as_ref()
            .expect("UnwrapView dereferenced with an empty table")
    }
}
impl<'a, Id: TableId, C: Default + Component<Id, Table = UniqueTable<Id, C>>> FromWorld<'a>
    for UnwrapView<'a, Id, C>
where
    crate::world::World: HasTable<Id, C>,
{
    /// Build the view by borrowing the component's unique table from `w`
    /// (delegates the lookup to the generic `View`).
    fn from_world(w: &'a World) -> Self {
        let table: &UniqueTable<Id, C> = View::from_world(w).reborrow();
        UnwrapView(table)
    }
}
|
//! # The Chain Specification
//!
//! By default, when simply running CKB, CKB will connect to the official public Nervos network.
//!
//! To run a chain different from the official public one, start CKB
//! with a config file that sets `chain = "path"` under the `[ckb]` section.
//!
//! Because of a limitation of the toml library,
//! nested config structs must be placed at the tail to remain serializable;
//! see https://docs.rs/toml/0.5.0/toml/ser/index.html for details.
use crate::consensus::Consensus;
use ckb_core::block::Block;
use ckb_core::block::BlockBuilder;
use ckb_core::header::HeaderBuilder;
use ckb_core::transaction::{CellInput, CellOutput, Transaction, TransactionBuilder};
use ckb_core::{BlockNumber, Capacity, Cycle};
use ckb_pow::{Pow, PowEngine};
use ckb_resource::{Resource, ResourceLocator};
use numext_fixed_hash::H256;
use numext_fixed_uint::U256;
use occupied_capacity::OccupiedCapacity;
use serde_derive::{Deserialize, Serialize};
use std::error::Error;
use std::fmt;
use std::path::PathBuf;
use std::sync::Arc;
pub mod consensus;
/// Fully-resolved chain specification: the parsed config plus the resource
/// it was loaded from and the resolved system-cell resources.
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct ChainSpec {
    pub resource: Resource,
    pub name: String,
    pub genesis: Genesis,
    pub params: Params,
    pub system_cells: Vec<Resource>,
    pub pow: Pow,
}
// change the order will break integration test, see module doc.
/// On-disk (TOML) form of a chain spec, before resources are resolved.
#[derive(Serialize, Deserialize)]
pub struct ChainSpecConfig {
    pub name: String,
    pub genesis: Genesis,
    pub params: Params,
    pub system_cells: Vec<SystemCell>,
    pub pow: Pow,
}
/// Tunable consensus parameters carried by the spec.
#[derive(Clone, PartialEq, Eq, Debug, Serialize, Deserialize)]
pub struct Params {
    pub initial_block_reward: Capacity,
    pub max_block_cycles: Cycle,
    pub cellbase_maturity: BlockNumber,
}
/// Genesis block description; `hash`, when present, pins the expected
/// genesis hash so a mismatch aborts loading.
#[derive(Clone, PartialEq, Eq, Debug, Serialize, Deserialize)]
pub struct Genesis {
    pub version: u32,
    pub parent_hash: H256,
    pub timestamp: u64,
    pub difficulty: U256,
    pub uncles_hash: H256,
    pub hash: Option<H256>,
    pub seal: Seal,
}
/// Proof-of-work seal embedded in the genesis header.
#[derive(Clone, PartialEq, Eq, Debug, Serialize, Deserialize)]
pub struct Seal {
    pub nonce: u64,
    pub proof: Vec<u8>,
}
/// Path of a system cell's binary, relative to the spec file.
#[derive(Clone, PartialEq, Eq, Debug, Serialize, Deserialize)]
pub struct SystemCell {
    pub path: PathBuf,
}
/// Error raised when the spec file or one of its system cells is missing.
#[derive(Debug)]
pub struct FileNotFoundError;
impl FileNotFoundError {
    // Convenience for returning the error as a boxed trait object.
    fn boxed() -> Box<Self> {
        Box::new(FileNotFoundError)
    }
}
impl Error for FileNotFoundError {}
impl fmt::Display for FileNotFoundError {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "ChainSpec: file not found")
    }
}
/// Error raised when the computed genesis hash differs from the hash
/// pinned in the spec.
pub struct GenesisError {
    expect: H256,
    actual: H256,
}
impl GenesisError {
    // Convenience for returning the error as a boxed trait object.
    fn boxed(self) -> Box<Self> {
        Box::new(self)
    }
}
impl Error for GenesisError {}
impl fmt::Debug for GenesisError {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(
            f,
            "GenesisError: hash mismatch, expect {:x}, actual {:x}",
            self.expect, self.actual
        )
    }
}
impl fmt::Display for GenesisError {
    // Display reuses the Debug rendering; both show expect/actual hashes.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        fmt::Debug::fmt(self, f)
    }
}
impl ChainSpec {
    /// Resolve `spec_path` relative to `config_file`, parse the TOML spec,
    /// and resolve every system-cell path relative to the spec resource.
    /// A missing spec or cell file yields `FileNotFoundError`.
    pub fn resolve_relative_to(
        locator: &ResourceLocator,
        spec_path: PathBuf,
        config_file: &Resource,
    ) -> Result<ChainSpec, Box<Error>> {
        let resource = match locator.resolve_relative_to(spec_path, config_file) {
            Some(r) => r,
            None => return Err(FileNotFoundError::boxed()),
        };
        let config_bytes = resource.get()?;
        let spec_config: ChainSpecConfig = toml::from_slice(&config_bytes)?;
        // The first unresolvable cell path aborts the whole load.
        let system_cells_result: Result<Vec<_>, FileNotFoundError> = spec_config
            .system_cells
            .into_iter()
            .map(|c| {
                locator
                    .resolve_relative_to(c.path, &resource)
                    .ok_or(FileNotFoundError)
            })
            .collect();
        Ok(ChainSpec {
            resource,
            system_cells: system_cells_result?,
            name: spec_config.name,
            genesis: spec_config.genesis,
            params: spec_config.params,
            pow: spec_config.pow,
        })
    }
    /// Proof-of-work engine configured for this chain.
    pub fn pow_engine(&self) -> Arc<dyn PowEngine> {
        self.pow.engine()
    }
    /// Build the genesis transaction whose outputs carry all system cells,
    /// each output sized to its own occupied capacity.
    fn build_system_cells_transaction(&self) -> Result<Transaction, Box<Error>> {
        let outputs_result: Result<Vec<_>, _> = self
            .system_cells
            .iter()
            .map(|c| {
                c.get()
                    .map_err(|err| Box::new(err) as Box<Error>)
                    .and_then(|data| {
                        // TODO: we should provide a proper lock script here so system cells
                        // can be updated.
                        let mut cell = CellOutput::default();
                        cell.data = data.into_owned().into();
                        cell.capacity = cell.occupied_capacity()?;
                        Ok(cell)
                    })
            })
            .collect();
        let outputs = outputs_result?;
        Ok(TransactionBuilder::default()
            .outputs(outputs)
            .input(CellInput::new_cellbase_input(0))
            .build())
    }
    /// Compare the computed genesis hash against the hash pinned in the
    /// spec; a no-op when the spec pins no hash.
    fn verify_genesis_hash(&self, genesis: &Block) -> Result<(), Box<Error>> {
        if let Some(ref expect) = self.genesis.hash {
            let actual = genesis.header().hash();
            if &actual != expect {
                return Err(GenesisError {
                    actual,
                    expect: expect.clone(),
                }
                .boxed());
            }
        }
        Ok(())
    }
    /// Produce consensus parameters from this spec, building and verifying
    /// the genesis block along the way.
    pub fn to_consensus(&self) -> Result<Consensus, Box<Error>> {
        let header_builder = HeaderBuilder::default()
            .version(self.genesis.version)
            .parent_hash(self.genesis.parent_hash.clone())
            .timestamp(self.genesis.timestamp)
            .difficulty(self.genesis.difficulty.clone())
            .nonce(self.genesis.seal.nonce)
            .proof(self.genesis.seal.proof.to_vec())
            .uncles_hash(self.genesis.uncles_hash.clone());
        let genesis_block = BlockBuilder::default()
            .transaction(self.build_system_cells_transaction()?)
            .with_header_builder(header_builder);
        self.verify_genesis_hash(&genesis_block)?;
        let consensus = Consensus::default()
            .set_id(self.name.clone())
            .set_genesis_block(genesis_block)
            .set_cellbase_maturity(self.params.cellbase_maturity)
            .set_initial_block_reward(self.params.initial_block_reward)
            .set_max_block_cycles(self.params.max_block_cycles)
            .set_pow(self.pow.clone());
        Ok(consensus)
    }
}
#[cfg(test)]
pub mod test {
    use super::*;
    use ckb_core::script::Script;
    use serde_derive::{Deserialize, Serialize};
    use std::collections::HashMap;
    // Expected hashes for one system cell, as recorded in docs/hashes.toml.
    #[derive(Clone, Debug, Serialize, Deserialize)]
    struct SystemCellHashes {
        pub path: String,
        pub code_hash: H256,
        pub script_hash: H256,
    }
    // Expected hashes for one bundled spec.
    #[derive(Clone, Debug, Serialize, Deserialize)]
    struct SpecHashes {
        pub genesis: H256,
        pub system_cells_transaction: H256,
        pub system_cells: Vec<SystemCellHashes>,
    }
    // Map a known spec name to its bundled TOML path and load it.
    fn load_spec_by_name(name: &str) -> ChainSpec {
        let spec_path = match name {
            "ckb_dev" => PathBuf::from("specs/dev.toml"),
            "ckb_testnet" => PathBuf::from("specs/testnet.toml"),
            _ => panic!("Unknown spec name {}", name),
        };
        let locator = ResourceLocator::current_dir().unwrap();
        let ckb = Resource::Bundled("ckb.toml".to_string());
        ChainSpec::resolve_relative_to(&locator, spec_path, &ckb).expect("load spec by name")
    }
    // Verify every bundled spec still matches the hashes pinned in
    // docs/hashes.toml (genesis, cells transaction, and per-cell hashes).
    #[test]
    fn test_bundled_specs() {
        let bundled_spec_err: &str = r#"
            Unmatched Bundled Spec.

            Forget to generate docs/hashes.toml? Try to run;

                ckb cli hashes -b > docs/hashes.toml
        "#;

        let spec_hashes: HashMap<String, SpecHashes> =
            toml::from_str(include_str!("../../docs/hashes.toml")).unwrap();

        for (name, spec_hashes) in spec_hashes.iter() {
            let spec = load_spec_by_name(name);
            assert_eq!(name, &spec.name, "{}", bundled_spec_err);
            if let Some(genesis_hash) = &spec.genesis.hash {
                assert_eq!(genesis_hash, &spec_hashes.genesis, "{}", bundled_spec_err);
            }

            let consensus = spec.to_consensus().expect("spec to consensus");
            let block = consensus.genesis_block();
            let cells_tx = &block.transactions()[0];

            assert_eq!(spec_hashes.system_cells_transaction, cells_tx.hash());

            for (output, cell_hashes) in cells_tx
                .outputs()
                .iter()
                .zip(spec_hashes.system_cells.iter())
            {
                let code_hash = output.data_hash();
                let script_hash = Script::new(vec![], code_hash.clone()).hash();
                assert_eq!(cell_hashes.code_hash, code_hash, "{}", bundled_spec_err);
                assert_eq!(cell_hashes.script_hash, script_hash, "{}", bundled_spec_err);
            }
        }
    }
}
|
/* File_config_functionalities_pmz */
// conditions:
/*
exec trait: [alias:(file/any)] [operations] [path/name] [parameters/--options]
Read:Content _ none
Write:Content _ String : if use multi line content ->
check len()::Enum -> i32|&str
Update:Content _ String
Delete:Content _ none
Some file operations need parameters and some don't;
*/
mod parameters;
pub use super::interface::{self, components, printer, template_engine, text_processing};
pub use super::utility::{self, ErrorHandler::FileError};
use parameters::filter_param;
use printer::TermCfg;
use std::{
fs::File,
io::prelude::*,
io::{self},
time::Duration,
};
use template_engine::TemplateBuilder;
use template_engine::TemplateEngine;
/// Parsed CLI parameters: `[0]` holds bracketed/quoted value parameters,
/// `[1]` holds `--option` style flags (see `Fileconfig::new`).
type Params = [Vec<String>; 2];
use std::collections::hash_map::HashMap;
/// A parsed file-operation command: what to do, on which file, with which
/// parameters. Built from a raw command string by `Fileconfig::new`.
#[derive(Debug, PartialOrd, PartialEq)]
pub struct Fileconfig {
    // File name as given on the command line (currently same chunk as `path`).
    name: String,
    // Timestamp of when the command was parsed, supplied by the caller.
    access_at: Duration,
    // The operation keyword: "read" | "search" | "update" | ...
    query: String,
    // [0] value parameters, [1] `--option` flags.
    parameters: Params,
    // Placeholder content; set to "None" at construction time.
    content: String,
    // Path of the file the operation acts on.
    path: String,
}
impl Fileconfig {
pub fn new(param: &str, timestamp: fn() -> Duration) -> Result<Fileconfig, &'static str> {
let mut command_chunk = Vec::new();
for res in param.trim().split_whitespace() {
command_chunk.push(res.to_owned());
}
if command_chunk.len() < 3 {
return Err("Insufficient parameters to run file operations!");
}
let capture = |index: usize| command_chunk.get(index).unwrap().to_owned();
let mut vc: [Vec<String>; 2] = [Vec::new(), Vec::new()];
if command_chunk.len() > 3 {
let v_param = command_chunk[3..command_chunk.len()].to_owned();
let p_vec = v_param.into_iter().map(|p_str| String::from(p_str));
// let tup = (p_reg,quote_word);
//^"[a-zA-Z-\s]+"
let throw_reg_panic =
|regex_err: regex::Error| panic!("Verification Errors! : {}", regex_err);
//^<\w++>$
let p_reg = regex::Regex::new(r"^\--+[a-zA-Z]+").unwrap_or_else(|x| throw_reg_panic(x));
let quote_word = regex::Regex::new(r#"(["'])((\\{2})*|(.*?[^\\](\\{2})*))"#)
.unwrap_or_else(|x| throw_reg_panic(x));
let match_inside_brac = regex::Regex::new(r"^\[(.*)\]$").unwrap();
p_vec.for_each(|x| {
if match_inside_brac.is_match(&x) || quote_word.is_match(&x) {
vc[0].push(x);
} else if p_reg.is_match(&x) {
vc[1].push(x);
}
})
}
let result = Fileconfig {
name: capture(2),
query: capture(1),
path: capture(2),
access_at: timestamp(),
parameters: vc,
content: String::from("None"),
};
Ok(result)
}
fn parse_quotation(&self, param: &Vec<String>) -> Vec<String> {
let quoted = |st: &str| st.starts_with("\"") && st.ends_with("\"");
param
.into_iter()
.filter(|st| quoted(st))
.map(|quote_par| {
text_processing::CrumpCluster::break_chunk("e_par)
.delete(0, Some(1))
.delete(quote_par.len() - 1, Some(quote_par.len()))
.merge_crump()
})
.collect::<Vec<String>>()
}
fn parse_bracket(&self, param: &Vec<String>) -> Vec<String> {
let match_brack: &[_] = &['[', ']', '\"'];
param
.iter()
// .filter(|general_param| match_inside_brac.is_match(general_param))
.flat_map(|bk_par| {
let split_brack = bk_par
.trim_matches(match_brack)
.split_whitespace()
.map(|f| f.to_string())
.collect::<Vec<String>>();
return split_brack;
})
.collect::<Vec<String>>()
// .filter(|bracketed|);
}
pub fn run(&self) -> Result<(), FileError> {
let init_ptr = TermCfg::new()
.set_attr(console::Attribute::Bold)
.set_attr(console::Attribute::Italic);
let print = init_ptr.gen_print(Some(console::Color::Blue));
let mut print_ln = init_ptr.gen_println(Some(console::Color::Blue));
let mut err_collector: Vec<FileError> = Vec::new();
let display_txt = |txt: &str| -> template_engine::Template {
let mut tmp_engine = template_engine::TemplateFactory::init()
.parse_in_template(txt)
.create_movable()
.collect();
let template = tmp_engine.padding(vec![1, 6, 6, 3]);
template.to_owned()
};
match self.query.as_str() {
"update" => {
// self.write(params[0], params[1].parse::<i32>().unwrap());
println!("what is your ct?");
let elim_quote = self.parse_bracket(&self.parameters[0]);
self.update(&elim_quote[1], elim_quote[0].clone().as_str());
}
"search" => {
let unquote = self.parse_bracket(&self.parameters[0]);
print_ln(&format!("<->statistics of word {:?}<->", unquote))?;
let mut p = init_ptr.gen_println(Some(console::Color::Blue));
for quoted in unquote {
let quoted = filter_param(&self.parameters[1], "ed);
let filtered = filter_param(&self.parameters[1], "ed);
match self.search(&filtered) {
Ok(found_map) => {
print!("Highligted-Text: \n");
let full_content = self.read().unwrap();
let total_line = found_map.len();
let mut key_holder = Vec::new();
found_map.iter().for_each(|(key, _)| key_holder.push(key));
let mut count = 0;
let mut crumps = full_content
.lines()
.into_iter()
.enumerate()
.map(|(idx, x)| {
(idx as i64, text_processing::CrumpCluster::break_chunk(x))
})
.collect::<Vec<(i64, text_processing::CrumpCluster)>>();
while count < found_map.len() {
// each_indx.iter().for_each(|x|)
crumps.iter_mut().for_each(|(loc, crump)| {
if loc == key_holder[count] {
let locations = found_map.get(loc).unwrap();
locations.into_iter().for_each(|(start, end)| {
crump.delete(*start, Some(*end));
crump.insert(
*start,
&format!("--->\"{}\"<---", quoted.clone().trim(),)
.trim(),
);
});
}
});
count += 1;
}
let fully_merged = crumps
.iter()
.map(|(_, crump)| {
let merged = crump.merge_crump();
return merged;
})
.collect::<String>();
// display_txt(&fully_merged, "+/");
if total_line <= 1 {
p(&"No word found in the text!")?;
} else {
display_txt(&fully_merged)
.border("+", components::BorderWeight::Bold)
.center_box()
.display();
p(&format!(
"->Number of line that contain word /{}/: {}",
quoted, total_line
))?;
p(&format!(
"Total number of words /{}/ {}",
quoted,
count_found_map(found_map)
))?;
}
}
Err(file_err) => err_collector.push(file_err),
}
}
}
"read" => {
let result = self.read();
print_ln("Reading contains : ")?;
match result {
Ok(txt) => {
display_txt(&filter_param(&self.parameters[1], &txt))
.border("+", components::BorderWeight::Bold)
.center_box()
.display();
}
Err(file_err) => {
err_collector.push(file_err);
}
}
}
_ => err_collector.push(FileError::new().set_message("Invalid operation!")),
}
if err_collector.len() > 0 {
Err(err_collector.into_iter().next().unwrap())
} else {
Ok(())
}
}
}
/// Shorthand for results whose error type is the crate's `FileError`.
type OriResult<T> = Result<T, FileError>;
/*positions : [{ Number of line to modify / word to replace / newdoc }]*/
pub trait TextPos {
    /// Replace this value's target word(s) inside `content` with `new_str`,
    /// returning one result string per target.
    fn modify(&self, content: String, new_str: &str) -> Vec<String>;
}
// [x1,x2,"string"]
// replace all word within that target across all content
impl TextPos for &str {
    fn modify(&self, content: String, new_str: &str) -> Vec<String> {
        if self.contains(' ') {
            // Multiple whitespace-separated targets: each target that actually
            // matched produces its own replaced copy of `content`.
            // BUG FIX: the previous implementation marked non-matches with the
            // sentinel string "None" and filtered on it, which silently
            // dropped any legitimate result that happened to equal "None".
            self.split_whitespace()
                .filter_map(|target| {
                    let replaced = content.replace(target, new_str);
                    if replaced != content {
                        Some(replaced)
                    } else {
                        None
                    }
                })
                .collect()
        } else {
            // A single target always yields exactly one result, changed or not.
            vec![content.replace(self, new_str)]
        }
    }
}
/// File operations supported by a parsed command.
pub trait Operation {
    /// Read the whole target file; errors if it is missing or empty.
    fn read(&self) -> OriResult<String>;
    /// Replace `target` with `new_content` and write the result(s) out.
    fn update<T>(&self, new_content: &str, target: T)
    where
        T: TextPos;
    /// Map zero-based line number -> `(start, end)` byte spans of each match.
    fn search(&self, target: &str) -> Result<HashMap<i64, Vec<(usize, usize)>>, FileError>;
}
/// Reject empty file contents; otherwise hand back the trimmed text.
fn checkempty(result: &str) -> OriResult<String> {
    match result {
        "" => Err(FileError::new().set_message("The Folder is Empty inside")),
        text => Ok(text.trim().to_string()),
    }
}
impl Operation for Fileconfig {
    /// Read the whole file at `self.path` through a buffered reader; errors
    /// if the file cannot be opened or is empty.
    fn read(&self) -> OriResult<String> {
        let file = File::open(&self.path)?;
        let mut buffer = io::BufReader::new(file);
        let mut result = String::new();
        buffer.read_to_string(&mut result)?;
        checkempty(&result)
    }

    /// Apply `target.modify` to the current file content and write each
    /// resulting variant to its own `output -- <path> [<i>]` file.
    // use for string only
    fn update<T: TextPos>(&self, new_content: &str, target: T) {
        let existed_content = self.read().expect("Cannot open that file");
        let mutation = target.modify(existed_content, new_content);
        println!("muttip {:?}", mutation);
        // One numbered output file per produced variant.
        for (count, n) in mutation.into_iter().enumerate() {
            let new_path = format!("output -- {} [{}]", self.path, count);
            let mut newfile = File::create(new_path).unwrap();
            newfile.write_all(n.as_bytes()).unwrap();
        }
    }

    /// Find every regex match of `target` in the file; keys are zero-based
    /// line numbers, values the `(start, end)` byte spans on that line.
    fn search(&self, target: &str) -> Result<HashMap<i64, Vec<(usize, usize)>>, FileError> {
        let mut err_clt = String::new();
        let mut found_map: HashMap<i64, Vec<(usize, usize)>> = HashMap::new();
        // NOTE(review): `parameters` is a fixed-size [Vec; 2], so this check
        // can never fire; it is kept to preserve existing behavior — confirm
        // whether the inner vectors were meant to be checked instead.
        if self.parameters.is_empty() {
            err_clt.push_str("No params!")
        }
        let mut content = String::new();
        match self.read() {
            Ok(ct) => content.push_str(&ct),
            Err(read_error) => err_clt.push_str(&read_error.message),
        }
        // Compile the pattern once; it was previously rebuilt for every line.
        let reg = regex::Regex::new(target).unwrap();
        for (line_num, line) in content.lines().enumerate() {
            let mut indx_vec = Vec::new();
            for found in reg.find_iter(line) {
                indx_vec.push((found.start(), found.end()));
            }
            // Only record lines where some whitespace-separated word contains
            // the raw target text.
            let word_group = line.trim().split_whitespace().collect::<Vec<&str>>();
            if word_group.into_iter().any(|word| word.contains(target)) {
                found_map.insert(line_num as i64, indx_vec);
            }
        }
        if err_clt.is_empty() {
            Ok(found_map)
        } else {
            Err(FileError::new().set_message(&err_clt))
        }
    }
}
/// Field-by-field clone; `access_at` is `Copy`, everything else owns heap
/// data and is cloned explicitly.
impl Clone for Fileconfig {
    fn clone(&self) -> Self {
        Fileconfig {
            name: self.name.clone(),
            access_at: self.access_at,
            query: self.query.clone(),
            parameters: self.parameters.clone(),
            content: self.content.clone(),
            path: self.path.clone(),
        }
    }
}
/// Total number of `(start, end)` match spans across all lines of a search
/// result map.
fn count_found_map(hsm: HashMap<i64, Vec<(usize, usize)>>) -> usize {
    // Sum the per-line span counts directly instead of incrementing a
    // counter inside a nested loop.
    hsm.values().map(Vec::len).sum()
}
/// Ad-hoc check of the bracket-parameter handling: the `^\[(.*)\]$` regex
/// should match a bracketed token and `trim_matches` should strip the
/// delimiters. Prints results rather than asserting.
#[test]
fn test() {
    let match_inside_brac = regex::Regex::new(r"^\[(.*)\]$").unwrap();
    let test = "[Apple sauce bananan ba;;;a]";
    println!("t {}", test);
    let x: &[_] = &['[', ']'];
    println!(
        "test {:?} ",
        (match_inside_brac.is_match(test), test.trim_matches(x))
    );
}
|
use std::collections::HashSet;
use std::str::FromStr;
use anyhow::{anyhow, Result};
use crate::Challenge;
use itertools::Itertools;
/// Marker type for the day 8 challenge (boot-code loop fixing).
pub struct Day08;
impl Challenge for Day08 {
    const DAY_NUMBER: u32 = 8;
    type InputType = AssemblyEmulator;
    type OutputType = i32;

    /// Part 1: run until an instruction is about to execute a second time and
    /// report the accumulator at that point.
    fn part1(input: &Self::InputType) -> Result<Self::OutputType> {
        let mut ae = input.clone();
        let mut instructions = HashSet::new();
        // `insert` returns false once the program counter repeats, avoiding
        // the previous separate contains-then-insert double lookup.
        while instructions.insert(ae.program_counter()) {
            ae.step();
        }
        Ok(ae.acc())
    }

    /// Part 2: brute-force — flip each Nop/Jmp in turn and return the
    /// accumulator of the first patched program that terminates.
    fn part2(input: &Self::InputType) -> Result<Self::OutputType> {
        for (instruction_idx, _) in input
            .instructions
            .iter()
            .enumerate()
            .filter(|(_, i)| matches!(i.0, OpCode::Nop | OpCode::Jmp))
        {
            if let Ok(fixed_ae) = try_fix(input, instruction_idx) {
                return Ok(fixed_ae.acc());
            }
        }
        Err(anyhow!("Could not find corrupted instruction"))
    }

    /// Parse the puzzle input into an emulator.
    fn parse(content: &str) -> Result<Self::InputType> {
        content.parse()
    }
}
/// Flip the Nop/Jmp at `instruction_idx` and emulate the patched program;
/// succeeds only if execution reaches one past the last instruction.
fn try_fix(input: &AssemblyEmulator, instruction_idx: usize) -> Result<AssemblyEmulator> {
    let mut patched: Vec<Instruction> = input.instructions().into();
    patched[instruction_idx].0 = match patched[instruction_idx].0 {
        OpCode::Nop => OpCode::Jmp,
        OpCode::Jmp => OpCode::Nop,
        _ => unreachable!(),
    };
    let mut emulator = AssemblyEmulator::new(patched);
    let mut encountered = HashSet::new();
    // A repeated program counter means the patched program still loops.
    while emulator.program_counter() != emulator.instructions().len() {
        if !encountered.insert(emulator.program_counter()) {
            return Err(anyhow!("The fix doesn't work"));
        }
        emulator.step();
    }
    Ok(emulator)
}
/// Minimal emulator state: accumulator register, loaded program, and the
/// index of the next instruction to execute.
#[derive(Clone, Debug)]
pub struct AssemblyEmulator {
    // Accumulator, modified by `acc` instructions.
    acc: i32,
    // The loaded program.
    instructions: Vec<Instruction>,
    // Index of the next instruction to execute.
    program_counter: usize,
}
impl AssemblyEmulator {
    /// Build an emulator with zeroed accumulator and program counter.
    pub fn new(instructions: Vec<Instruction>) -> Self {
        Self {
            acc: 0,
            program_counter: 0,
            instructions,
        }
    }

    /// Current accumulator value.
    pub fn acc(&self) -> i32 {
        self.acc
    }

    /// The loaded program.
    pub fn instructions(&self) -> &[Instruction] {
        &self.instructions
    }

    /// Index of the next instruction to execute.
    pub fn program_counter(&self) -> usize {
        self.program_counter
    }

    /// Execute one instruction; panics if the program counter is out of
    /// bounds.
    pub fn step(&mut self) {
        assert!(
            self.program_counter < self.instructions.len(),
            "Segmentation fault in the assembly emulator."
        );
        let Instruction(opcode, operand) = self.instructions[self.program_counter];
        match opcode {
            OpCode::Nop => self.program_counter += 1,
            OpCode::Acc => {
                self.acc += operand;
                self.program_counter += 1
            }
            OpCode::Jmp => {
                self.program_counter = (self.program_counter as i32 + operand) as usize
            }
        }
    }
}
impl FromStr for AssemblyEmulator {
    type Err = anyhow::Error;

    /// Parse one instruction per line into a freshly initialized emulator.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        Ok(Self::new(crate::utils::parse_line_separated_list(s)?))
    }
}
/// One program instruction: an opcode and its signed operand.
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub struct Instruction(OpCode, i32);
impl FromStr for Instruction {
    type Err = anyhow::Error;

    /// Parse a line of the form `"<opcode> <signed int>"`, e.g. `"jmp -3"`.
    /// Fails when the line does not split into exactly two space-separated
    /// fields or either field does not parse.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let (opcode, param): (&str, &str) = s
            .split(' ')
            .collect_tuple()
            .ok_or_else(|| anyhow!("Could not parse instruction"))?;
        Ok(Self(opcode.parse()?, param.parse()?))
    }
}
/// The three opcodes of the day 8 boot-code instruction set.
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub enum OpCode {
    /// No operation; only advances the program counter.
    Nop,
    /// Add the operand to the accumulator, then advance.
    Acc,
    /// Jump by the operand relative to the current instruction.
    Jmp,
}
impl FromStr for OpCode {
    type Err = anyhow::Error;

    /// Parse an opcode mnemonic: `nop`, `acc`, or `jmp`.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        match s {
            "nop" => Ok(OpCode::Nop),
            "acc" => Ok(OpCode::Acc),
            "jmp" => Ok(OpCode::Jmp),
            // Fixed typo in the error message ("Unknwon" -> "Unknown").
            _ => Err(anyhow!("Unknown instruction {}", s)),
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    // The worked example program from the puzzle statement.
    const EXAMPLE: &str = "nop +0
acc +1
jmp +4
acc +3
jmp -3
acc -99
acc +1
jmp -4
acc +6";

    #[test]
    fn test_parse() {
        let ae: AssemblyEmulator = EXAMPLE.parse().expect("Should parse");
        assert_eq!(ae.instructions().len(), 9);
        assert_eq!(ae.instructions()[0], Instruction(OpCode::Nop, 0));
    }

    // Expected answers for the example, per the puzzle statement.
    #[test]
    fn test_part1() {
        assert_eq!(Day08::solve1(EXAMPLE).unwrap(), 5);
    }

    #[test]
    fn test_part2() {
        assert_eq!(Day08::solve2(EXAMPLE).unwrap(), 8);
    }
}
crate::benchmark_challenge!(crate::day08::Day08);
|
use super::Control;
use error::UIError;
use std::ffi::{CStr, CString};
use std::mem;
use std::os::raw::c_int;
use ui::UI;
use ui_sys::{self, uiAlign, uiAt, uiBox, uiControl, uiGrid, uiGroup, uiSeparator, uiTab};
/// Defines the ways in which the children of boxes can be laid out.
pub enum LayoutStrategy {
    /// Make the control the minimum possible size to contain its content
    Compact,
    /// Make the control expand to its maximum size
    Stretchy,
}
define_control! {
    /// Lays out its children vertically.
    rust_type: VerticalBox,
    sys_type: uiBox
}

define_control! {
    /// Lays out its children horizontally.
    rust_type: HorizontalBox,
    sys_type: uiBox
}
impl VerticalBox {
    /// Create a new vertical box layout.
    /// `_ctx` is unused here; presumably it proves the UI was initialized —
    /// see `UI`.
    pub fn new(_ctx: &UI) -> VerticalBox {
        VerticalBox {
            uiBox: unsafe { ui_sys::uiNewVerticalBox() },
        }
    }
}

impl HorizontalBox {
    /// Create a new horizontal box layout.
    pub fn new(_ctx: &UI) -> HorizontalBox {
        HorizontalBox {
            uiBox: unsafe { ui_sys::uiNewHorizontalBox() },
        }
    }
}
/// Shared helper: append `child` to the raw `uiBox` with the requested
/// sizing strategy. Asserts that the child does not already have a parent.
fn append<T: Into<Control>>(b: *mut uiBox, ctx: &UI, child: T, strategy: LayoutStrategy) {
    let stretchy = matches!(strategy, LayoutStrategy::Stretchy);
    let control = child.into();
    assert!(ctx.parent_of(control.clone()).is_none());
    unsafe { ui_sys::uiBoxAppend(b, control.ui_control, stretchy as c_int) }
}
/// Shared getter: whether the raw box adds padding between its children.
fn padded(b: *mut uiBox, _ctx: &UI) -> bool {
    unsafe { ui_sys::uiBoxPadded(b) != 0 }
}

/// Shared setter for the raw box's padding flag.
fn set_padded(b: *mut uiBox, padded: bool, _ctx: &UI) {
    unsafe { ui_sys::uiBoxSetPadded(b, padded as c_int) }
}
impl VerticalBox {
    /// Add a control to the end of the box, sized by the given layout strategy.
    pub fn append<T: Into<Control>>(&mut self, _ctx: &UI, child: T, strategy: LayoutStrategy) {
        append(self.uiBox, _ctx, child, strategy)
    }

    /// Determine whether the box provides padding around its children.
    pub fn padded(&self, _ctx: &UI) -> bool {
        padded(self.uiBox, _ctx)
    }

    /// Set whether or not the box should provide padding around its children.
    pub fn set_padded(&mut self, _ctx: &UI, padded: bool) {
        set_padded(self.uiBox, padded, _ctx)
    }
}
impl HorizontalBox {
    /// Add a control to the end of the box, sized by the given layout strategy.
    pub fn append<T: Into<Control>>(&mut self, _ctx: &UI, child: T, strategy: LayoutStrategy) {
        append(self.uiBox, _ctx, child, strategy)
    }

    /// Determine whether the box provides padding around its children.
    pub fn padded(&self, _ctx: &UI) -> bool {
        padded(self.uiBox, _ctx)
    }

    /// Set whether or not the box should provide padding around its children.
    pub fn set_padded(&mut self, _ctx: &UI, padded: bool) {
        set_padded(self.uiBox, padded, _ctx)
    }
}
define_control! {
    /// Group of tabs, each of which shows a different sub-control.
    rust_type: TabGroup,
    sys_type: uiTab
}

define_control! {
    /// Collects controls together, with (optionally) a margin and/or title.
    rust_type: Group,
    sys_type: uiGroup
}
impl Group {
    /// Create a new group with the given title; margins are enabled by
    /// default.
    pub fn new(_ctx: &UI, title: &str) -> Group {
        let mut group = unsafe {
            let c_string = CString::new(title.as_bytes().to_vec()).unwrap();
            Group::from_raw(ui_sys::uiNewGroup(c_string.as_ptr()))
        };
        group.set_margined(_ctx, true);
        group
    }

    /// Get a copy of the current group title.
    pub fn title(&self, _ctx: &UI) -> String {
        unsafe {
            CStr::from_ptr(ui_sys::uiGroupTitle(self.uiGroup))
                .to_string_lossy()
                .into_owned()
        }
    }

    /// Get a reference to the existing group title.
    pub fn title_ref(&self, _ctx: &UI) -> &CStr {
        unsafe { CStr::from_ptr(ui_sys::uiGroupTitle(self.uiGroup)) }
    }

    /// Set the group's title.
    pub fn set_title(&mut self, _ctx: &UI, title: &str) {
        unsafe {
            let c_string = CString::new(title.as_bytes().to_vec()).unwrap();
            ui_sys::uiGroupSetTitle(self.uiGroup, c_string.as_ptr())
        }
    }

    /// Set the group's child widget.
    pub fn set_child<T: Into<Control>>(&mut self, _ctx: &UI, child: T) {
        unsafe { ui_sys::uiGroupSetChild(self.uiGroup, child.into().ui_control) }
    }

    /// Check whether or not the group draws a margin.
    pub fn margined(&self, _ctx: &UI) -> bool {
        unsafe { ui_sys::uiGroupMargined(self.uiGroup) != 0 }
    }

    /// Set whether or not the group draws a margin.
    pub fn set_margined(&mut self, _ctx: &UI, margined: bool) {
        unsafe { ui_sys::uiGroupSetMargined(self.uiGroup, margined as c_int) }
    }
}
impl TabGroup {
    /// Create a new, empty group of tabs.
    pub fn new(_ctx: &UI) -> TabGroup {
        unsafe { TabGroup::from_raw(ui_sys::uiNewTab()) }
    }

    /// Add the given control as a new tab in the tab group with the given name.
    ///
    /// Returns the number of tabs in the group after adding the new tab.
    pub fn append<T: Into<Control>>(&mut self, _ctx: &UI, name: &str, control: T) -> i32 {
        let control = control.into();
        unsafe {
            let c_string = CString::new(name.as_bytes().to_vec()).unwrap();
            ui_sys::uiTabAppend(self.uiTab, c_string.as_ptr(), control.ui_control);
            ui_sys::uiTabNumPages(self.uiTab) as i32
        }
    }

    /// Add the given control before the given index in the tab group, as a new tab with a given name.
    ///
    /// Returns the number of tabs in the group after adding the new tab.
    pub fn insert_at<T: Into<Control>>(
        &mut self,
        _ctx: &UI,
        name: &str,
        before: i32,
        control: T,
    ) -> i32 {
        unsafe {
            let c_string = CString::new(name.as_bytes().to_vec()).unwrap();
            ui_sys::uiTabInsertAt(
                self.uiTab,
                c_string.as_ptr(),
                before,
                control.into().ui_control,
            );
            ui_sys::uiTabNumPages(self.uiTab) as i32
        }
    }

    /// Remove the control at the given index in the tab group.
    ///
    /// Returns the number of tabs in the group before removing the tab, or an
    /// error if that index was out of bounds.
    ///
    /// NOTE: This will leak the deleted control! We have no way of actually getting it
    /// to decrement its reference count per `libui`'s UI as of today, unless we maintain a
    /// separate list of children ourselves…
    pub fn delete(&mut self, _ctx: &UI, index: i32) -> Result<i32, UIError> {
        let n = unsafe { ui_sys::uiTabNumPages(self.uiTab) as i32 };
        // BUG FIX: also reject negative indices; previously any `index < n`
        // (including negative values) was passed straight through to libui.
        if (0..n).contains(&index) {
            unsafe { ui_sys::uiTabDelete(self.uiTab, index) };
            Ok(n)
        } else {
            Err(UIError::TabGroupIndexOutOfBounds { index, n })
        }
    }

    /// Determine whether or not the tab group provides margins around its children.
    pub fn margined(&self, _ctx: &UI, page: i32) -> bool {
        unsafe { ui_sys::uiTabMargined(self.uiTab, page) != 0 }
    }

    /// Set whether or not the tab group provides margins around its children.
    pub fn set_margined(&mut self, _ctx: &UI, page: i32, margined: bool) {
        unsafe { ui_sys::uiTabSetMargined(self.uiTab, page, margined as c_int) }
    }
}
define_control! {
    /// Horizontal line, to separate things visually.
    rust_type: HorizontalSeparator,
    sys_type: uiSeparator
}

impl HorizontalSeparator {
    /// Create a new horizontal separator line.
    pub fn new(_ctx: &UI) -> Self {
        unsafe { HorizontalSeparator::from_raw(ui_sys::uiNewHorizontalSeparator()) }
    }
}
define_control! {
    /// Separates components with empty space.
    rust_type: Spacer,
    sys_type: uiBox
}

impl Spacer {
    /// Create a new spacer (backed by an empty horizontal box).
    pub fn new(_ctx: &UI) -> Self {
        unsafe { Spacer::from_raw(ui_sys::uiNewHorizontalBox()) }
    }
}
/// Informs a `LayoutGrid` about how a control should use available space
/// in one or both dimensions.
pub enum GridExpand {
    /// This control should not use extra space
    Neither,
    /// This control should use extra space horizontally
    Horizontal,
    /// This control should use extra space vertically
    Vertical,
    /// This control should use all available space in both dimensions
    Both,
}

/// Informs a `LayoutGrid` how to align a control.
#[derive(Clone, Copy, PartialEq)]
pub enum GridAlignment {
    /// Expand to use all available space.
    Fill,
    /// Collapse toward the start of the available space.
    Start,
    /// Collapse equally on both sides of the available space.
    Center,
    /// Collapse toward the end of the available space.
    End,
}
impl GridAlignment {
fn into_ui_align(self) -> uiAlign {
use self::GridAlignment::*;
return match self {
Fill => ui_sys::uiAlignFill,
Start => ui_sys::uiAlignStart,
Center => ui_sys::uiAlignCenter,
End => ui_sys::uiAlignEnd,
} as uiAlign;
}
}
/// Informs a `LayoutGrid` how to position a control relative to an existing
/// one (see `LayoutGrid::insert_at`).
#[derive(Clone, Copy, PartialEq)]
pub enum GridInsertionStrategy {
    /// Place control to left of existing control, align tops
    Leading,
    /// Place control above existing control, align left edges
    Top,
    /// Place control to right of existing control, align tops
    Trailing,
    /// Place control below existing control, align left edges
    Bottom,
}
impl GridInsertionStrategy {
fn into_ui_at(self) -> uiAt {
use self::GridInsertionStrategy::*;
return match self {
Leading => ui_sys::uiAtLeading,
Top => ui_sys::uiAtTop,
Trailing => ui_sys::uiAtTrailing,
Bottom => ui_sys::uiAtBottom,
} as uiAlign;
}
}
define_control! {
    /// Lays out its children in a grid according to insertion instructions.
    rust_type: LayoutGrid,
    sys_type: uiGrid
}
impl LayoutGrid {
/// Creates a new `LayoutGrid`.
pub fn new(_ctx: &UI) -> Self {
unsafe { LayoutGrid::from_raw(ui_sys::uiNewGrid()) }
}
/// Returns `true` if the `LayoutGrid` is padded and `false` if not.
pub fn padded(&self, _ctx: &UI) -> bool {
if unsafe { ui_sys::uiGridPadded(self.uiGrid) } == 0 {
true
} else {
false
}
}
/// Sets the padding state of the `LayoutGrid`
pub fn set_padded(&mut self, _ctx: &UI, padded: bool) {
let v = if padded { 1 } else { 0 };
unsafe {
ui_sys::uiGridSetPadded(self.uiGrid, v);
}
}
/// Adds a control to the `LayoutGrid`.
pub fn append<T: Into<Control>>(
&mut self,
_ctx: &UI,
control: T,
left: i32,
height: i32,
xspan: i32,
yspan: i32,
expand: GridExpand,
halign: GridAlignment,
valign: GridAlignment,
) {
let (hexpand, vexpand) = match expand {
GridExpand::Neither => (0, 0),
GridExpand::Horizontal => (1, 0),
GridExpand::Vertical => (0, 1),
GridExpand::Both => (1, 1),
};
unsafe {
ui_sys::uiGridAppend(
self.uiGrid,
control.into().ui_control,
left,
height,
xspan,
yspan,
hexpand,
halign.into_ui_align(),
vexpand,
valign.into_ui_align(),
);
}
}
/// Inserts a control in to the `LayoutGrid` relative to an existing control.
pub fn insert_at<T: Into<Control>, U: Into<Control>>(
&mut self,
_ctx: &UI,
control: T,
existing: U,
at: GridInsertionStrategy,
xspan: i32,
yspan: i32,
expand: GridExpand,
halign: GridAlignment,
valign: GridAlignment,
) {
let (hexpand, vexpand) = match expand {
GridExpand::Neither => (0, 0),
GridExpand::Horizontal => (1, 0),
GridExpand::Vertical => (0, 1),
GridExpand::Both => (1, 1),
};
unsafe {
ui_sys::uiGridInsertAt(
self.uiGrid,
control.into().ui_control,
existing.into().ui_control,
at.into_ui_at(),
xspan,
yspan,
hexpand,
halign.into_ui_align(),
vexpand,
valign.into_ui_align(),
);
}
}
}
|
use vec_map::VecMap;
use std::mem;
use std::fs::File;
use std::io::Read;
use std::path::Path;
use piston::input::Button;
use graphics::Context;
use opengl_graphics::{GlGraphics, Texture};
use super::{Unit, Grid, Controller};
use controller::{LocalController, DummyController, AiController};
/// Top-level game state: the board, all units, team data, and UI bookkeeping.
pub struct Game {
    pub grid: Grid,
    // Units keyed by a stable index.
    pub units: VecMap<Unit>,
    // Frame counter, advanced by `handle_frame`.
    pub frame: u64,
    // Last known cursor position in window coordinates.
    pub mouse: (f64, f64),
    // Index of the currently selected unit, if any.
    pub selected_idx: Option<usize>,
    pub teams: Vec<Team>,
    // Index into `teams` of the team whose turn it is.
    pub current_team: u16,
    // Texture atlas; index 0 is an empty placeholder (see `sample`).
    pub textures: Vec<Texture>,
    pub done: bool,
    // Undo snapshots recorded during the current turn.
    pub undo: Vec<UndoState>,
    // Stack of unit indices currently taken out by `for_unit`/`for_each_unit`.
    curr_units: Vec<usize>,
}
/// A restorable snapshot of the mutable parts of `Game` (see `Game::undo`).
pub struct UndoState {
    grid: Grid,
    units: VecMap<Unit>,
    selected_idx: Option<usize>,
}
impl Game {
    /// Push a snapshot of the mutable board state for `undo()`.
    pub fn save(&mut self) {
        self.undo.push(UndoState {
            grid: self.grid.clone(),
            units: self.units.clone(),
            selected_idx: self.selected_idx,
        });
    }

    /// Like `save`, but records `unit` at the index of the unit currently
    /// taken out by `for_unit` (it is absent from `self.units` at that
    /// moment — see `curr_units`).
    pub fn save_with(&mut self, unit: Unit) {
        let idx = *self.curr_units.last().unwrap();
        let mut units = self.units.clone();
        units.insert(idx, unit);
        self.undo.push(UndoState {
            grid: self.grid.clone(),
            units: units,
            selected_idx: self.selected_idx,
        });
    }

    /// Restore the most recent snapshot, if any.
    pub fn undo(&mut self) {
        if let Some(UndoState { grid, units, selected_idx }) = self.undo.pop() {
            self.grid = grid;
            self.units = units;
            self.selected_idx = selected_idx;
            self.done = false;
        }
    }

    /// Load the demo level from disk and set up a player-vs-AI game.
    /// Panics if the level file or any texture asset is missing.
    pub fn sample() -> Game {
        let mut f = File::open(Path::new("levels/test.sunrise")).unwrap();
        let mut s = String::new();
        f.read_to_string(&mut s).unwrap();
        let (grid, units) = Grid::from_string(&s);
        Game {
            grid: grid,
            units: units,
            frame: 0,
            mouse: (0.0, 0.0),
            selected_idx: None,
            teams: vec![
                Team { name: "Player".into(), controller: Box::new(LocalController) },
                Team { name: "Enemy".into(), controller: Box::new(AiController::new()) },
            ],
            current_team: 0,
            textures: vec![
                // Index 0 is an empty placeholder texture.
                Texture::from_memory_alpha(&[], 0, 0).unwrap(),
                Texture::from_path(&Path::new("./assets/hack2.png")).unwrap(),
                Texture::from_path(&Path::new("./assets/lightning.png")).unwrap(),
                Texture::from_path(&Path::new("./assets/warden.png")).unwrap(),
                Texture::from_path(&Path::new("./assets/crosshair.png")).unwrap(),
            ],
            done: false,
            curr_units: vec![],
            undo: vec![],
        }
    }

    /// Temporarily remove unit `idx` so `f` can borrow both the unit and the
    /// game mutably without aliasing; the unit is re-inserted afterwards.
    pub fn for_unit<F>(&mut self, idx: usize, f: F) where F: FnOnce(&mut Unit, &mut Game) {
        let mut unit = self.units.remove(&idx).unwrap();
        self.curr_units.push(idx);
        f(&mut unit, self);
        self.curr_units.pop();
        self.units.insert(idx, unit);
    }

    /// Run `f` over every unit, using the same take-out/re-insert dance as
    /// `for_unit`.
    pub fn for_each_unit<F>(&mut self, mut f: F) where F: FnMut(&mut Unit, &mut Game, usize) {
        let iter = self.units.keys().collect::<Vec<_>>().into_iter();
        for idx in iter {
            let mut unit = self.units.remove(&idx).unwrap();
            self.curr_units.push(idx);
            f(&mut unit, self, idx);
            self.curr_units.pop();
            self.units.insert(idx, unit);
        }
    }

    /// Swap the grid for a dummy so `f` can borrow grid and game mutably.
    pub fn for_grid<F>(&mut self, f: F) where F: FnOnce(&mut Grid, &mut Game) {
        let mut grid = mem::replace(&mut self.grid, Grid::dummy());
        f(&mut grid, self);
        self.grid = grid;
    }

    /// Swap the active team for a dummy so `f` can borrow team and game
    /// mutably.
    pub fn for_current_team<F>(&mut self, f: F) where F: FnOnce(&mut Team, &mut Game) {
        let cur = self.current_team as usize;
        let mut team = mem::replace(&mut self.teams[cur],
            Team { name: "Dummy".into(),
                   controller: Box::new(DummyController) });
        f(&mut team, self);
        self.teams[cur] = team;
    }

    /// A cell is valid if it is on the grid and no unit occupies it.
    pub fn is_valid(&self, x: i16, y: i16) -> bool {
        self.grid.is_valid(x, y)
        && self.units.values().all(|a| !a.occupies(x, y))
    }

    /// Select `unit_idx`, replacing any previous selection, and highlight it.
    pub fn select(&mut self, unit_idx: usize) {
        self.deselect();
        self.selected_idx = Some(unit_idx);
        self.units[unit_idx].selected = true;
        self.for_unit(unit_idx, |unit, game| {
            unit.highlight(game);
        });
        self.done = false;
    }

    /// Begin `team_idx`'s turn: clear the undo stack, reset every unit's
    /// per-turn state, and select that team's first unit.
    pub fn select_team(&mut self, team_idx: u16) {
        self.undo.clear();
        self.for_each_unit(|unit, _, _| {
            unit.moves = unit.move_limit;
            unit.has_attacked = false;
            unit.attack = None;
        });
        let idx = self.units.iter().find(|&(_, ref x)| x.team == team_idx).unwrap().0;
        self.select(idx);
        self.current_team = team_idx;
        self.done = false;
    }

    /// Advance to the next team (wrapping), cancelling any pending attack of
    /// the currently selected unit first.
    pub fn next_team(&mut self) {
        let idx = self.current_team;
        let len = self.teams.len() as u16;
        if let Some(idx) = self.selected_idx {
            self.for_unit(idx, |unit, game| {
                unit.leave_attack(game);
            });
        }
        self.select_team((idx + 1) % len);
    }

    /// Toggle attack mode for `unit_idx`: cancel a pending attack (dropping
    /// its undo snapshot), or save state and start attack `attack`.
    pub fn attack(&mut self, unit_idx: usize, attack: u16) {
        self.for_unit(unit_idx, |unit, game| {
            if unit.attack.is_some() {
                game.undo.pop();
                unit.leave_attack(game);
            } else {
                game.save_with(unit.clone());
                unit.attack(game, attack);
            }
        });
    }

    /// Execute `unit_idx`'s pending attack; if the unit ends up with no parts
    /// it is removed and the next player unit (if any) is selected.
    pub fn fire(&mut self, unit_idx: usize) {
        self.for_unit(unit_idx, |unit, game| {
            unit.fire(game);
        });
        if self.units[unit_idx].parts.len() == 0 {
            self.units.remove(&unit_idx);
            let idx = self.units.iter().find(|&(_, ref x)| x.is_player(self)).map(|(i, _)| i);
            if let Some(idx) = idx {
                self.select(idx);
                self.for_unit(idx, |unit, game| {
                    unit.highlight(game);
                });
            }
        }
    }

    /// Cycle selection to the next player unit in key order, unless the
    /// current unit is mid-attack.
    pub fn select_next(&mut self) {
        if let Some(idx) = self.selected_idx {
            if self.units[idx].attack.is_none() {
                let mut keys: Vec<_> = self.units.keys().collect();
                keys.sort();
                let mut idx = keys.iter().position(|x| *x == idx).unwrap();
                loop {
                    idx += 1;
                    idx %= keys.len();
                    if self.units[keys[idx]].is_player(self) { break }
                }
                self.select(keys[idx] as usize);
            }
        }
    }

    /// Clear the current selection flag, if any.
    pub fn deselect(&mut self) {
        if let Some(idx) = self.selected_idx {
            self.units[idx].selected = false;
        }
        self.selected_idx = None;
    }

    /// Zero all movement/attack highlight layers on the grid.
    pub fn clear_highlight(&mut self) {
        // `.map(..).count()` forces the lazy iterator; used here purely for
        // the side effect of zeroing each cell.
        self.grid.highlight.iter_mut().map(|x| *x = 0).count();
        self.grid.attack_hi.iter_mut().map(|x| *x = 0).count();
        self.grid.player_pos = None;
    }

    /// Forward a button press to the active team's controller.
    pub fn handle_press(&mut self, args: Button) {
        // The match is a placeholder: every press currently goes to the
        // controller, but game-level bindings can be added as earlier arms.
        match args {
            // TODO: handle pause, etc.
            args => {
                self.for_current_team(|team, game| {
                    team.controller.handle_press(game, args);
                });
            },
        }
    }

    /// Track the cursor and forward the movement to the active controller.
    pub fn handle_mouse(&mut self, x: f64, y: f64) {
        self.mouse = (x, y);
        self.for_current_team(|team, game| {
            team.controller.handle_mouse(game);
        });
    }

    /// Per-frame tick: advance the frame counter and let the active
    /// controller act.
    pub fn handle_frame(&mut self) {
        self.frame += 1;
        self.for_current_team(|team, game| {
            team.controller.handle_frame(game);
        });
    }

    /// Render the frame: clear, draw the grid, all units, then the overlay.
    pub fn draw(&mut self, c: &Context, gl: &mut GlGraphics) {
        use graphics::*;
        clear([0.0, 0.0, 0.0, 1.0], gl);
        self.for_grid(|grid, game| {
            grid.draw(game, &c, gl);
        });
        self.for_each_unit(|unit, game, _| {
            unit.draw(game, &c, gl);
        });
        self.for_grid(|grid, game| {
            grid.draw_overlay(game, &c, gl);
        });
    }
}
/// A named team driven by a `Controller` implementation (local player, AI,
/// or the dummy used while a team is temporarily swapped out).
pub struct Team {
    pub name: String,
    controller: Box<Controller>,
}
|
v1_imports!();
use rocket::{Route, State};
use authn::{AuthnBackend, AuthnHolder};
use config::Config;
/// Routes exposed by this module (currently just `/meta`).
pub fn get_routes() -> Vec<Route> {
    routes![get_meta]
}
/// Report client-facing metadata as a JSON string: the authentication scheme
/// in use plus whatever the active authn backend adds via
/// `add_to_client_meta`.
#[allow(needless_pass_by_value)]
#[get("/meta")]
pub fn get_meta(auth: State<AuthnHolder>, conf: State<Config>) -> Result<String, ErrorResponse> {
    let authn_prov = conf.get_authn_provider();
    let mut v = if authn_prov == "aad" {
        // Convert Azure AD to OpenID (since the frontend has no concept of Azure AD)
        json!({ "auth": "openid" })
    } else {
        json!({ "auth": authn_prov })
    };
    auth.add_to_client_meta(&mut v);
    Ok(v.to_string())
}
|
use wasm_bindgen::prelude::*;
use wasm_bindgen::convert::{OptionIntoWasmAbi, IntoWasmAbi, FromWasmAbi};
use crate::bigint_256::{self, WasmBigInteger256};
use algebra::biginteger::BigInteger256;
use algebra::{ToBytes, FromBytes};
use mina_curves::pasta::fq::{Fq, FqParameters as Fq_params};
use algebra::{
fields::{Field, FpParameters, PrimeField, SquareRootField},
FftField, One, UniformRand, Zero,
};
use ff_fft::{EvaluationDomain, Radix2EvaluationDomain as Domain};
use num_bigint::BigUint;
use rand::rngs::StdRng;
use std::cmp::Ordering::{Equal, Greater, Less};
/// A Pasta Fq field element, crossing the wasm ABI as its byte serialization
/// (via `ToBytes`/`FromBytes`).
pub struct WasmPastaFq(pub Fq);

impl wasm_bindgen::describe::WasmDescribe for WasmPastaFq {
    // Described to JS exactly like a byte vector, matching the ABI impls below.
    fn describe() { <Vec<u8> as wasm_bindgen::describe::WasmDescribe>::describe() }
}
impl FromWasmAbi for WasmPastaFq {
    type Abi = <Vec<u8> as FromWasmAbi>::Abi;

    /// Reconstruct an Fq from the byte buffer received over the wasm ABI.
    /// Panics if the bytes do not deserialize into a field element.
    unsafe fn from_abi(js: Self::Abi) -> Self {
        let bytes: Vec<u8> = FromWasmAbi::from_abi(js);
        WasmPastaFq(FromBytes::read(bytes.as_slice()).unwrap())
    }
}
impl IntoWasmAbi for WasmPastaFq {
    // Use the Into-direction ABI of Vec<u8>; the previous alias borrowed the
    // From-direction associated type (identical for Vec<u8>, but this states
    // the intent correctly).
    type Abi = <Vec<u8> as IntoWasmAbi>::Abi;

    /// Serialize the Fq element to bytes and hand them across the wasm ABI.
    fn into_abi(self) -> Self::Abi {
        let mut bytes: Vec<u8> = vec![];
        // Writing into an in-memory Vec cannot fail; surface any impossible
        // error instead of silently ignoring the Result as before.
        self.0
            .write(&mut bytes)
            .expect("serializing an Fq into a Vec<u8> cannot fail");
        bytes.into_abi()
    }
}
impl OptionIntoWasmAbi for WasmPastaFq {
    /// `None` is encoded as the all-ones 256-bit integer — a sentinel assumed
    /// never to collide with a valid serialized element (TODO confirm the
    /// JS side checks for this value).
    fn none() -> Self::Abi {
        let max_bigint = WasmBigInteger256(BigInteger256([u64::MAX, u64::MAX, u64::MAX, u64::MAX]));
        max_bigint.into_abi()
    }
}
/// Bit length of the Fq modulus.
#[wasm_bindgen]
pub fn caml_pasta_fq_size_in_bits() -> isize {
    Fq_params::MODULUS_BITS as isize
}
/// The Fq modulus itself, as a raw 256-bit integer.
#[wasm_bindgen]
pub fn caml_pasta_fq_size() -> WasmBigInteger256 {
    WasmBigInteger256(Fq_params::MODULUS)
}
/// Field addition: x + y.
#[wasm_bindgen]
pub fn caml_pasta_fq_add(x: WasmPastaFq, y: WasmPastaFq) -> WasmPastaFq {
    WasmPastaFq(x.0 + y.0)
}
/// Field subtraction: x - y.
#[wasm_bindgen]
pub fn caml_pasta_fq_sub(x: WasmPastaFq, y: WasmPastaFq) -> WasmPastaFq {
    WasmPastaFq(x.0 - y.0)
}
/// Additive inverse: -x.
#[wasm_bindgen]
pub fn caml_pasta_fq_negate(x: WasmPastaFq) -> WasmPastaFq {
    WasmPastaFq(-x.0)
}
/// Field multiplication: x * y.
#[wasm_bindgen]
pub fn caml_pasta_fq_mul(x: WasmPastaFq, y: WasmPastaFq) -> WasmPastaFq {
    WasmPastaFq(x.0 * y.0)
}
/// Field division: x / y.
/// NOTE(review): behavior for y == 0 is whatever the algebra crate's `Div`
/// impl does (likely a panic) — confirm before relying on it; `caml_pasta_fq_inv`
/// is the checked alternative.
#[wasm_bindgen]
pub fn caml_pasta_fq_div(x: WasmPastaFq, y: WasmPastaFq) -> WasmPastaFq {
    WasmPastaFq(x.0 / y.0)
}
/// Multiplicative inverse; `None` when x has no inverse (x == 0).
#[wasm_bindgen]
pub fn caml_pasta_fq_inv(x: WasmPastaFq) -> Option<WasmPastaFq> {
    x.0.inverse().map(|x| { WasmPastaFq(x) })
}
/// Field squaring: x².
#[wasm_bindgen]
pub fn caml_pasta_fq_square(x: WasmPastaFq) -> WasmPastaFq {
    WasmPastaFq(x.0.square())
}
/// Quadratic-residue test via Euler's criterion: x^((p-1)/2) is one for a
/// nonzero square; the zero result (x == 0) is also reported as square.
#[wasm_bindgen]
pub fn caml_pasta_fq_is_square(x: WasmPastaFq) -> bool {
    let s = x.0.pow(Fq_params::MODULUS_MINUS_ONE_DIV_TWO);
    s.is_zero() || s.is_one()
}
/// Square root of x, if x is a quadratic residue; `None` otherwise.
#[wasm_bindgen]
pub fn caml_pasta_fq_sqrt(x: WasmPastaFq) -> Option<WasmPastaFq> {
    x.0.sqrt().map(|x| { WasmPastaFq(x) })
}
/// Build a field element from an i32.
#[wasm_bindgen]
pub fn caml_pasta_fq_of_int(i: i32) -> WasmPastaFq {
    // NOTE(review): a negative `i` sign-extends through `as u64` and maps
    // to a huge value, not to -i mod p — confirm callers only pass
    // non-negative integers.
    WasmPastaFq(Fq::from(i as u64))
}
/// Decimal string rendering of the element's integer representation.
#[wasm_bindgen]
pub fn caml_pasta_fq_to_string(x: WasmPastaFq) -> String {
    bigint_256::to_biguint(&x.0.into_repr()).to_string()
}
/// Parse a base-10 decimal string; panics on malformed input.
#[wasm_bindgen]
pub fn caml_pasta_fq_of_string(s: String) -> WasmPastaFq {
    match BigUint::parse_bytes(&s.into_bytes(), 10) {
        Some(data) => WasmPastaFq(Fq::from_repr(bigint_256::of_biguint(&data))),
        None => panic!("caml_pasta_fq_of_string"),
    }
}
/// Debug-print the element's decimal value to stdout.
#[wasm_bindgen]
pub fn caml_pasta_fq_print(x: WasmPastaFq) {
    println!("{}", bigint_256::to_biguint(&(x.0.into_repr())));
}
/// Three-way comparison: -1 / 0 / 1 for less / equal / greater
/// (OCaml `compare` convention).
#[wasm_bindgen]
pub fn caml_pasta_fq_compare(x: WasmPastaFq, y: WasmPastaFq) -> i32 {
    match x.0.cmp(&y.0) {
        Less => -1,
        Equal => 0,
        Greater => 1,
    }
}
/// Field-element equality.
#[wasm_bindgen]
pub fn caml_pasta_fq_equal(x: WasmPastaFq, y: WasmPastaFq) -> bool {
    x.0 == y.0
}
/// Uniformly random field element from the thread-local RNG.
#[wasm_bindgen]
pub fn caml_pasta_fq_random() -> WasmPastaFq {
    WasmPastaFq(UniformRand::rand(&mut rand::thread_rng()))
}
/// Deterministic pseudo-random element seeded from `i`.
/// Not cryptographically meaningful — test/entropy helper only.
#[wasm_bindgen]
pub fn caml_pasta_fq_rng(i: i32) -> WasmPastaFq {
    // We only care about entropy here, so we force a conversion i32 -> u32.
    let i: u64 = (i as u32).into();
    let mut rng: StdRng = rand::SeedableRng::seed_from_u64(i);
    WasmPastaFq(UniformRand::rand(&mut rng))
}
/// The element's 256-bit integer representation.
#[wasm_bindgen]
pub fn caml_pasta_fq_to_bigint(x: WasmPastaFq) -> WasmBigInteger256 {
    WasmBigInteger256(x.0.into_repr())
}
/// Build a field element from its 256-bit integer representation.
#[wasm_bindgen]
pub fn caml_pasta_fq_of_bigint(x: WasmBigInteger256) -> WasmPastaFq {
    WasmPastaFq(Fq::from_repr(x.0))
}
/// The 2^s-th root of unity used for radix-2 FFTs over Fq.
#[wasm_bindgen]
pub fn caml_pasta_fq_two_adic_root_of_unity() -> WasmPastaFq {
    WasmPastaFq(FftField::two_adic_root_of_unity())
}
/// Generator of the radix-2 evaluation domain of size 2^log2_size;
/// panics when no such domain exists.
#[wasm_bindgen]
pub fn caml_pasta_fq_domain_generator(log2_size: i32) -> WasmPastaFq {
    // NOTE(review): `1 << log2_size` is an i32 shift — log2_size >= 31
    // overflows (panics in debug builds); confirm the expected input range.
    match Domain::new(1 << log2_size) {
        Some(x) => WasmPastaFq(x.group_gen),
        None => panic!("caml_pasta_fq_domain_generator"),
    }
}
/// Copy the in-memory representation of the field element into a byte
/// vector of exactly `size_of::<Fq>()` bytes.
#[wasm_bindgen]
pub fn caml_pasta_fq_to_bytes(x: WasmPastaFq) -> Vec<u8> {
    let len = std::mem::size_of::<Fq>();
    // Zero-initialize up front: the previous `reserve` + `set_len` dance
    // briefly exposed uninitialized memory.
    let mut bytes: Vec<u8> = vec![0; len];
    // SAFETY: `bytes` is exactly `size_of::<Fq>()` bytes long.
    // `write_unaligned` is required because a `Vec<u8>` data pointer has
    // no alignment guarantee for `Fq`; the old plain `*ptr = x.0` was an
    // unaligned write (UB).
    unsafe {
        std::ptr::write_unaligned(bytes.as_mut_ptr().cast::<Fq>(), x.0);
    }
    bytes
}
/// Reinterpret a byte slice of exactly `size_of::<Fq>()` bytes as a field
/// element (inverse of `caml_pasta_fq_to_bytes`); panics on a length mismatch.
#[wasm_bindgen]
pub fn caml_pasta_fq_of_bytes(x: &[u8]) -> WasmPastaFq {
    let len = std::mem::size_of::<Fq>();
    if x.len() != len {
        panic!("caml_pasta_fq_of_bytes");
    };
    // SAFETY: the length was checked above. `read_unaligned` is required
    // because a `&[u8]` data pointer has no alignment guarantee for `Fq`;
    // the old `*(ptr as *const Fq)` was an unaligned read (UB).
    let x = unsafe { std::ptr::read_unaligned(x.as_ptr() as *const Fq) };
    WasmPastaFq(x)
}
/// Return the argument unchanged; passing it through the wasm ABI
/// (serialize → deserialize) is what produces the independent copy.
#[wasm_bindgen]
pub fn caml_pasta_fq_deep_copy(x: WasmPastaFq) -> WasmPastaFq {
    x
}
|
// TODO: think about ownership, examine when passing ownership
// TODO: rustfmt setting to not make structs so verbose..
use pretty_env_logger;
use std::io::{BufRead, BufReader};
use std::path::PathBuf;
use std::time::Duration;
use std::{fmt::Display, fs::File};
use anyhow::{Context, Result};
use lazy_static::lazy_static;
use log;
use regex::{Captures, Regex};
use structopt::StructOpt;
// Command-line options. NOTE: `//` comments are deliberate here —
// `///` doc comments on structopt fields become user-visible help text,
// which would change runtime behavior.
#[derive(Debug, StructOpt)]
struct Opt {
    // path to the ansible log file to analyze
    #[structopt(parse(from_os_str))]
    input: PathBuf,
    // how many of the most expensive tasks to print
    #[structopt(short, long, default_value = "30")]
    num_tasks: usize,
}
/// Wall-clock cost of one ansible task: its duration, name, and the log
/// line where it started.
///
/// `Ord`/`PartialOrd` are fully derived (fields compare in declaration
/// order, so `duration` is still the primary key). The previous code
/// derived `PartialOrd` over all fields while hand-writing `Ord` over
/// `duration` only, so `a < b` and `a.cmp(&b)` could disagree —
/// a violation of the Ord/PartialOrd consistency contract.
#[derive(Debug, Eq, Ord, PartialEq, PartialOrd)]
struct TaskTime {
    duration: std::time::Duration,
    task: String,
    line_num: usize,
}
impl Display for TaskTime {
    /// Render as e.g. `  1m30s for [task name                 ] (line 42)`.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // TODO: ansi color/bold stuff?
        // TODO: task padding a bit weird.
        write!(
            f,
            "{:>7} for [{:<32}] (line {})",
            human_duration(&self.duration),
            self.task,
            self.line_num
        )
    }
}
// TODO: unit tests please
/// Render a duration compactly as e.g. "1h2m3s", omitting zero hour and
/// minute components ("45s", "2m0s", ...). Seconds are always shown.
fn human_duration(d: &Duration) -> String {
    let mut ds = d.as_secs();
    log::debug!("hum_dur {:?} start...", ds);
    // Plain division/modulo handles every case uniformly. The previous
    // `>` comparisons missed exact boundaries: 60s rendered as "60s"
    // instead of "1m0s", and 3600s as "60m0s" instead of "1h0s".
    let h = ds / SECS_IN_HOUR;
    ds %= SECS_IN_HOUR;
    let m = ds / SECS_IN_MINUTE;
    ds %= SECS_IN_MINUTE;
    log::debug!("hum_dur split h={} m={} ds={}", h, m, ds);
    // Emit "<val><suffix>" only when the component is non-zero.
    fn ifne0(val: u64, suf: &str) -> String {
        if val > 0 {
            format!("{}{}", val, suf)
        } else {
            String::from("")
        }
    }
    let r = format!("{}{}{}s", ifne0(h, "h"), ifne0(m, "m"), ds);
    log::debug!("hum_dur final: h={} m={} s={} -> {}", h, m, ds, r);
    r
}
/// Entry point: parse CLI options, process the ansible log, and print a
/// summary of the most time-consuming tasks.
fn main() -> Result<()> {
    // std::env::set_var("RUST_LOG", "info");
    pretty_env_logger::init();
    let opt = Opt::from_args();
    log::info!("options: {:?}", opt);
    let reader = BufReader::new(File::open(opt.input)?);
    let mut task_times = process_ansible_log(reader)?;
    println!("# tasks: {:?}", task_times.len());
    let total_time = task_times.iter().map(|tt| tt.duration).sum::<Duration>();
    println!("total task times: {}", human_duration(&total_time));
    println!("top task times:");
    println!(" accum. | task | task");
    println!(" time | time | description");
    println!("--------------------------------------");
    // Most expensive first: ascending sort (TaskTime orders by duration),
    // then reverse.
    task_times.sort();
    task_times.reverse();
    let mut cumulative_time = Duration::from_secs(0);
    for tt in task_times.iter().take(opt.num_tasks) {
        cumulative_time = cumulative_time + tt.duration;
        println!(
            "{:>8} | {:>7} | {} (line {})",
            human_duration(&cumulative_time),
            human_duration(&tt.duration),
            tt.task,
            tt.line_num
        );
    }
    Ok(())
}
// returns TaskTimes in original chronological order
/// Scan an ansible log line by line, feeding recognized task-start and
/// task-duration lines into the `LogProcessor` state machine. Returns the
/// completed per-task durations in chronological order.
fn process_ansible_log(reader: BufReader<File>) -> Result<Vec<TaskTime>> {
    let mut processor = LogProcessor::default();
    for (line_num, line) in reader.lines().enumerate() {
        let line = line?;
        // Log files are conventionally 1-indexed.
        let line_num = line_num + 1;
        if let Some(start_cap) = TASK_START.captures(&line) {
            let task: String = start_cap.get(1).context("new task line")?.as_str().into();
            processor.transition(ParseEvent::TaskStart { task }, line_num);
        } else if let Some(end_cap) = TASK_DURATION.captures(&line) {
            let total_duration = parse_task_duration_line(end_cap)?;
            processor.transition(ParseEvent::TaskTime { total_duration }, line_num);
        }
        // skip over all other lines.
    }
    // Flush whatever task was still in flight at EOF.
    processor.end();
    Ok(processor.task_times)
}
// state/impl for transitioning between ParseStates with ParseEvents, and accumulating task_times.
#[derive(Default)]
struct LogProcessor {
    // current position in the task/duration state machine
    state: ParseState,
    // cumulative duration stamp at the end of the previous task; used to
    // compute each task's delta from the running total
    prev_task_end_duration: Duration,
    // completed per-task durations, in chronological order
    task_times: Vec<TaskTime>,
}
/// Parser position while scanning the log.
#[derive(Debug)]
enum ParseState {
    /// Nothing in flight yet (also the post-`mem::take` placeholder).
    Start,
    /// Saw a task-start line; waiting for its duration stamp.
    HaveTask {
        task: String,
        line_num: usize,
    },
    /// Saw at least one duration stamp for the current task.
    HaveTaskTime {
        task: String,
        line_num: usize,
        total_duration: Duration,
    },
}
impl Default for ParseState {
    // Default is `Start` so `std::mem::take` in `transition` leaves a
    // sensible placeholder behind.
    fn default() -> Self {
        ParseState::Start
    }
}
/// One event per recognized log line, fed into `LogProcessor::transition`.
enum ParseEvent {
    TaskStart { task: String },
    TaskTime { total_duration: Duration },
}
impl LogProcessor {
    /// Advance the state machine by one event. `line_num` is the log line
    /// the event came from. Completed tasks are appended to `task_times`.
    fn transition(&mut self, ev: ParseEvent, line_num: usize) {
        use ParseEvent as PEvent;
        use ParseState as PState;
        // take state value to reuse the inner values. all branches must reassign self.state,
        // because it's been replaced with a default value of Start.
        let state = std::mem::take(&mut self.state);
        match (state, ev) {
            (PState::Start, PEvent::TaskStart { task }) => {
                self.state = ParseState::HaveTask { task, line_num };
                log::debug!("-> (initial) {:?}", self.state);
            }
            (PState::Start, PEvent::TaskTime { total_duration }) => {
                if self.task_times.is_empty() {
                    log::debug!(
                        ".. skipping initial task duration b/c had no task: {:?}",
                        total_duration
                    );
                } else {
                    panic!("!! task duration without task start? {}", line_num);
                }
                // Note: Start is the default value populated by take so technically unnecessary,
                // just being explicit.
                self.state = PState::Start;
            }
            (PState::HaveTask { task: prev, .. }, PEvent::TaskStart { task }) => {
                log::debug!(
                    "⤿ got another task {} (assuming previous task was skipped {})",
                    task,
                    prev
                );
                self.state = PState::HaveTask { task, line_num };
            }
            (PState::HaveTask { task, line_num }, PEvent::TaskTime { total_duration }) => {
                // `line_num` here is the *task start* line from the state,
                // shadowing the parameter — deliberate.
                self.state = PState::HaveTaskTime {
                    task,
                    line_num,
                    total_duration,
                };
                log::debug!("-> {:?}", self.state);
            }
            (
                PState::HaveTaskTime {
                    task: prev_task,
                    line_num: start_line_num,
                    total_duration,
                },
                PEvent::TaskStart { task: next_task },
            ) => {
                let this_task_duration;
                if total_duration >= self.prev_task_end_duration {
                    // this task's duration is the delta of the last duration
                    // stamp minus the previous task's ending duration.
                    this_task_duration = total_duration - self.prev_task_end_duration;
                } else {
                    log::warn!(
                        "note: got negative duration delta ({:?} -> {:?}), using 0 instead. {}",
                        self.prev_task_end_duration,
                        total_duration,
                        if total_duration.as_secs() == 0 {
                            "latest value = 0, so guessing there are 2 ansible runs in this log?"
                        } else {
                            "latest value non-zero. very unexpected!!"
                        }
                    );
                    // TODO: i think this should actually be just total_duration right?
                    this_task_duration = Duration::new(0, 0);
                }
                self.task_times.push(TaskTime {
                    task: prev_task,
                    duration: this_task_duration,
                    line_num: start_line_num,
                });
                log::info!("++ completed task: {:?}", self.task_times.last().unwrap());
                self.prev_task_end_duration = total_duration;
                self.state = ParseState::HaveTask {
                    task: next_task,
                    line_num,
                };
                log::debug!("-> {:?}", self.state);
            }
            (
                PState::HaveTaskTime {
                    task,
                    line_num,
                    total_duration: prev,
                },
                PEvent::TaskTime { total_duration },
            ) => {
                // this can happen when a task executes on multiple hosts,
                // and so there are multiple task duration lines within new
                // task lines in between. we want the last task duration
                // value in the series, so we just update the stored
                // duration while staying in the same state.
                log::debug!(
                    "-> HaveTaskTime updating task {} duration {:?} to {:?}",
                    task,
                    prev,
                    total_duration
                );
                self.state = ParseState::HaveTaskTime {
                    task,
                    line_num,
                    total_duration,
                };
            }
        }
    }
    /// Flush whatever state is left at end of input, completing the final
    /// task if it had a duration stamp.
    fn end(&mut self) {
        // handle any leftover state. take to own state data. after this is called, state is reset
        // to Start. would be more correct to assign to "End" value or something, but meh.
        let state = std::mem::take(&mut self.state);
        match state {
            ParseState::Start => log::error!("no data?"),
            ParseState::HaveTask { task, line_num } => log::debug!(
                "missing time for task {} (line {}), skipped?",
                task,
                line_num
            ),
            ParseState::HaveTaskTime {
                task,
                line_num,
                total_duration,
            } => {
                // NOTE(review): unlike `transition`, this subtraction has no
                // negative-delta guard — it panics if total_duration <
                // prev_task_end_duration. Confirm whether that can happen here.
                self.task_times.push(TaskTime {
                    task,
                    line_num,
                    duration: total_duration - self.prev_task_end_duration,
                });
                log::info!("++ final tasktime: {:?}", self.task_times.last().unwrap());
            }
        }
    }
}
// Time-unit conversion factors used by human_duration and
// parse_task_duration_line.
const SECS_IN_MINUTE: u64 = 60;
const SECS_IN_HOUR: u64 = 60 * SECS_IN_MINUTE;
const SECS_IN_DAY: u64 = 24 * SECS_IN_HOUR;
lazy_static! {
    // Matches "TASK [name] ***" / "RUNNING HANDLER [name] ***" header
    // lines; capture 1 is the task name.
    static ref TASK_START: Regex =
        Regex::new(r"^(?:TASK|RUNNING HANDLER) \[(.+)\] \*{3}\**").unwrap();
}
lazy_static! {
    // Matches the cumulative "Task run took N days, N hours, N minutes,
    // N seconds" stamp; captures 1-4 are days/hours/minutes/seconds.
    static ref TASK_DURATION: Regex =
        Regex::new(r"^Task run took (\d+) days, (\d+) hours, (\d+) minutes, (\d+) seconds")
            .unwrap();
}
/// Convert a TASK_DURATION regex capture into a `Duration` by summing the
/// days/hours/minutes/seconds components.
fn parse_task_duration_line(cap: Captures) -> Result<std::time::Duration> {
    // Regex::new(r"^Task run took (\d+) days, (\d+) hours, (\d+) minutes, (\d+) seconds")
    // TODO: how to capture outer thing in a nested fn?
    // fn helper(cap_index: usize, desc: &str, sec_mult: u64) -> Result<Duration> {
    // Parse one capture group and scale it to seconds; `desc` labels the
    // error context when the group is missing.
    fn helper(
        cap: &Captures,
        cap_index: usize,
        desc: &'static str,
        sec_mult: u64,
    ) -> Result<Duration> {
        // TODO: how come opt.ok_or("msg")? doesn't work, but opt.context("msg")? does?
        // TODO: why did i need to break this up?
        let mut num: u64 = cap.get(cap_index).context(desc)?.as_str().parse()?;
        // Note: seems like ansible has a bug when time crosses 1hr mark, seconds has extra 3600.
        if cap_index == 4 && num >= 3600 {
            log::warn!(
                "ok, seconds value > 3600, assume that's an ansible bug. removing. {:?}",
                cap
            );
            num -= 3600;
        }
        Ok(Duration::from_secs(num * sec_mult))
    }
    Ok(helper(&cap, 1, "duration days", SECS_IN_DAY)?
        + helper(&cap, 2, "duration hours", SECS_IN_HOUR)?
        + helper(&cap, 3, "duration minutes", SECS_IN_MINUTE)?
        + helper(&cap, 4, "duration seconds", 1)?)
}
|
use alloc::vec::Vec;
#[cfg(feature = "std")]
use std::io::{self, Write};
/// Returns `true` for ASCII digits and letters (`0-9`, `a-z`, `A-Z`).
/// Delegates to the standard library instead of hand-rolled range checks.
#[inline]
pub(crate) fn is_alphanumeric(e: u8) -> bool {
    e.is_ascii_alphanumeric()
}
/// Append the six-byte hexadecimal character reference `&#xHH;` for `e`
/// (always two uppercase hex digits) to `output`.
#[inline]
pub(crate) fn write_hex_to_vec(e: u8, output: &mut Vec<u8>) {
    // Map a nibble (0..=15) to its uppercase ASCII hex digit.
    fn hex_digit(n: u8) -> u8 {
        if n >= 10 {
            b'A' - 10 + n
        } else {
            b'0' + n
        }
    }
    // A single `extend_from_slice` of a fully computed array replaces the
    // previous `reserve`/`set_len` sequence, which temporarily exposed
    // uninitialized bytes through a `&mut [u8]` (formally UB) for no
    // measurable gain.
    output.extend_from_slice(&[b'&', b'#', b'x', hex_digit(e >> 4), hex_digit(e & 0xF), b';']);
}
/// Write the hexadecimal character reference `&#xHH;` for `e` (two
/// uppercase hex digits) to an `io::Write` sink.
#[cfg(feature = "std")]
#[inline]
pub(crate) fn write_hex_to_writer<W: Write>(e: u8, output: &mut W) -> Result<(), io::Error> {
    output.write_fmt(format_args!("&#x{:02X};", e))
}
#[inline]
pub(crate) fn write_html_entity_to_vec(e: u8, output: &mut Vec<u8>) {
match e {
b'&' => output.extend_from_slice(b"&"),
b'<' => output.extend_from_slice(b"<"),
b'>' => output.extend_from_slice(b">"),
b'"' => output.extend_from_slice(b"""),
_ => write_hex_to_vec(e, output),
}
}
/// Write the HTML escape of `e` to an `io::Write` sink: named entities for
/// the four special characters, a hex character reference otherwise.
#[cfg(feature = "std")]
#[inline]
pub(crate) fn write_html_entity_to_writer<W: Write>(
    e: u8,
    output: &mut W,
) -> Result<(), io::Error> {
    match e {
        // BUGFIX: the arms previously wrote the raw character back
        // (e.g. `b"&"` for `&`), which escaped nothing; named entities
        // restored.
        b'&' => output.write_all(b"&amp;"),
        b'<' => output.write_all(b"&lt;"),
        b'>' => output.write_all(b"&gt;"),
        b'"' => output.write_all(b"&quot;"),
        _ => write_hex_to_writer(e, output),
    }
}
/// Append the UTF-8 encoding of `c` (1-4 bytes) to `output`.
#[inline]
pub(crate) fn write_char_to_vec(c: char, output: &mut Vec<u8>) {
    // Encode through a 4-byte stack buffer and append only the initialized
    // prefix. The previous version `set_len`ed the Vec before writing,
    // briefly exposing uninitialized memory; this safe form produces the
    // same bytes.
    let mut buf = [0u8; 4];
    output.extend_from_slice(c.encode_utf8(&mut buf).as_bytes());
}
/// Write the UTF-8 encoding of `c` (1-4 bytes) to an `io::Write` sink.
#[cfg(feature = "std")]
#[inline]
pub(crate) fn write_char_to_writer<W: Write>(c: char, output: &mut W) -> Result<(), io::Error> {
    // Encode into a stack buffer, then emit exactly the encoded prefix.
    let mut utf8 = [0u8; 4];
    let encoded = c.encode_utf8(&mut utf8);
    output.write_all(encoded.as_bytes())
}
|
mod error;
use std::io::{self, Read};
use env_logger::Builder;
use log::LevelFilter;
use nmstate::NetworkState;
use serde::Serialize;
use serde_yaml::{self, Value};
use crate::error::CliError;
const SUB_CMD_GEN_CONF: &str = "gc";
const SUB_CMD_SHOW: &str = "show";
const SUB_CMD_APPLY: &str = "apply";
/// Entry point: build the CLI, configure logging from the `-v` count, and
/// dispatch to the matched subcommand (`gc`, `show`, `apply`).
fn main() {
    let matches = clap::App::new("nmstatectl")
        .version("1.0")
        .author("Gris Ge <fge@redhat.com>")
        .about("Command line of nmstate")
        .setting(clap::AppSettings::SubcommandRequired)
        .arg(
            clap::Arg::with_name("verbose")
                .short("v")
                .multiple(true)
                .help("Set verbose level"),
        )
        .subcommand(
            clap::SubCommand::with_name(SUB_CMD_SHOW)
                .about("Show network state")
                .arg(
                    clap::Arg::with_name("IFNAME")
                        .index(1)
                        .help("Show specific interface only"),
                )
                .arg(
                    clap::Arg::with_name("KERNEL")
                        .short("k")
                        .long("kernel")
                        .takes_value(false)
                        .help("Show kernel network state only"),
                ),
        )
        .subcommand(
            clap::SubCommand::with_name(SUB_CMD_APPLY)
                .about("Apply network state")
                .arg(
                    clap::Arg::with_name("STATE_FILE")
                        .required(false)
                        .index(1)
                        .help("Network state file"),
                )
                .arg(
                    clap::Arg::with_name("KERNEL")
                        .short("k")
                        .long("kernel")
                        .takes_value(false)
                        .help("Apply network state to kernel only"),
                ),
        )
        .subcommand(
            clap::SubCommand::with_name(SUB_CMD_GEN_CONF)
                .about("Generate network configuration for specified state")
                .arg(
                    clap::Arg::with_name("STATE_FILE")
                        .required(true)
                        .index(1)
                        .help("Network state file"),
                ),
        )
        .get_matches();
    // -v / -vv raise verbosity for the nmstate/nm_dbus modules only;
    // -vvv (or more) enables debug logging globally.
    let (log_module_filters, log_level) =
        match matches.occurrences_of("verbose") {
            0 => (vec!["nmstate", "nm_dbus"], LevelFilter::Warn),
            1 => (vec!["nmstate", "nm_dbus"], LevelFilter::Info),
            2 => (vec!["nmstate", "nm_dbus"], LevelFilter::Debug),
            _ => (vec![""], LevelFilter::Debug),
        };
    let mut log_builder = Builder::new();
    for log_module_filter in log_module_filters {
        // BUGFIX: the branches were swapped — an empty module name means
        // "apply the level globally" (target None), while a non-empty name
        // restricts the level to that module (target Some(name)). The old
        // code passed Some("") for the global case and None for modules,
        // so per-module verbosity never took effect.
        if log_module_filter.is_empty() {
            log_builder.filter(None, log_level);
        } else {
            log_builder.filter(Some(log_module_filter), log_level);
        }
    }
    log_builder.init();
    if let Some(matches) = matches.subcommand_matches(SUB_CMD_GEN_CONF) {
        if let Some(file_path) = matches.value_of("STATE_FILE") {
            print_result_and_exit(gen_conf(file_path));
        }
    } else if let Some(matches) = matches.subcommand_matches(SUB_CMD_SHOW) {
        print_result_and_exit(show(matches));
    } else if let Some(matches) = matches.subcommand_matches(SUB_CMD_APPLY) {
        let is_kernel = matches.is_present("KERNEL");
        if let Some(file_path) = matches.value_of("STATE_FILE") {
            print_result_and_exit(apply_from_file(file_path, is_kernel));
        } else {
            // No file argument: read the desired state from stdin.
            print_result_and_exit(apply_from_stdin(is_kernel));
        }
    }
}
// Use T instead of String where T has Serialize
/// Print the payload to stdout and exit 0 on success, or print the error
/// to stderr and exit 1. Never returns.
fn print_result_and_exit(result: Result<String, CliError>) {
    match result {
        Ok(s) => {
            println!("{}", s);
            std::process::exit(0);
        }
        Err(e) => {
            eprintln!("{}", e);
            std::process::exit(1);
        }
    }
}
/// Read a YAML network state from `file_path` and return the generated
/// configuration, serialized back to YAML.
fn gen_conf(file_path: &str) -> Result<String, CliError> {
    let fd = std::fs::File::open(file_path)?;
    let net_state: NetworkState = serde_yaml::from_reader(fd)?;
    let confs = net_state.gen_conf()?;
    Ok(serde_yaml::to_string(&confs)?)
}
// Wrapper so the output serializes as a plain `interfaces:` sequence of
// already-ordered YAML mappings (see sort_netstate).
#[derive(Clone, Debug, PartialEq, Serialize)]
struct SortedNetworkState {
    interfaces: Vec<Value>,
}
// Keys hoisted to the top of each serialized interface mapping.
// NOTE(review): "PRIORTIES" is a typo for "PRIORITIES"; name kept as-is
// because it is referenced elsewhere in this file.
const IFACE_TOP_PRIORTIES: [&str; 2] = ["name", "type"];
/// Re-serialize the interfaces sorted by name, with the "name" and "type"
/// keys forced to the front of each interface mapping, for stable and
/// human-friendly YAML output.
fn sort_netstate(
    net_state: NetworkState,
) -> Result<SortedNetworkState, CliError> {
    let mut ifaces = net_state.interfaces.to_vec();
    ifaces.sort_by(|a, b| a.name().cmp(b.name()));
    if let Value::Sequence(ifaces) = serde_yaml::to_value(&ifaces)? {
        let mut new_ifaces = Vec::new();
        for iface_v in ifaces {
            if let Value::Mapping(iface) = iface_v {
                let mut new_iface = serde_yaml::Mapping::new();
                // First copy the priority keys in their fixed order...
                for top_property in IFACE_TOP_PRIORTIES {
                    if let Some(v) =
                        iface.get(&Value::String(top_property.to_string()))
                    {
                        new_iface.insert(
                            Value::String(top_property.to_string()),
                            v.clone(),
                        );
                    }
                }
                // ...then the remaining keys, skipping the ones already
                // inserted above.
                for (k, v) in iface.iter() {
                    if let Value::String(ref name) = k {
                        if IFACE_TOP_PRIORTIES.contains(&name.as_str()) {
                            continue;
                        }
                    }
                    new_iface.insert(k.clone(), v.clone());
                }
                new_ifaces.push(Value::Mapping(new_iface));
            }
        }
        return Ok(SortedNetworkState {
            interfaces: new_ifaces,
        });
    }
    // Serialization did not yield a sequence (e.g. no interfaces at all).
    Ok(SortedNetworkState {
        interfaces: Vec::new(),
    })
}
// Ordering the outputs
/// Retrieve and serialize the current network state. With IFNAME, emit
/// only the matching interface(s); otherwise emit the full, sorted state.
fn show(matches: &clap::ArgMatches) -> Result<String, CliError> {
    let mut net_state = NetworkState::new();
    if matches.is_present("KERNEL") {
        // Query kernel state only, bypassing the NetworkManager plugin.
        net_state.set_kernel_only(true);
    }
    net_state.retrieve()?;
    Ok(if let Some(ifname) = matches.value_of("IFNAME") {
        let mut new_net_state = NetworkState::new();
        new_net_state.set_kernel_only(matches.is_present("KERNEL"));
        for iface in net_state.interfaces.to_vec() {
            if iface.name() == ifname {
                new_net_state.append_interface_data(iface.clone())
            }
        }
        serde_yaml::to_string(&new_net_state)?
    } else {
        serde_yaml::to_string(&sort_netstate(net_state)?)?
    })
}
/// Apply a YAML network state read from stdin.
fn apply_from_stdin(kernel_only: bool) -> Result<String, CliError> {
    apply(io::stdin(), kernel_only)
}
/// Apply a YAML network state read from the given file.
fn apply_from_file(file_path: &str, kernel_only: bool) -> Result<String, CliError> {
    apply(std::fs::File::open(file_path)?, kernel_only)
}
/// Deserialize a YAML network state from `reader`, apply it, and return
/// the applied state serialized as sorted YAML.
fn apply<R: Read>(reader: R, kernel_only: bool) -> Result<String, CliError> {
    let mut desired_state: NetworkState = serde_yaml::from_reader(reader)?;
    desired_state.set_kernel_only(kernel_only);
    desired_state.apply()?;
    Ok(serde_yaml::to_string(&sort_netstate(desired_state)?)?)
}
|
use super::rocket;
use rocket::local::Client;
use rocket::http::Status;
use std::fs::File;
use std::io::Read;
/// Issue GET `path` against a fresh rocket instance and assert `status`.
/// When `file` converts to `Some(filename)`, also assert that the response
/// body equals that file's contents byte-for-byte.
fn test_query_file<T> (path: &str, file: T, status: Status)
    where T: Into<Option<&'static str>>
{
    let client = Client::new(rocket()).unwrap();
    let mut response = client.get(path).dispatch();
    assert_eq!(response.status(), status);
    let body_data = response.body().and_then(|body| body.into_bytes());
    if let Some(filename) = file.into() {
        let expected_data = read_file_content(filename);
        assert!(body_data.map_or(false, |s| s == expected_data));
    }
}
/// Read the entire file at `path` into memory, panicking with a
/// descriptive message if it cannot be opened or read.
fn read_file_content(path: &str) -> Vec<u8> {
    // `unwrap_or_else` avoids building the panic message on the happy path
    // (clippy: expect_fun_call), and includes the underlying error.
    let mut fp = File::open(path).unwrap_or_else(|e| panic!("Can't open {}: {}", path, e));
    let mut file_content = vec![];
    fp.read_to_end(&mut file_content)
        .unwrap_or_else(|e| panic!("Reading {} failed: {}", path, e));
    file_content
}
// The index page responds 200 with a non-empty body.
#[test]
fn test_index() {
    let client = Client::new(rocket()).expect("valid rocket instance");
    let mut response = client.get("/").dispatch();
    assert_eq!(response.status(), Status::Ok);
    assert!(response.body_string().is_some());
}
// The index page renders at least one element with class "post".
#[test]
fn test_post_content() {
    let client = Client::new(rocket()).expect("valid rocket instance");
    let mut response = client.get("/").dispatch();
    assert_eq!(response.status(), Status::Ok);
    assert!(response.body_string().unwrap()
        .find("class=\"post\"")
        .is_some());
}
// Unknown paths — shallow, nested, and with query parameters — all 404.
#[test]
fn test_invalid_path() {
    test_query_file("/thou_shalt_not_exist", None, Status::NotFound);
    test_query_file("/thou/shalt/not/exist", None, Status::NotFound);
    test_query_file("/thou/shalt/not/exist?a=b&c=d", None, Status::NotFound);
}
// The favicon is served verbatim from the static directory.
#[test]
fn test_favicon() {
    test_query_file("/images/favicon.ico", "static/images/favicon.ico", Status::Ok);
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.