text
stringlengths
8
4.13M
//! Private configuration settings example file. Will be copied into the
//! correct location by the build script.
//!
//! NOTE(review): these are placeholder values only — never commit real
//! credentials to source control.

/// Connection string for the main database (placeholder).
pub const DB_URL: &str = "mysql://username:password@hoste.com/databasename";
/// External API key (placeholder).
pub const API_KEY: &str = "1111111111222222222333333333";
/// Credentials for the local osutrack database (placeholder).
pub const DB_CREDENTIALS: &str = "mysql://username:password@localhost/osutrack";
// svd2rust-generated access API for the HRTIM Master Timer Control Register
// (MCR). Reformatted from a collapsed dump; doc-string typos fixed
// ("ynchronization" -> "Synchronization", "synchroniation" ->
// "synchronization", "AC Synchronization" -> "DAC Synchronization").
// All code tokens are unchanged.
#[doc = "Register `MCR` reader"]
pub type R = crate::R<MCR_SPEC>;
#[doc = "Register `MCR` writer"]
pub type W = crate::W<MCR_SPEC>;
#[doc = "Field `CKPSC` reader - HRTIM Master Clock prescaler"]
pub type CKPSC_R = crate::FieldReader;
#[doc = "Field `CKPSC` writer - HRTIM Master Clock prescaler"]
pub type CKPSC_W<'a, REG, const O: u8> = crate::FieldWriterSafe<'a, REG, 3, O>;
#[doc = "Field `CONT` reader - Master Continuous mode"]
pub type CONT_R = crate::BitReader<CONT_A>;
#[doc = "Master Continuous mode\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum CONT_A {
    #[doc = "0: The timer operates in single-shot mode and stops when it reaches the MPER value"]
    SingleShot = 0,
    #[doc = "1: The timer operates in continuous (free-running) mode and rolls over to zero when it reaches the MPER value"]
    Continuous = 1,
}
impl From<CONT_A> for bool {
    #[inline(always)]
    fn from(variant: CONT_A) -> Self { variant as u8 != 0 }
}
impl CONT_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> CONT_A {
        match self.bits {
            false => CONT_A::SingleShot,
            true => CONT_A::Continuous,
        }
    }
    #[doc = "The timer operates in single-shot mode and stops when it reaches the MPER value"]
    #[inline(always)]
    pub fn is_single_shot(&self) -> bool { *self == CONT_A::SingleShot }
    #[doc = "The timer operates in continuous (free-running) mode and rolls over to zero when it reaches the MPER value"]
    #[inline(always)]
    pub fn is_continuous(&self) -> bool { *self == CONT_A::Continuous }
}
#[doc = "Field `CONT` writer - Master Continuous mode"]
pub type CONT_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, CONT_A>;
impl<'a, REG, const O: u8> CONT_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "The timer operates in single-shot mode and stops when it reaches the MPER value"]
    #[inline(always)]
    pub fn single_shot(self) -> &'a mut crate::W<REG> { self.variant(CONT_A::SingleShot) }
    #[doc = "The timer operates in continuous (free-running) mode and rolls over to zero when it reaches the MPER value"]
    #[inline(always)]
    pub fn continuous(self) -> &'a mut crate::W<REG> { self.variant(CONT_A::Continuous) }
}
#[doc = "Field `RETRIG` reader - Master Re-triggerable mode"]
pub type RETRIG_R = crate::BitReader<RETRIG_A>;
#[doc = "Master Re-triggerable mode\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum RETRIG_A {
    #[doc = "0: The timer is not re-triggerable: a counter reset can be done only if the counter is stopped"]
    Disabled = 0,
    #[doc = "1: The timer is retriggerable: a counter reset is done whatever the counter state"]
    Enabled = 1,
}
impl From<RETRIG_A> for bool {
    #[inline(always)]
    fn from(variant: RETRIG_A) -> Self { variant as u8 != 0 }
}
impl RETRIG_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> RETRIG_A {
        match self.bits {
            false => RETRIG_A::Disabled,
            true => RETRIG_A::Enabled,
        }
    }
    #[doc = "The timer is not re-triggerable: a counter reset can be done only if the counter is stopped"]
    #[inline(always)]
    pub fn is_disabled(&self) -> bool { *self == RETRIG_A::Disabled }
    #[doc = "The timer is retriggerable: a counter reset is done whatever the counter state"]
    #[inline(always)]
    pub fn is_enabled(&self) -> bool { *self == RETRIG_A::Enabled }
}
#[doc = "Field `RETRIG` writer - Master Re-triggerable mode"]
pub type RETRIG_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, RETRIG_A>;
impl<'a, REG, const O: u8> RETRIG_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "The timer is not re-triggerable: a counter reset can be done only if the counter is stopped"]
    #[inline(always)]
    pub fn disabled(self) -> &'a mut crate::W<REG> { self.variant(RETRIG_A::Disabled) }
    #[doc = "The timer is retriggerable: a counter reset is done whatever the counter state"]
    #[inline(always)]
    pub fn enabled(self) -> &'a mut crate::W<REG> { self.variant(RETRIG_A::Enabled) }
}
#[doc = "Field `HALF` reader - Half mode enable"]
pub type HALF_R = crate::BitReader<HALF_A>;
#[doc = "Half mode enable\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum HALF_A {
    #[doc = "0: Half mode disabled"]
    Disabled = 0,
    #[doc = "1: Half mode enabled"]
    Enabled = 1,
}
impl From<HALF_A> for bool {
    #[inline(always)]
    fn from(variant: HALF_A) -> Self { variant as u8 != 0 }
}
impl HALF_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> HALF_A {
        match self.bits {
            false => HALF_A::Disabled,
            true => HALF_A::Enabled,
        }
    }
    #[doc = "Half mode disabled"]
    #[inline(always)]
    pub fn is_disabled(&self) -> bool { *self == HALF_A::Disabled }
    #[doc = "Half mode enabled"]
    #[inline(always)]
    pub fn is_enabled(&self) -> bool { *self == HALF_A::Enabled }
}
#[doc = "Field `HALF` writer - Half mode enable"]
pub type HALF_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, HALF_A>;
impl<'a, REG, const O: u8> HALF_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "Half mode disabled"]
    #[inline(always)]
    pub fn disabled(self) -> &'a mut crate::W<REG> { self.variant(HALF_A::Disabled) }
    #[doc = "Half mode enabled"]
    #[inline(always)]
    pub fn enabled(self) -> &'a mut crate::W<REG> { self.variant(HALF_A::Enabled) }
}
#[doc = "Field `SYNCIN` reader - Synchronization input"]
pub type SYNCIN_R = crate::FieldReader<SYNCIN_A>;
#[doc = "Synchronization input\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
#[repr(u8)]
pub enum SYNCIN_A {
    #[doc = "0: Disabled. HRTIM is not synchronized and runs in standalone mode"]
    Disabled = 0,
    #[doc = "2: Internal event: the HRTIM is synchronized with the on-chip timer"]
    Internal = 2,
    #[doc = "3: External event: a positive pulse on HRTIM_SCIN input triggers the HRTIM"]
    External = 3,
}
impl From<SYNCIN_A> for u8 {
    #[inline(always)]
    fn from(variant: SYNCIN_A) -> Self { variant as _ }
}
impl crate::FieldSpec for SYNCIN_A {
    type Ux = u8;
}
impl SYNCIN_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> Option<SYNCIN_A> {
        // Value 1 is reserved in hardware, hence the Option return.
        match self.bits {
            0 => Some(SYNCIN_A::Disabled),
            2 => Some(SYNCIN_A::Internal),
            3 => Some(SYNCIN_A::External),
            _ => None,
        }
    }
    #[doc = "Disabled. HRTIM is not synchronized and runs in standalone mode"]
    #[inline(always)]
    pub fn is_disabled(&self) -> bool { *self == SYNCIN_A::Disabled }
    #[doc = "Internal event: the HRTIM is synchronized with the on-chip timer"]
    #[inline(always)]
    pub fn is_internal(&self) -> bool { *self == SYNCIN_A::Internal }
    #[doc = "External event: a positive pulse on HRTIM_SCIN input triggers the HRTIM"]
    #[inline(always)]
    pub fn is_external(&self) -> bool { *self == SYNCIN_A::External }
}
#[doc = "Field `SYNCIN` writer - Synchronization input"]
pub type SYNCIN_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 2, O, SYNCIN_A>;
impl<'a, REG, const O: u8> SYNCIN_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
    REG::Ux: From<u8>,
{
    #[doc = "Disabled. HRTIM is not synchronized and runs in standalone mode"]
    #[inline(always)]
    pub fn disabled(self) -> &'a mut crate::W<REG> { self.variant(SYNCIN_A::Disabled) }
    #[doc = "Internal event: the HRTIM is synchronized with the on-chip timer"]
    #[inline(always)]
    pub fn internal(self) -> &'a mut crate::W<REG> { self.variant(SYNCIN_A::Internal) }
    #[doc = "External event: a positive pulse on HRTIM_SCIN input triggers the HRTIM"]
    #[inline(always)]
    pub fn external(self) -> &'a mut crate::W<REG> { self.variant(SYNCIN_A::External) }
}
#[doc = "Field `SYNCRSTM` reader - Synchronization Resets Master"]
pub type SYNCRSTM_R = crate::BitReader<SYNCRSTM_A>;
#[doc = "Synchronization Resets Master\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum SYNCRSTM_A {
    #[doc = "0: No effect on the master timer"]
    Disabled = 0,
    #[doc = "1: A synchronization input event resets the master timer"]
    Enabled = 1,
}
impl From<SYNCRSTM_A> for bool {
    #[inline(always)]
    fn from(variant: SYNCRSTM_A) -> Self { variant as u8 != 0 }
}
impl SYNCRSTM_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> SYNCRSTM_A {
        match self.bits {
            false => SYNCRSTM_A::Disabled,
            true => SYNCRSTM_A::Enabled,
        }
    }
    #[doc = "No effect on the master timer"]
    #[inline(always)]
    pub fn is_disabled(&self) -> bool { *self == SYNCRSTM_A::Disabled }
    #[doc = "A synchronization input event resets the master timer"]
    #[inline(always)]
    pub fn is_enabled(&self) -> bool { *self == SYNCRSTM_A::Enabled }
}
#[doc = "Field `SYNCRSTM` writer - Synchronization Resets Master"]
pub type SYNCRSTM_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, SYNCRSTM_A>;
impl<'a, REG, const O: u8> SYNCRSTM_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "No effect on the master timer"]
    #[inline(always)]
    pub fn disabled(self) -> &'a mut crate::W<REG> { self.variant(SYNCRSTM_A::Disabled) }
    #[doc = "A synchronization input event resets the master timer"]
    #[inline(always)]
    pub fn enabled(self) -> &'a mut crate::W<REG> { self.variant(SYNCRSTM_A::Enabled) }
}
#[doc = "Field `SYNCSTRTM` reader - Synchronization Starts Master"]
pub type SYNCSTRTM_R = crate::BitReader<SYNCSTRTM_A>;
#[doc = "Synchronization Starts Master\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum SYNCSTRTM_A {
    #[doc = "0: No effect on the master timer"]
    Disabled = 0,
    #[doc = "1: A synchronization input event starts the master timer"]
    Enabled = 1,
}
impl From<SYNCSTRTM_A> for bool {
    #[inline(always)]
    fn from(variant: SYNCSTRTM_A) -> Self { variant as u8 != 0 }
}
impl SYNCSTRTM_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> SYNCSTRTM_A {
        match self.bits {
            false => SYNCSTRTM_A::Disabled,
            true => SYNCSTRTM_A::Enabled,
        }
    }
    #[doc = "No effect on the master timer"]
    #[inline(always)]
    pub fn is_disabled(&self) -> bool { *self == SYNCSTRTM_A::Disabled }
    #[doc = "A synchronization input event starts the master timer"]
    #[inline(always)]
    pub fn is_enabled(&self) -> bool { *self == SYNCSTRTM_A::Enabled }
}
#[doc = "Field `SYNCSTRTM` writer - Synchronization Starts Master"]
pub type SYNCSTRTM_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, SYNCSTRTM_A>;
impl<'a, REG, const O: u8> SYNCSTRTM_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "No effect on the master timer"]
    #[inline(always)]
    pub fn disabled(self) -> &'a mut crate::W<REG> { self.variant(SYNCSTRTM_A::Disabled) }
    #[doc = "A synchronization input event starts the master timer"]
    #[inline(always)]
    pub fn enabled(self) -> &'a mut crate::W<REG> { self.variant(SYNCSTRTM_A::Enabled) }
}
#[doc = "Field `SYNCOUT` reader - Synchronization output"]
pub type SYNCOUT_R = crate::FieldReader<SYNCOUT_A>;
#[doc = "Synchronization output\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
#[repr(u8)]
pub enum SYNCOUT_A {
    #[doc = "0: Disabled"]
    Disabled = 0,
    #[doc = "2: Positive pulse on SCOUT output (16x f_HRTIM clock cycles)"]
    PositivePulse = 2,
    #[doc = "3: Negative pulse on SCOUT output (16x f_HRTIM clock cycles)"]
    NegativePulse = 3,
}
impl From<SYNCOUT_A> for u8 {
    #[inline(always)]
    fn from(variant: SYNCOUT_A) -> Self { variant as _ }
}
impl crate::FieldSpec for SYNCOUT_A {
    type Ux = u8;
}
impl SYNCOUT_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> Option<SYNCOUT_A> {
        // Value 1 is reserved in hardware, hence the Option return.
        match self.bits {
            0 => Some(SYNCOUT_A::Disabled),
            2 => Some(SYNCOUT_A::PositivePulse),
            3 => Some(SYNCOUT_A::NegativePulse),
            _ => None,
        }
    }
    #[doc = "Disabled"]
    #[inline(always)]
    pub fn is_disabled(&self) -> bool { *self == SYNCOUT_A::Disabled }
    #[doc = "Positive pulse on SCOUT output (16x f_HRTIM clock cycles)"]
    #[inline(always)]
    pub fn is_positive_pulse(&self) -> bool { *self == SYNCOUT_A::PositivePulse }
    #[doc = "Negative pulse on SCOUT output (16x f_HRTIM clock cycles)"]
    #[inline(always)]
    pub fn is_negative_pulse(&self) -> bool { *self == SYNCOUT_A::NegativePulse }
}
#[doc = "Field `SYNCOUT` writer - Synchronization output"]
pub type SYNCOUT_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 2, O, SYNCOUT_A>;
impl<'a, REG, const O: u8> SYNCOUT_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
    REG::Ux: From<u8>,
{
    #[doc = "Disabled"]
    #[inline(always)]
    pub fn disabled(self) -> &'a mut crate::W<REG> { self.variant(SYNCOUT_A::Disabled) }
    #[doc = "Positive pulse on SCOUT output (16x f_HRTIM clock cycles)"]
    #[inline(always)]
    pub fn positive_pulse(self) -> &'a mut crate::W<REG> { self.variant(SYNCOUT_A::PositivePulse) }
    #[doc = "Negative pulse on SCOUT output (16x f_HRTIM clock cycles)"]
    #[inline(always)]
    pub fn negative_pulse(self) -> &'a mut crate::W<REG> { self.variant(SYNCOUT_A::NegativePulse) }
}
#[doc = "Field `SYNCSRC` reader - Synchronization source"]
pub type SYNCSRC_R = crate::FieldReader<SYNCSRC_A>;
#[doc = "Synchronization source\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
#[repr(u8)]
pub enum SYNCSRC_A {
    #[doc = "0: Master timer Start"]
    MasterStart = 0,
    #[doc = "1: Master timer Compare 1 event"]
    MasterCompare1 = 1,
    #[doc = "2: Timer A start/reset"]
    TimerAstart = 2,
    #[doc = "3: Timer A Compare 1 event"]
    TimerAcompare1 = 3,
}
impl From<SYNCSRC_A> for u8 {
    #[inline(always)]
    fn from(variant: SYNCSRC_A) -> Self { variant as _ }
}
impl crate::FieldSpec for SYNCSRC_A {
    type Ux = u8;
}
impl SYNCSRC_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> SYNCSRC_A {
        // All four 2-bit values are defined, so the match is exhaustive.
        match self.bits {
            0 => SYNCSRC_A::MasterStart,
            1 => SYNCSRC_A::MasterCompare1,
            2 => SYNCSRC_A::TimerAstart,
            3 => SYNCSRC_A::TimerAcompare1,
            _ => unreachable!(),
        }
    }
    #[doc = "Master timer Start"]
    #[inline(always)]
    pub fn is_master_start(&self) -> bool { *self == SYNCSRC_A::MasterStart }
    #[doc = "Master timer Compare 1 event"]
    #[inline(always)]
    pub fn is_master_compare1(&self) -> bool { *self == SYNCSRC_A::MasterCompare1 }
    #[doc = "Timer A start/reset"]
    #[inline(always)]
    pub fn is_timer_astart(&self) -> bool { *self == SYNCSRC_A::TimerAstart }
    #[doc = "Timer A Compare 1 event"]
    #[inline(always)]
    pub fn is_timer_acompare1(&self) -> bool { *self == SYNCSRC_A::TimerAcompare1 }
}
#[doc = "Field `SYNCSRC` writer - Synchronization source"]
pub type SYNCSRC_W<'a, REG, const O: u8> = crate::FieldWriterSafe<'a, REG, 2, O, SYNCSRC_A>;
impl<'a, REG, const O: u8> SYNCSRC_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
    REG::Ux: From<u8>,
{
    #[doc = "Master timer Start"]
    #[inline(always)]
    pub fn master_start(self) -> &'a mut crate::W<REG> { self.variant(SYNCSRC_A::MasterStart) }
    #[doc = "Master timer Compare 1 event"]
    #[inline(always)]
    pub fn master_compare1(self) -> &'a mut crate::W<REG> { self.variant(SYNCSRC_A::MasterCompare1) }
    #[doc = "Timer A start/reset"]
    #[inline(always)]
    pub fn timer_astart(self) -> &'a mut crate::W<REG> { self.variant(SYNCSRC_A::TimerAstart) }
    #[doc = "Timer A Compare 1 event"]
    #[inline(always)]
    pub fn timer_acompare1(self) -> &'a mut crate::W<REG> { self.variant(SYNCSRC_A::TimerAcompare1) }
}
#[doc = "Field `MCEN` reader - Master Counter enable"]
pub type MCEN_R = crate::BitReader<MCEN_A>;
#[doc = "Master Counter enable\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum MCEN_A {
    #[doc = "0: Master timer counter disabled"]
    Disabled = 0,
    #[doc = "1: Master timer counter enabled"]
    Enabled = 1,
}
impl From<MCEN_A> for bool {
    #[inline(always)]
    fn from(variant: MCEN_A) -> Self { variant as u8 != 0 }
}
impl MCEN_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> MCEN_A {
        match self.bits {
            false => MCEN_A::Disabled,
            true => MCEN_A::Enabled,
        }
    }
    #[doc = "Master timer counter disabled"]
    #[inline(always)]
    pub fn is_disabled(&self) -> bool { *self == MCEN_A::Disabled }
    #[doc = "Master timer counter enabled"]
    #[inline(always)]
    pub fn is_enabled(&self) -> bool { *self == MCEN_A::Enabled }
}
#[doc = "Field `MCEN` writer - Master Counter enable"]
pub type MCEN_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, MCEN_A>;
impl<'a, REG, const O: u8> MCEN_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "Master timer counter disabled"]
    #[inline(always)]
    pub fn disabled(self) -> &'a mut crate::W<REG> { self.variant(MCEN_A::Disabled) }
    #[doc = "Master timer counter enabled"]
    #[inline(always)]
    pub fn enabled(self) -> &'a mut crate::W<REG> { self.variant(MCEN_A::Enabled) }
}
#[doc = "Field `TACEN` reader - Timer A counter enable"]
pub type TACEN_R = crate::BitReader<TACEN_A>;
#[doc = "Timer A counter enable\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum TACEN_A {
    #[doc = "0: Timer counter disabled"]
    Disabled = 0,
    #[doc = "1: Timer counter enabled"]
    Enabled = 1,
}
impl From<TACEN_A> for bool {
    #[inline(always)]
    fn from(variant: TACEN_A) -> Self { variant as u8 != 0 }
}
impl TACEN_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> TACEN_A {
        match self.bits {
            false => TACEN_A::Disabled,
            true => TACEN_A::Enabled,
        }
    }
    #[doc = "Timer counter disabled"]
    #[inline(always)]
    pub fn is_disabled(&self) -> bool { *self == TACEN_A::Disabled }
    #[doc = "Timer counter enabled"]
    #[inline(always)]
    pub fn is_enabled(&self) -> bool { *self == TACEN_A::Enabled }
}
#[doc = "Field `TACEN` writer - Timer A counter enable"]
pub type TACEN_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, TACEN_A>;
impl<'a, REG, const O: u8> TACEN_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "Timer counter disabled"]
    #[inline(always)]
    pub fn disabled(self) -> &'a mut crate::W<REG> { self.variant(TACEN_A::Disabled) }
    #[doc = "Timer counter enabled"]
    #[inline(always)]
    pub fn enabled(self) -> &'a mut crate::W<REG> { self.variant(TACEN_A::Enabled) }
}
// Timers B..E share the TACEN reader/writer shapes; only the bit offset
// differs (supplied by the const generic O at the use sites in `impl W`).
#[doc = "Field `TBCEN` reader - Timer B counter enable"]
pub use TACEN_R as TBCEN_R;
#[doc = "Field `TCCEN` reader - Timer C counter enable"]
pub use TACEN_R as TCCEN_R;
#[doc = "Field `TDCEN` reader - Timer D counter enable"]
pub use TACEN_R as TDCEN_R;
#[doc = "Field `TECEN` reader - Timer E counter enable"]
pub use TACEN_R as TECEN_R;
#[doc = "Field `TBCEN` writer - Timer B counter enable"]
pub use TACEN_W as TBCEN_W;
#[doc = "Field `TCCEN` writer - Timer C counter enable"]
pub use TACEN_W as TCCEN_W;
#[doc = "Field `TDCEN` writer - Timer D counter enable"]
pub use TACEN_W as TDCEN_W;
#[doc = "Field `TECEN` writer - Timer E counter enable"]
pub use TACEN_W as TECEN_W;
#[doc = "Field `DACSYNC` reader - DAC Synchronization"]
pub type DACSYNC_R = crate::FieldReader<DACSYNC_A>;
#[doc = "DAC Synchronization\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
#[repr(u8)]
pub enum DACSYNC_A {
    #[doc = "0: No DAC trigger generated"]
    Disabled = 0,
    #[doc = "1: Trigger generated on DACSync1"]
    Dacsync1 = 1,
    #[doc = "2: Trigger generated on DACSync2"]
    Dacsync2 = 2,
    #[doc = "3: Trigger generated on DACSync3"]
    Dacsync3 = 3,
}
impl From<DACSYNC_A> for u8 {
    #[inline(always)]
    fn from(variant: DACSYNC_A) -> Self { variant as _ }
}
impl crate::FieldSpec for DACSYNC_A {
    type Ux = u8;
}
impl DACSYNC_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> DACSYNC_A {
        // All four 2-bit values are defined, so the match is exhaustive.
        match self.bits {
            0 => DACSYNC_A::Disabled,
            1 => DACSYNC_A::Dacsync1,
            2 => DACSYNC_A::Dacsync2,
            3 => DACSYNC_A::Dacsync3,
            _ => unreachable!(),
        }
    }
    #[doc = "No DAC trigger generated"]
    #[inline(always)]
    pub fn is_disabled(&self) -> bool { *self == DACSYNC_A::Disabled }
    #[doc = "Trigger generated on DACSync1"]
    #[inline(always)]
    pub fn is_dacsync1(&self) -> bool { *self == DACSYNC_A::Dacsync1 }
    #[doc = "Trigger generated on DACSync2"]
    #[inline(always)]
    pub fn is_dacsync2(&self) -> bool { *self == DACSYNC_A::Dacsync2 }
    #[doc = "Trigger generated on DACSync3"]
    #[inline(always)]
    pub fn is_dacsync3(&self) -> bool { *self == DACSYNC_A::Dacsync3 }
}
#[doc = "Field `DACSYNC` writer - DAC Synchronization"]
pub type DACSYNC_W<'a, REG, const O: u8> = crate::FieldWriterSafe<'a, REG, 2, O, DACSYNC_A>;
impl<'a, REG, const O: u8> DACSYNC_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
    REG::Ux: From<u8>,
{
    #[doc = "No DAC trigger generated"]
    #[inline(always)]
    pub fn disabled(self) -> &'a mut crate::W<REG> { self.variant(DACSYNC_A::Disabled) }
    #[doc = "Trigger generated on DACSync1"]
    #[inline(always)]
    pub fn dacsync1(self) -> &'a mut crate::W<REG> { self.variant(DACSYNC_A::Dacsync1) }
    #[doc = "Trigger generated on DACSync2"]
    #[inline(always)]
    pub fn dacsync2(self) -> &'a mut crate::W<REG> { self.variant(DACSYNC_A::Dacsync2) }
    #[doc = "Trigger generated on DACSync3"]
    #[inline(always)]
    pub fn dacsync3(self) -> &'a mut crate::W<REG> { self.variant(DACSYNC_A::Dacsync3) }
}
#[doc = "Field `PREEN` reader - Preload enable"]
pub type PREEN_R = crate::BitReader<PREEN_A>;
#[doc = "Preload enable\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum PREEN_A {
    #[doc = "0: Preload disabled: the write access is directly done into the active register"]
    Disabled = 0,
    #[doc = "1: Preload enabled: the write access is done into the preload register"]
    Enabled = 1,
}
impl From<PREEN_A> for bool {
    #[inline(always)]
    fn from(variant: PREEN_A) -> Self { variant as u8 != 0 }
}
impl PREEN_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> PREEN_A {
        match self.bits {
            false => PREEN_A::Disabled,
            true => PREEN_A::Enabled,
        }
    }
    #[doc = "Preload disabled: the write access is directly done into the active register"]
    #[inline(always)]
    pub fn is_disabled(&self) -> bool { *self == PREEN_A::Disabled }
    #[doc = "Preload enabled: the write access is done into the preload register"]
    #[inline(always)]
    pub fn is_enabled(&self) -> bool { *self == PREEN_A::Enabled }
}
#[doc = "Field `PREEN` writer - Preload enable"]
pub type PREEN_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, PREEN_A>;
impl<'a, REG, const O: u8> PREEN_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "Preload disabled: the write access is directly done into the active register"]
    #[inline(always)]
    pub fn disabled(self) -> &'a mut crate::W<REG> { self.variant(PREEN_A::Disabled) }
    #[doc = "Preload enabled: the write access is done into the preload register"]
    #[inline(always)]
    pub fn enabled(self) -> &'a mut crate::W<REG> { self.variant(PREEN_A::Enabled) }
}
#[doc = "Field `MREPU` reader - Master Timer Repetition update"]
pub type MREPU_R = crate::BitReader<MREPU_A>;
#[doc = "Master Timer Repetition update\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum MREPU_A {
    #[doc = "0: Update on repetition disabled"]
    Disabled = 0,
    #[doc = "1: Update on repetition enabled"]
    Enabled = 1,
}
impl From<MREPU_A> for bool {
    #[inline(always)]
    fn from(variant: MREPU_A) -> Self { variant as u8 != 0 }
}
impl MREPU_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> MREPU_A {
        match self.bits {
            false => MREPU_A::Disabled,
            true => MREPU_A::Enabled,
        }
    }
    #[doc = "Update on repetition disabled"]
    #[inline(always)]
    pub fn is_disabled(&self) -> bool { *self == MREPU_A::Disabled }
    #[doc = "Update on repetition enabled"]
    #[inline(always)]
    pub fn is_enabled(&self) -> bool { *self == MREPU_A::Enabled }
}
#[doc = "Field `MREPU` writer - Master Timer Repetition update"]
pub type MREPU_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, MREPU_A>;
impl<'a, REG, const O: u8> MREPU_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "Update on repetition disabled"]
    #[inline(always)]
    pub fn disabled(self) -> &'a mut crate::W<REG> { self.variant(MREPU_A::Disabled) }
    #[doc = "Update on repetition enabled"]
    #[inline(always)]
    pub fn enabled(self) -> &'a mut crate::W<REG> { self.variant(MREPU_A::Enabled) }
}
#[doc = "Field `BRSTDMA` reader - Burst DMA Update"]
pub type BRSTDMA_R = crate::FieldReader<BRSTDMA_A>;
#[doc = "Burst DMA Update\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
#[repr(u8)]
pub enum BRSTDMA_A {
    #[doc = "0: Update done independently from the DMA burst transfer completion"]
    Independent = 0,
    #[doc = "1: Update done when the DMA burst transfer is completed"]
    Completion = 1,
    #[doc = "2: Update done on master timer roll-over following a DMA burst transfer completion"]
    Rollover = 2,
}
impl From<BRSTDMA_A> for u8 {
    #[inline(always)]
    fn from(variant: BRSTDMA_A) -> Self { variant as _ }
}
impl crate::FieldSpec for BRSTDMA_A {
    type Ux = u8;
}
impl BRSTDMA_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> Option<BRSTDMA_A> {
        // Value 3 is reserved in hardware, hence the Option return.
        match self.bits {
            0 => Some(BRSTDMA_A::Independent),
            1 => Some(BRSTDMA_A::Completion),
            2 => Some(BRSTDMA_A::Rollover),
            _ => None,
        }
    }
    #[doc = "Update done independently from the DMA burst transfer completion"]
    #[inline(always)]
    pub fn is_independent(&self) -> bool { *self == BRSTDMA_A::Independent }
    #[doc = "Update done when the DMA burst transfer is completed"]
    #[inline(always)]
    pub fn is_completion(&self) -> bool { *self == BRSTDMA_A::Completion }
    #[doc = "Update done on master timer roll-over following a DMA burst transfer completion"]
    #[inline(always)]
    pub fn is_rollover(&self) -> bool { *self == BRSTDMA_A::Rollover }
}
#[doc = "Field `BRSTDMA` writer - Burst DMA Update"]
pub type BRSTDMA_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 2, O, BRSTDMA_A>;
impl<'a, REG, const O: u8> BRSTDMA_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
    REG::Ux: From<u8>,
{
    #[doc = "Update done independently from the DMA burst transfer completion"]
    #[inline(always)]
    pub fn independent(self) -> &'a mut crate::W<REG> { self.variant(BRSTDMA_A::Independent) }
    #[doc = "Update done when the DMA burst transfer is completed"]
    #[inline(always)]
    pub fn completion(self) -> &'a mut crate::W<REG> { self.variant(BRSTDMA_A::Completion) }
    #[doc = "Update done on master timer roll-over following a DMA burst transfer completion"]
    #[inline(always)]
    pub fn rollover(self) -> &'a mut crate::W<REG> { self.variant(BRSTDMA_A::Rollover) }
}
impl R {
    #[doc = "Bits 0:2 - HRTIM Master Clock prescaler"]
    #[inline(always)]
    pub fn ckpsc(&self) -> CKPSC_R { CKPSC_R::new((self.bits & 7) as u8) }
    #[doc = "Bit 3 - Master Continuous mode"]
    #[inline(always)]
    pub fn cont(&self) -> CONT_R { CONT_R::new(((self.bits >> 3) & 1) != 0) }
    #[doc = "Bit 4 - Master Re-triggerable mode"]
    #[inline(always)]
    pub fn retrig(&self) -> RETRIG_R { RETRIG_R::new(((self.bits >> 4) & 1) != 0) }
    #[doc = "Bit 5 - Half mode enable"]
    #[inline(always)]
    pub fn half(&self) -> HALF_R { HALF_R::new(((self.bits >> 5) & 1) != 0) }
    #[doc = "Bits 8:9 - Synchronization input"]
    #[inline(always)]
    pub fn syncin(&self) -> SYNCIN_R { SYNCIN_R::new(((self.bits >> 8) & 3) as u8) }
    #[doc = "Bit 10 - Synchronization Resets Master"]
    #[inline(always)]
    pub fn syncrstm(&self) -> SYNCRSTM_R { SYNCRSTM_R::new(((self.bits >> 10) & 1) != 0) }
    #[doc = "Bit 11 - Synchronization Starts Master"]
    #[inline(always)]
    pub fn syncstrtm(&self) -> SYNCSTRTM_R { SYNCSTRTM_R::new(((self.bits >> 11) & 1) != 0) }
    #[doc = "Bits 12:13 - Synchronization output"]
    #[inline(always)]
    pub fn syncout(&self) -> SYNCOUT_R { SYNCOUT_R::new(((self.bits >> 12) & 3) as u8) }
    #[doc = "Bits 14:15 - Synchronization source"]
    #[inline(always)]
    pub fn syncsrc(&self) -> SYNCSRC_R { SYNCSRC_R::new(((self.bits >> 14) & 3) as u8) }
    #[doc = "Bit 16 - Master Counter enable"]
    #[inline(always)]
    pub fn mcen(&self) -> MCEN_R { MCEN_R::new(((self.bits >> 16) & 1) != 0) }
    #[doc = "Bit 17 - Timer A counter enable"]
    #[inline(always)]
    pub fn tacen(&self) -> TACEN_R { TACEN_R::new(((self.bits >> 17) & 1) != 0) }
    #[doc = "Bit 18 - Timer B counter enable"]
    #[inline(always)]
    pub fn tbcen(&self) -> TBCEN_R { TBCEN_R::new(((self.bits >> 18) & 1) != 0) }
    #[doc = "Bit 19 - Timer C counter enable"]
    #[inline(always)]
    pub fn tccen(&self) -> TCCEN_R { TCCEN_R::new(((self.bits >> 19) & 1) != 0) }
    #[doc = "Bit 20 - Timer D counter enable"]
    #[inline(always)]
    pub fn tdcen(&self) -> TDCEN_R { TDCEN_R::new(((self.bits >> 20) & 1) != 0) }
    #[doc = "Bit 21 - Timer E counter enable"]
    #[inline(always)]
    pub fn tecen(&self) -> TECEN_R { TECEN_R::new(((self.bits >> 21) & 1) != 0) }
    #[doc = "Bits 25:26 - DAC Synchronization"]
    #[inline(always)]
    pub fn dacsync(&self) -> DACSYNC_R { DACSYNC_R::new(((self.bits >> 25) & 3) as u8) }
    #[doc = "Bit 27 - Preload enable"]
    #[inline(always)]
    pub fn preen(&self) -> PREEN_R { PREEN_R::new(((self.bits >> 27) & 1) != 0) }
    #[doc = "Bit 29 - Master Timer Repetition update"]
    #[inline(always)]
    pub fn mrepu(&self) -> MREPU_R { MREPU_R::new(((self.bits >> 29) & 1) != 0) }
    #[doc = "Bits 30:31 - Burst DMA Update"]
    #[inline(always)]
    pub fn brstdma(&self) -> BRSTDMA_R { BRSTDMA_R::new(((self.bits >> 30) & 3) as u8) }
}
impl W {
    #[doc = "Bits 0:2 - HRTIM Master Clock prescaler"]
    #[inline(always)]
    #[must_use]
    pub fn ckpsc(&mut self) -> CKPSC_W<MCR_SPEC, 0> { CKPSC_W::new(self) }
    #[doc = "Bit 3 - Master Continuous mode"]
    #[inline(always)]
    #[must_use]
    pub fn cont(&mut self) -> CONT_W<MCR_SPEC, 3> { CONT_W::new(self) }
    #[doc = "Bit 4 - Master Re-triggerable mode"]
    #[inline(always)]
    #[must_use]
    pub fn retrig(&mut self) -> RETRIG_W<MCR_SPEC, 4> { RETRIG_W::new(self) }
    #[doc = "Bit 5 - Half mode enable"]
    #[inline(always)]
    #[must_use]
    pub fn half(&mut self) -> HALF_W<MCR_SPEC, 5> { HALF_W::new(self) }
    #[doc = "Bits 8:9 - Synchronization input"]
    #[inline(always)]
    #[must_use]
    pub fn syncin(&mut self) -> SYNCIN_W<MCR_SPEC, 8> { SYNCIN_W::new(self) }
    #[doc = "Bit 10 - Synchronization Resets Master"]
    #[inline(always)]
    #[must_use]
    pub fn syncrstm(&mut self) -> SYNCRSTM_W<MCR_SPEC, 10> { SYNCRSTM_W::new(self) }
    #[doc = "Bit 11 - Synchronization Starts Master"]
    #[inline(always)]
    #[must_use]
    pub fn syncstrtm(&mut self) -> SYNCSTRTM_W<MCR_SPEC, 11> { SYNCSTRTM_W::new(self) }
    #[doc = "Bits 12:13 - Synchronization output"]
    #[inline(always)]
    #[must_use]
    pub fn syncout(&mut self) -> SYNCOUT_W<MCR_SPEC, 12> { SYNCOUT_W::new(self) }
    #[doc = "Bits 14:15 - Synchronization source"]
    #[inline(always)]
    #[must_use]
    pub fn syncsrc(&mut self) -> SYNCSRC_W<MCR_SPEC, 14> { SYNCSRC_W::new(self) }
    #[doc = "Bit 16 - Master Counter enable"]
    #[inline(always)]
    #[must_use]
    pub fn mcen(&mut self) -> MCEN_W<MCR_SPEC, 16> { MCEN_W::new(self) }
    #[doc = "Bit 17 - Timer A counter enable"]
    #[inline(always)]
    #[must_use]
    pub fn tacen(&mut self) -> TACEN_W<MCR_SPEC, 17> { TACEN_W::new(self) }
    #[doc = "Bit 18 - Timer B counter enable"]
    #[inline(always)]
    #[must_use]
    pub fn tbcen(&mut self) -> TBCEN_W<MCR_SPEC, 18> { TBCEN_W::new(self) }
    #[doc = "Bit 19 - Timer C counter enable"]
    #[inline(always)]
    #[must_use]
    pub fn tccen(&mut self) -> TCCEN_W<MCR_SPEC, 19> { TCCEN_W::new(self) }
    #[doc = "Bit 20 - Timer D counter enable"]
    #[inline(always)]
    #[must_use]
    pub fn tdcen(&mut self) -> TDCEN_W<MCR_SPEC, 20> { TDCEN_W::new(self) }
    #[doc = "Bit 21 - Timer E counter enable"]
    #[inline(always)]
    #[must_use]
    pub fn tecen(&mut self) -> TECEN_W<MCR_SPEC, 21> { TECEN_W::new(self) }
    #[doc = "Bits 25:26 - DAC Synchronization"]
    #[inline(always)]
    #[must_use]
    pub fn dacsync(&mut self) -> DACSYNC_W<MCR_SPEC, 25> { DACSYNC_W::new(self) }
    #[doc = "Bit 27 - Preload enable"]
    #[inline(always)]
    #[must_use]
    pub fn preen(&mut self) -> PREEN_W<MCR_SPEC, 27> { PREEN_W::new(self) }
    #[doc = "Bit 29 - Master Timer Repetition update"]
    #[inline(always)]
    #[must_use]
    pub fn mrepu(&mut self) -> MREPU_W<MCR_SPEC, 29> { MREPU_W::new(self) }
    #[doc = "Bits 30:31 - Burst DMA Update"]
    #[inline(always)]
    #[must_use]
    pub fn brstdma(&mut self) -> BRSTDMA_W<MCR_SPEC, 30> { BRSTDMA_W::new(self) }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "Master Timer Control Register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`mcr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`mcr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct MCR_SPEC;
impl crate::RegisterSpec for MCR_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`mcr::R`](R) reader structure"]
impl crate::Readable for MCR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`mcr::W`](W) writer structure"]
impl crate::Writable for MCR_SPEC {
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets MCR to value 0"]
impl crate::Resettable for MCR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
mod token; mod scanner; mod parser; mod interpreter; mod environment; use std::{env, sync::Mutex}; use std::process; use std::io; use std::fs; use std::sync::atomic::{AtomicBool, Ordering}; use interpreter::{Interpreter, RuntimeError}; use token::{Token, TokenType}; use scanner::Scanner; use parser::{ParseError, Parser}; use lazy_static::lazy_static; fn main() { let mut lox = Lox{ had_error: false }; lox.main(); } static HAD_ERROR: AtomicBool = AtomicBool::new(false); static HAD_RUNTIME_ERROR: AtomicBool = AtomicBool::new(false); lazy_static! { static ref INTERPRETER: Mutex<Interpreter> = Mutex::new(Interpreter::new()); } struct Lox { had_error: bool } impl Lox { fn main(&mut self) { let _args_: Vec<String> = env::args().collect(); self.run_prompt(); // if args.len() > 1 { // println!("Usage: rlox [script]"); // process::exit(64); // } else if args.len() == 1 { // self.run_file(&args[0]).unwrap(); // } else { // self.run_prompt(); // } } fn _run_file(&self, path: &str) -> io::Result<()> { let bytes = fs::read(path).unwrap(); let string = std::str::from_utf8(&bytes).unwrap().to_owned(); self.run(string); if HAD_ERROR.load(Ordering::Relaxed) == true { process::exit(64); } if HAD_RUNTIME_ERROR.load(Ordering::Relaxed) == true { process::exit(70); } Ok(()) } fn run_prompt(&mut self) { loop { println!("> "); let mut line = String::new(); io::stdin().read_line(&mut line).unwrap(); if line.len() == 0 { break; } self.run(line); self.had_error = false; // reset after every loop. if a user makes a mistake, it shouldn't kill their entire session. 
} } fn run(&self, source: String) { let mut scanner = Scanner::new(source); let tokens: Vec<Token> = scanner.scan_tokens(); let mut parser = Parser::new(tokens); let stmts = parser.parse(); if self.had_error { return; } let mut i = INTERPRETER.lock().unwrap(); i.interpret(stmts); } fn error(line: u32, message: String) { Lox::report(line, "".to_owned(), message); } fn parse_error(error: ParseError) { let ParseError(token, message) = error; if token.token_type == TokenType::Eof { Lox::report(token.line, "at end".to_owned(), message) } else { Lox::report(token.line, format!("at, {}", token.lexeme), message) } } fn runtime_error(error: RuntimeError) { let RuntimeError(token, message) = error; println!("{} \n[line {}]", message, token.line); HAD_RUNTIME_ERROR.store(true, Ordering::Relaxed); } fn report(line: u32, where_: String, message: String) { println!("[line {}] Error {}: {}", line, where_, message); HAD_ERROR.store(true, Ordering::Relaxed); } }
//! Tests auto-converted from "sass-spec/spec/core_functions/math" #[allow(unused)] use super::rsass; // From "sass-spec/spec/core_functions/math/abs.hrx" mod abs { #[allow(unused)] use super::rsass; mod error { #[allow(unused)] use super::rsass; // Ignoring "too_few_args", error tests are not supported yet. // Ignoring "too_many_args", error tests are not supported yet. // Ignoring "test_type", error tests are not supported yet. } #[test] fn named() { assert_eq!( rsass( "a {b: abs($number: -3)}\ \n" ) .unwrap(), "a {\ \n b: 3;\ \n}\ \n" ); } mod negative { #[allow(unused)] use super::rsass; #[test] fn decimal() { assert_eq!( rsass( "a {b: abs(-123.456)}\ \n" ) .unwrap(), "a {\ \n b: 123.456;\ \n}\ \n" ); } #[test] fn integer() { assert_eq!( rsass( "a {b: abs(-17)}\ \n" ) .unwrap(), "a {\ \n b: 17;\ \n}\ \n" ); } } mod positive { #[allow(unused)] use super::rsass; #[test] fn decimal() { assert_eq!( rsass( "a {b: abs(5.6)}\ \n" ) .unwrap(), "a {\ \n b: 5.6;\ \n}\ \n" ); } #[test] fn integer() { assert_eq!( rsass( "a {b: abs(1)}\ \n" ) .unwrap(), "a {\ \n b: 1;\ \n}\ \n" ); } } #[test] #[ignore] // unexepected error fn preserves_units() { assert_eq!( rsass( "a {b: abs(-7px / 4em) * 1em}\ \n" ) .unwrap(), "a {\ \n b: 1.75px;\ \n}\ \n" ); } #[test] fn zero() { assert_eq!( rsass( "a {b: abs(0)}\ \n" ) .unwrap(), "a {\ \n b: 0;\ \n}\ \n" ); } } // From "sass-spec/spec/core_functions/math/acos.hrx" mod acos { #[allow(unused)] use super::rsass; #[test] fn decimal() { assert_eq!( rsass( "@use \"sass:math\" as math;\ \na {b: math.acos(0.5)}\ \n" ) .unwrap(), "a {\ \n b: 60deg;\ \n}\ \n" ); } mod error { #[allow(unused)] use super::rsass; // Ignoring "too_many_args", error tests are not supported yet. // Ignoring "test_type", error tests are not supported yet. // Ignoring "units", error tests are not supported yet. // Ignoring "zero_args", error tests are not supported yet. 
} #[test] fn greater_than_one() { assert_eq!( rsass( "@use \"sass:math\" as math;\ \na {b: math.acos(2)}\ \n" ) .unwrap(), "a {\ \n b: NaNdeg;\ \n}\ \n" ); } #[test] fn less_than_negative_one() { assert_eq!( rsass( "@use \"sass:math\" as math;\ \na {b: math.acos(-2)}\ \n" ) .unwrap(), "a {\ \n b: NaNdeg;\ \n}\ \n" ); } #[test] fn negative_decimal() { assert_eq!( rsass( "@use \"sass:math\" as math;\ \na {b: math.acos(-0.5)}\ \n" ) .unwrap(), "a {\ \n b: 120deg;\ \n}\ \n" ); } #[test] fn one() { assert_eq!( rsass( "@use \"sass:math\" as math;\ \na {b: math.acos(1)}\ \n" ) .unwrap(), "a {\ \n b: 0deg;\ \n}\ \n" ); } #[test] fn one_fuzzy() { assert_eq!( rsass( "@use \"sass:math\" as math;\ \na {b: math.acos(1.000000000001)}\ \n" ) .unwrap(), "a {\ \n b: 0deg;\ \n}\ \n" ); } } // From "sass-spec/spec/core_functions/math/asin.hrx" mod asin { #[allow(unused)] use super::rsass; #[test] fn decimal() { assert_eq!( rsass( "@use \"sass:math\" as math;\ \na {b: math.asin(0.5)}\ \n" ) .unwrap(), "a {\ \n b: 30deg;\ \n}\ \n" ); } mod error { #[allow(unused)] use super::rsass; // Ignoring "too_many_args", error tests are not supported yet. // Ignoring "test_type", error tests are not supported yet. // Ignoring "units", error tests are not supported yet. // Ignoring "zero_args", error tests are not supported yet. 
} #[test] fn greater_than_one() { assert_eq!( rsass( "@use \"sass:math\" as math;\ \na {b: math.asin(2)}\ \n" ) .unwrap(), "a {\ \n b: NaNdeg;\ \n}\ \n" ); } #[test] fn less_than_negative_one() { assert_eq!( rsass( "@use \"sass:math\" as math;\ \na {b: math.asin(-2)}\ \n" ) .unwrap(), "a {\ \n b: NaNdeg;\ \n}\ \n" ); } #[test] fn negative_decimal() { assert_eq!( rsass( "@use \"sass:math\" as math;\ \na {b: math.asin(-0.5)}\ \n" ) .unwrap(), "a {\ \n b: -30deg;\ \n}\ \n" ); } #[test] fn negative_zero() { assert_eq!( rsass( "@use \"sass:math\" as math;\ \na {b: math.asin(-0.0)}\ \n" ) .unwrap(), "a {\ \n b: 0deg;\ \n}\ \n" ); } #[test] fn negative_zero_fuzzy() { assert_eq!( rsass( "@use \"sass:math\" as math;\ \na {b: math.asin(-0.000000000001)}\ \n" ) .unwrap(), "a {\ \n b: 0deg;\ \n}\ \n" ); } #[test] fn one() { assert_eq!( rsass( "@use \"sass:math\" as math;\ \na {b: math.asin(1)}\ \n" ) .unwrap(), "a {\ \n b: 90deg;\ \n}\ \n" ); } #[test] fn one_fuzzy() { assert_eq!( rsass( "@use \"sass:math\" as math;\ \na {b: math.asin(1.000000000001)}\ \n" ) .unwrap(), "a {\ \n b: 90deg;\ \n}\ \n" ); } #[test] fn zero() { assert_eq!( rsass( "@use \"sass:math\" as math;\ \na {b: math.asin(0)}\ \n" ) .unwrap(), "a {\ \n b: 0deg;\ \n}\ \n" ); } #[test] fn zero_fuzzy() { assert_eq!( rsass( "@use \"sass:math\" as math;\ \na {b: math.asin(0.000000000001)}\ \n" ) .unwrap(), "a {\ \n b: 0deg;\ \n}\ \n" ); } } // From "sass-spec/spec/core_functions/math/atan.hrx" mod atan { #[allow(unused)] use super::rsass; mod error { #[allow(unused)] use super::rsass; // Ignoring "too_many_args", error tests are not supported yet. // Ignoring "test_type", error tests are not supported yet. // Ignoring "units", error tests are not supported yet. // Ignoring "zero_args", error tests are not supported yet. 
} #[test] fn infinity() { assert_eq!( rsass( "@use \"sass:math\" as math;\ \na {b: math.atan(1 / 0)}\ \n" ) .unwrap(), "a {\ \n b: 90deg;\ \n}\ \n" ); } #[test] fn negative() { assert_eq!( rsass( "@use \"sass:math\" as math;\ \na {b: math.atan(-1)}\ \n" ) .unwrap(), "a {\ \n b: -45deg;\ \n}\ \n" ); } #[test] fn negative_infinity() { assert_eq!( rsass( "@use \"sass:math\" as math;\ \na {b: math.atan(-1 / 0)}\ \n" ) .unwrap(), "a {\ \n b: -90deg;\ \n}\ \n" ); } #[test] fn negative_zero() { assert_eq!( rsass( "@use \"sass:math\" as math;\ \na {b: math.atan(-0.0)}\ \n" ) .unwrap(), "a {\ \n b: 0deg;\ \n}\ \n" ); } #[test] fn negative_zero_fuzzy() { assert_eq!( rsass( "@use \"sass:math\" as math;\ \na {b: math.atan(-0.000000000001)}\ \n" ) .unwrap(), "a {\ \n b: 0deg;\ \n}\ \n" ); } #[test] fn positive() { assert_eq!( rsass( "@use \"sass:math\" as math;\ \na {b: math.atan(1)}\ \n" ) .unwrap(), "a {\ \n b: 45deg;\ \n}\ \n" ); } #[test] fn zero() { assert_eq!( rsass( "@use \"sass:math\" as math;\ \na {b: math.atan(0)}\ \n" ) .unwrap(), "a {\ \n b: 0deg;\ \n}\ \n" ); } #[test] fn zero_fuzzy() { assert_eq!( rsass( "@use \"sass:math\" as math;\ \na {b: math.atan(0.000000000001)}\ \n" ) .unwrap(), "a {\ \n b: 0deg;\ \n}\ \n" ); } } mod atan2; // From "sass-spec/spec/core_functions/math/ceil.hrx" mod ceil { #[allow(unused)] use super::rsass; mod error { #[allow(unused)] use super::rsass; // Ignoring "too_few_args", error tests are not supported yet. // Ignoring "too_many_args", error tests are not supported yet. // Ignoring "test_type", error tests are not supported yet. 
} #[test] fn high() { assert_eq!( rsass( "a {b: ceil(2.9)}\ \n" ) .unwrap(), "a {\ \n b: 3;\ \n}\ \n" ); } #[test] fn integer() { assert_eq!( rsass( "a {b: ceil(1)}\ \n" ) .unwrap(), "a {\ \n b: 1;\ \n}\ \n" ); } #[test] fn low() { assert_eq!( rsass( "a {b: ceil(6.000000000000001)}\ \n" ) .unwrap(), "a {\ \n b: 7;\ \n}\ \n" ); } #[test] fn named() { assert_eq!( rsass( "a {b: ceil($number: 1.6)}\ \n" ) .unwrap(), "a {\ \n b: 2;\ \n}\ \n" ); } #[test] fn negative() { assert_eq!( rsass( "a {b: ceil(-7.6)}\ \n" ) .unwrap(), "a {\ \n b: -7;\ \n}\ \n" ); } #[test] #[ignore] // unexepected error fn preserves_units() { assert_eq!( rsass( "a {b: ceil(7px / 4em) * 1em}\ \n" ) .unwrap(), "a {\ \n b: 2px;\ \n}\ \n" ); } } // From "sass-spec/spec/core_functions/math/clamp.hrx" mod clamp { #[allow(unused)] use super::rsass; #[test] fn chooses_max() { assert_eq!( rsass( "@use \"sass:math\" as math;\ \na {b: math.clamp(0, 2, 1)}\ \n" ) .unwrap(), "a {\ \n b: 1;\ \n}\ \n" ); } #[test] fn chooses_min() { assert_eq!( rsass( "@use \"sass:math\" as math;\ \na {b: math.clamp(1, 0, 2)}\ \n" ) .unwrap(), "a {\ \n b: 1;\ \n}\ \n" ); } #[test] fn chooses_number() { assert_eq!( rsass( "@use \"sass:math\" as math;\ \na {b: math.clamp(0, 1, 2)}\ \n" ) .unwrap(), "a {\ \n b: 1;\ \n}\ \n" ); } mod error { #[allow(unused)] use super::rsass; mod incompatible_units { #[allow(unused)] use super::rsass; // Ignoring "all", error tests are not supported yet. // Ignoring "min_and_max", error tests are not supported yet. // Ignoring "min_and_number", error tests are not supported yet. // Ignoring "number_and_max", error tests are not supported yet. } // Ignoring "one_arg", error tests are not supported yet. mod some_unitless { #[allow(unused)] use super::rsass; // Ignoring "max", error tests are not supported yet. // Ignoring "min", error tests are not supported yet. // Ignoring "min_and_max", error tests are not supported yet. // Ignoring "min_and_number", error tests are not supported yet. 
// Ignoring "number", error tests are not supported yet. // Ignoring "number_and_max", error tests are not supported yet. } // Ignoring "too_many_args", error tests are not supported yet. // Ignoring "two_args", error tests are not supported yet. mod test_type { #[allow(unused)] use super::rsass; // Ignoring "max", error tests are not supported yet. // Ignoring "min", error tests are not supported yet. // Ignoring "number", error tests are not supported yet. } // Ignoring "zero_args", error tests are not supported yet. } #[test] fn min_equals_max() { assert_eq!( rsass( "@use \"sass:math\" as math;\ \na {\ \n b: math.clamp(1, 2, 1);\ \n}\ \n" ) .unwrap(), "a {\ \n b: 1;\ \n}\ \n" ); } #[test] fn min_greater_than_max() { assert_eq!( rsass( "@use \"sass:math\" as math;\ \na {\ \n b: math.clamp(1, 2, 0);\ \n}\ \n" ) .unwrap(), "a {\ \n b: 1;\ \n}\ \n" ); } #[test] fn named_args() { assert_eq!( rsass( "@use \"sass:math\" as math;\ \na {b: math.clamp($min: 0, $number: 1, $max: 2)}\ \n" ) .unwrap(), "a {\ \n b: 1;\ \n}\ \n" ); } mod preserves_units { #[allow(unused)] use super::rsass; #[test] fn max() { assert_eq!( rsass( "@use \"sass:math\" as math;\ \na {b: math.clamp(180deg, 1turn, 360deg)}\ \n" ) .unwrap(), "a {\ \n b: 360deg;\ \n}\ \n" ); } #[test] fn min() { assert_eq!( rsass( "@use \"sass:math\" as math;\ \na {b: math.clamp(180deg, 0.5turn, 360deg)}\ \n" ) .unwrap(), "a {\ \n b: 180deg;\ \n}\ \n" ); } #[test] fn number() { assert_eq!( rsass( "@use \"sass:math\" as math;\ \na {b: math.clamp(180deg, 0.75turn, 360deg)}\ \n" ) .unwrap(), "a {\ \n b: 0.75turn;\ \n}\ \n" ); } } } // From "sass-spec/spec/core_functions/math/comparable.hrx" mod comparable { #[allow(unused)] use super::rsass; mod error { #[allow(unused)] use super::rsass; // Ignoring "too_few_args", error tests are not supported yet. // Ignoring "too_many_args", error tests are not supported yet. mod test_type { #[allow(unused)] use super::rsass; // Ignoring "arg_1", error tests are not supported yet. 
// Ignoring "arg_2", error tests are not supported yet. } } #[test] fn named() { assert_eq!( rsass( "a {b: comparable($number1: 1, $number2: 2)}\ \n" ) .unwrap(), "a {\ \n b: true;\ \n}\ \n" ); } mod unit { #[allow(unused)] use super::rsass; #[test] fn to_compatible() { assert_eq!( rsass( "a {b: comparable(1px, 2in)}\ \n" ) .unwrap(), "a {\ \n b: true;\ \n}\ \n" ); } #[test] fn to_different() { assert_eq!( rsass( "a {b: comparable(1px, 2em)}\ \n" ) .unwrap(), "a {\ \n b: false;\ \n}\ \n" ); } #[test] #[ignore] // unexepected error fn to_inverse() { assert_eq!( rsass( "a {b: comparable(1px, 1/1px)}\ \n" ) .unwrap(), "a {\ \n b: false;\ \n}\ \n" ); } #[test] fn to_same() { assert_eq!( rsass( "a {b: comparable(1px, 2px)}\ \n" ) .unwrap(), "a {\ \n b: true;\ \n}\ \n" ); } } mod unitless { #[allow(unused)] use super::rsass; #[test] fn to_unit() { assert_eq!( rsass( "a {b: comparable(1, 2px)}\ \n" ) .unwrap(), "a {\ \n b: true;\ \n}\ \n" ); } #[test] fn to_unitless() { assert_eq!( rsass( "a {b: comparable(1, 2)}\ \n" ) .unwrap(), "a {\ \n b: true;\ \n}\ \n" ); } } } // From "sass-spec/spec/core_functions/math/cos.hrx" mod cos { #[allow(unused)] use super::rsass; #[test] fn deg() { assert_eq!( rsass( "@use \"sass:math\" as math;\ \na {b: math.cos(1deg)}\ \n" ) .unwrap(), "a {\ \n b: 0.9998476952;\ \n}\ \n" ); } mod error { #[allow(unused)] use super::rsass; // Ignoring "too_many_args", error tests are not supported yet. // Ignoring "test_type", error tests are not supported yet. // Ignoring "unit", error tests are not supported yet. // Ignoring "zero_args", error tests are not supported yet. 
} #[test] fn grad() { assert_eq!( rsass( "@use \"sass:math\" as math;\ \na {b: math.cos(1grad)}\ \n" ) .unwrap(), "a {\ \n b: 0.9998766325;\ \n}\ \n" ); } #[test] fn infinity() { assert_eq!( rsass( "@use \"sass:math\" as math;\ \na {b: math.cos(1 / 0)}\ \n" ) .unwrap(), "a {\ \n b: NaN;\ \n}\ \n" ); } #[test] fn named_arg() { assert_eq!( rsass( "@use \"sass:math\" as math;\ \na {b: math.cos($number: 1)}\ \n" ) .unwrap(), "a {\ \n b: 0.5403023059;\ \n}\ \n" ); } #[test] fn negative_infinity() { assert_eq!( rsass( "@use \"sass:math\" as math;\ \na {b: math.cos(-1 / 0)}\ \n" ) .unwrap(), "a {\ \n b: NaN;\ \n}\ \n" ); } #[test] fn rad() { assert_eq!( rsass( "@use \"sass:math\" as math;\ \na {b: math.cos(1rad)}\ \n" ) .unwrap(), "a {\ \n b: 0.5403023059;\ \n}\ \n" ); } #[test] fn turn() { assert_eq!( rsass( "@use \"sass:math\" as math;\ \na {b: math.cos(1turn)}\ \n" ) .unwrap(), "a {\ \n b: 1;\ \n}\ \n" ); } #[test] fn unitless() { assert_eq!( rsass( "@use \"sass:math\" as math;\ \na {b: math.cos(1)}\ \n" ) .unwrap(), "a {\ \n b: 0.5403023059;\ \n}\ \n" ); } } // From "sass-spec/spec/core_functions/math/floor.hrx" mod floor { #[allow(unused)] use super::rsass; mod error { #[allow(unused)] use super::rsass; // Ignoring "too_few_args", error tests are not supported yet. // Ignoring "too_many_args", error tests are not supported yet. // Ignoring "test_type", error tests are not supported yet. 
} #[test] fn high() { assert_eq!( rsass( "a {b: floor(2.999999999999999)}\ \n" ) .unwrap(), "a {\ \n b: 2;\ \n}\ \n" ); } #[test] fn integer() { assert_eq!( rsass( "a {b: floor(1)}\ \n" ) .unwrap(), "a {\ \n b: 1;\ \n}\ \n" ); } #[test] fn low() { assert_eq!( rsass( "a {b: floor(6.1)}\ \n" ) .unwrap(), "a {\ \n b: 6;\ \n}\ \n" ); } #[test] fn named() { assert_eq!( rsass( "a {b: floor($number: 1.6)}\ \n" ) .unwrap(), "a {\ \n b: 1;\ \n}\ \n" ); } #[test] fn negative() { assert_eq!( rsass( "a {b: floor(-7.2)}\ \n" ) .unwrap(), "a {\ \n b: -8;\ \n}\ \n" ); } #[test] #[ignore] // unexepected error fn preserves_units() { assert_eq!( rsass( "a {b: floor(7px / 4em) * 1em}\ \n" ) .unwrap(), "a {\ \n b: 1px;\ \n}\ \n" ); } } // From "sass-spec/spec/core_functions/math/hypot.hrx" mod hypot { #[allow(unused)] use super::rsass; #[test] fn compatible_units() { assert_eq!( rsass( "@use \"sass:math\" as math;\ \na {b: math.hypot(3cm, 4mm * 10, 5q * 40, 6in / 2.54, 7px * 96 / 2.54)}\ \n" ) .unwrap(), "a {\ \n b: 11.6189500386cm;\ \n}\ \n" ); } mod error { #[allow(unused)] use super::rsass; mod incompatible_units { #[allow(unused)] use super::rsass; // Ignoring "all", error tests are not supported yet. // Ignoring "first_and_second", error tests are not supported yet. // Ignoring "first_and_third", error tests are not supported yet. // Ignoring "second_and_third", error tests are not supported yet. } mod some_unitless { #[allow(unused)] use super::rsass; // Ignoring "first", error tests are not supported yet. // Ignoring "first_and_second", error tests are not supported yet. // Ignoring "first_and_third", error tests are not supported yet. // Ignoring "second", error tests are not supported yet. // Ignoring "second_and_third", error tests are not supported yet. // Ignoring "third", error tests are not supported yet. } mod test_type { #[allow(unused)] use super::rsass; // Ignoring "first", error tests are not supported yet. // Ignoring "second", error tests are not supported yet. 
// Ignoring "third", error tests are not supported yet. } // Ignoring "zero_args", error tests are not supported yet. } mod infinity { #[allow(unused)] use super::rsass; #[test] fn first() { assert_eq!( rsass( "@use \"sass:math\" as math;\ \na {b: math.hypot(1/0, 1, 1)}\ \n" ) .unwrap(), "a {\ \n b: Infinity;\ \n}\ \n" ); } #[test] fn second() { assert_eq!( rsass( "@use \"sass:math\" as math;\ \na {b: math.hypot(1, 1/0, 1)}\ \n" ) .unwrap(), "a {\ \n b: Infinity;\ \n}\ \n" ); } #[test] fn third() { assert_eq!( rsass( "@use \"sass:math\" as math;\ \na {b: math.hypot(1, 1, 1/0)}\ \n" ) .unwrap(), "a {\ \n b: Infinity;\ \n}\ \n" ); } } #[test] fn unitless() { assert_eq!( rsass( "@use \"sass:math\" as math;\ \na {b: math.hypot(3, 4, 5, 6, 7)}\ \n" ) .unwrap(), "a {\ \n b: 11.6189500386;\ \n}\ \n" ); } } // From "sass-spec/spec/core_functions/math/log.hrx" mod log { #[allow(unused)] use super::rsass; mod base { #[allow(unused)] use super::rsass; #[test] fn between_zero_and_one() { assert_eq!( rsass( "@use \"sass:math\" as math;\ \na {b: math.log(2, 0.5)}\ \n" ) .unwrap(), "a {\ \n b: -1;\ \n}\ \n" ); } #[test] fn negative() { assert_eq!( rsass( "@use \"sass:math\" as math;\ \na {b: math.log(2, -1)}\ \n" ) .unwrap(), "a {\ \n b: NaN;\ \n}\ \n" ); } #[test] fn null() { assert_eq!( rsass( "@use \"sass:math\" as math;\ \na {b: math.log(2, null)}\ \n" ) .unwrap(), "a {\ \n b: 0.6931471806;\ \n}\ \n" ); } #[test] fn one() { assert_eq!( rsass( "@use \"sass:math\" as math;\ \na {b: math.log(2, 1)}\ \n" ) .unwrap(), "a {\ \n b: Infinity;\ \n}\ \n" ); } #[test] fn one_fuzzy() { assert_eq!( rsass( "@use \"sass:math\" as math;\ \na {b: math.log(2, 1.000000000001)}\ \n" ) .unwrap(), "a {\ \n b: Infinity;\ \n}\ \n" ); } #[test] fn positive() { assert_eq!( rsass( "@use \"sass:math\" as math;\ \na {b: math.log(2, 10)}\ \n" ) .unwrap(), "a {\ \n b: 0.3010299957;\ \n}\ \n" ); } #[test] fn zero() { assert_eq!( rsass( "@use \"sass:math\" as math;\ \na {b: math.log(2, 0)}\ \n" ) .unwrap(), 
"a {\ \n b: 0;\ \n}\ \n" ); } #[test] fn zero_fuzzy() { assert_eq!( rsass( "@use \"sass:math\" as math;\ \na {b: math.log(2, 0.000000000001)}\ \n" ) .unwrap(), "a {\ \n b: 0;\ \n}\ \n" ); } } mod error { #[allow(unused)] use super::rsass; // Ignoring "base_has_units", error tests are not supported yet. // Ignoring "number_has_units", error tests are not supported yet. // Ignoring "too_many_args", error tests are not supported yet. // Ignoring "test_type", error tests are not supported yet. // Ignoring "zero_args", error tests are not supported yet. } #[test] fn infinity() { assert_eq!( rsass( "@use \"sass:math\" as math;\ \na {b: math.log(1 / 0)}\ \n" ) .unwrap(), "a {\ \n b: Infinity;\ \n}\ \n" ); } mod named_arg { #[allow(unused)] use super::rsass; #[test] fn number() { assert_eq!( rsass( "@use \"sass:math\" as math;\ \na {b: math.log($number: 2)}\ \n" ) .unwrap(), "a {\ \n b: 0.6931471806;\ \n}\ \n" ); } } mod named_args { #[allow(unused)] use super::rsass; #[test] fn number_with_base() { assert_eq!( rsass( "@use \"sass:math\" as math;\ \na {b: math.log($number: 2, $base: 10)}\ \n" ) .unwrap(), "a {\ \n b: 0.3010299957;\ \n}\ \n" ); } } #[test] fn negative() { assert_eq!( rsass( "@use \"sass:math\" as math;\ \na {b: math.log(-1)}\ \n" ) .unwrap(), "a {\ \n b: NaN;\ \n}\ \n" ); } #[test] fn positive() { assert_eq!( rsass( "@use \"sass:math\" as math;\ \na {b: math.log(2)}\ \n" ) .unwrap(), "a {\ \n b: 0.6931471806;\ \n}\ \n" ); } #[test] fn zero() { assert_eq!( rsass( "@use \"sass:math\" as math;\ \na {b: math.log(0)}\ \n" ) .unwrap(), "a {\ \n b: -Infinity;\ \n}\ \n" ); } #[test] fn zero_fuzzy() { assert_eq!( rsass( "@use \"sass:math\" as math;\ \na {b: math.log(0.000000000001)}\ \n" ) .unwrap(), "a {\ \n b: -Infinity;\ \n}\ \n" ); } } // From "sass-spec/spec/core_functions/math/max.hrx" mod max { #[allow(unused)] use super::rsass; mod error { #[allow(unused)] use super::rsass; // Ignoring "incompatible_units", error tests are not supported yet. 
// Ignoring "too_few_args", error tests are not supported yet. mod test_type { #[allow(unused)] use super::rsass; // Ignoring "arg_1", error tests are not supported yet. // Ignoring "arg_2", error tests are not supported yet. // Ignoring "arg_3", error tests are not supported yet. } } #[test] fn one_arg() { assert_eq!( rsass( "$arg: 1;\ \na {b: max($arg)}\ \n" ) .unwrap(), "a {\ \n b: 1;\ \n}\ \n" ); } #[test] fn three_args() { assert_eq!( rsass( "$arg: 1;\ \na {b: max(3, $arg, 2)}\ \n" ) .unwrap(), "a {\ \n b: 3;\ \n}\ \n" ); } #[test] fn two_args() { assert_eq!( rsass( "$arg: 1;\ \na {b: max($arg, 2)}\ \n" ) .unwrap(), "a {\ \n b: 2;\ \n}\ \n" ); } mod units { #[allow(unused)] use super::rsass; #[test] fn and_unitless() { assert_eq!( rsass( "$arg: 2px;\ \na {b: max($arg, 1)}\ \n" ) .unwrap(), "a {\ \n b: 2px;\ \n}\ \n" ); } #[test] fn compatible() { assert_eq!( rsass( "$arg: 1px;\ \na {b: max($arg, 1in, 1cm)}\ \n" ) .unwrap(), "a {\ \n b: 1in;\ \n}\ \n" ); } #[test] fn same() { assert_eq!( rsass( "$arg: 6px;\ \na {b: max($arg, 2px, 10px)}\ \n" ) .unwrap(), "a {\ \n b: 10px;\ \n}\ \n" ); } } } // From "sass-spec/spec/core_functions/math/min.hrx" mod min { #[allow(unused)] use super::rsass; mod error { #[allow(unused)] use super::rsass; // Ignoring "incompatible_units", error tests are not supported yet. // Ignoring "too_few_args", error tests are not supported yet. mod test_type { #[allow(unused)] use super::rsass; // Ignoring "arg_1", error tests are not supported yet. // Ignoring "arg_2", error tests are not supported yet. // Ignoring "arg_3", error tests are not supported yet. 
} } #[test] fn one_arg() { assert_eq!( rsass( "$arg: 1;\ \na {b: min($arg)}\ \n" ) .unwrap(), "a {\ \n b: 1;\ \n}\ \n" ); } #[test] fn three_args() { assert_eq!( rsass( "$arg: 1;\ \na {b: min(3, $arg, 2)}\ \n" ) .unwrap(), "a {\ \n b: 1;\ \n}\ \n" ); } #[test] fn two_args() { assert_eq!( rsass( "$arg: 1;\ \na {b: min($arg, 2)}\ \n" ) .unwrap(), "a {\ \n b: 1;\ \n}\ \n" ); } mod units { #[allow(unused)] use super::rsass; #[test] fn and_unitless() { assert_eq!( rsass( "$arg: 2px;\ \na {b: min($arg, 1)}\ \n" ) .unwrap(), "a {\ \n b: 1;\ \n}\ \n" ); } #[test] fn compatible() { assert_eq!( rsass( "$arg: 1px;\ \na {b: min($arg, 1in, 1cm)}\ \n" ) .unwrap(), "a {\ \n b: 1px;\ \n}\ \n" ); } #[test] fn same() { assert_eq!( rsass( "$arg: 6px;\ \na {b: min($arg, 2px, 10px)}\ \n" ) .unwrap(), "a {\ \n b: 2px;\ \n}\ \n" ); } } } // From "sass-spec/spec/core_functions/math/percentage.hrx" mod percentage { #[allow(unused)] use super::rsass; mod error { #[allow(unused)] use super::rsass; // Ignoring "too_few_args", error tests are not supported yet. // Ignoring "too_many_args", error tests are not supported yet. // Ignoring "test_type", error tests are not supported yet. // Ignoring "unit", error tests are not supported yet. 
} #[test] fn integer() { assert_eq!( rsass( "a {b: percentage(42)}\ \n" ) .unwrap(), "a {\ \n b: 4200%;\ \n}\ \n" ); } #[test] fn large() { assert_eq!( rsass( "a {b: percentage(123.456)}\ \n" ) .unwrap(), "a {\ \n b: 12345.6%;\ \n}\ \n" ); } #[test] fn named() { assert_eq!( rsass( "a {b: percentage($number: 1)}\ \n" ) .unwrap(), "a {\ \n b: 100%;\ \n}\ \n" ); } #[test] fn negative() { assert_eq!( rsass( "a {b: percentage(-0.4)}\ \n" ) .unwrap(), "a {\ \n b: -40%;\ \n}\ \n" ); } #[test] fn small() { assert_eq!( rsass( "a {b: percentage(0.246)}\ \n" ) .unwrap(), "a {\ \n b: 24.6%;\ \n}\ \n" ); } #[test] fn zero() { assert_eq!( rsass( "a {b: percentage(0)}\ \n" ) .unwrap(), "a {\ \n b: 0%;\ \n}\ \n" ); } } mod pow; // From "sass-spec/spec/core_functions/math/random.hrx" mod random { #[allow(unused)] use super::rsass; mod error { #[allow(unused)] use super::rsass; // Ignoring "decimal", error tests are not supported yet. // Ignoring "negative", error tests are not supported yet. // Ignoring "test_type", error tests are not supported yet. // Ignoring "zero", error tests are not supported yet. 
} #[test] fn ignores_units() { assert_eq!( rsass( "a {b: random(1px)}\ \n" ) .unwrap(), "a {\ \n b: 1;\ \n}\ \n" ); } #[test] fn named() { assert_eq!( rsass( "$value: random($limit: 10);\ \na {b: $value > 0 and $value <= 10}\ \n" ) .unwrap(), "a {\ \n b: true;\ \n}\ \n" ); } #[test] fn no_arg() { assert_eq!( rsass( "$value: random();\ \na {b: $value >= 0 and $value < 1}\ \n" ) .unwrap(), "a {\ \n b: true;\ \n}\ \n" ); } #[test] #[ignore] // unexepected error fn null() { assert_eq!( rsass( "@import \"../util\";\ \n@function check($value) {@return $value >= 0 and $value < 1}\ \n@include check-values(null, get-function(check));\ \n" ) .unwrap(), "" ); } #[test] #[ignore] // unexepected error fn one() { assert_eq!( rsass( "@import \"../util\";\ \n@function check($value) {@return $value == 1}\ \n@include check-values(1, get-function(check));\ \n" ) .unwrap(), "" ); } #[test] #[ignore] // unexepected error fn one_hundred() { assert_eq!( rsass( "@import \"../util\";\ \n@function check($value) {@return $value == round($value) and $value > 0 and $value <= 100}\ \n@include check-values(100, get-function(check));\ \n" ) .unwrap(), "" ); } #[test] #[ignore] // unexepected error fn two() { assert_eq!( rsass( "@import \"../util\";\ \n@function check($value) {@return $value == 1 or $value == 2}\ \n@include check-values(2, get-function(check));\ \n" ) .unwrap(), "" ); } #[test] fn within_precision() { assert_eq!( rsass( "// This is within the precision limit to be considered identical to 1.\ \na {b: random(1.0000000000001)}\ \n" ) .unwrap(), "a {\ \n b: 1;\ \n}\ \n" ); } } // From "sass-spec/spec/core_functions/math/round.hrx" mod round { #[allow(unused)] use super::rsass; mod down { #[allow(unused)] use super::rsass; #[test] fn low() { assert_eq!( rsass( "a {b: round(2.2)}\ \n" ) .unwrap(), "a {\ \n b: 2;\ \n}\ \n" ); } #[test] fn negative() { assert_eq!( rsass( "a {b: round(-5.6)}\ \n" ) .unwrap(), "a {\ \n b: -6;\ \n}\ \n" ); } #[test] fn to_zero() { assert_eq!( rsass( "a {b: 
round(0.2)}\ \n" ) .unwrap(), "a {\ \n b: 0;\ \n}\ \n" ); } #[test] fn within_precision() { assert_eq!( rsass( "// This is the largest number that\'s representable as a float and outside the\ \n// precision range to be considered equal to 5.\ \na {b: round(1.49999999999)}\ \n" ) .unwrap(), "a {\ \n b: 1;\ \n}\ \n" ); } } mod error { #[allow(unused)] use super::rsass; // Ignoring "too_few_args", error tests are not supported yet. // Ignoring "too_many_args", error tests are not supported yet. // Ignoring "test_type", error tests are not supported yet. } #[test] fn integer() { assert_eq!( rsass( "a {b: round(1)}\ \n" ) .unwrap(), "a {\ \n b: 1;\ \n}\ \n" ); } #[test] fn named() { assert_eq!( rsass( "a {b: round($number: 1.6)}\ \n" ) .unwrap(), "a {\ \n b: 2;\ \n}\ \n" ); } #[test] #[ignore] // unexepected error fn preserves_units() { assert_eq!( rsass( "a {b: round(7px / 4em) * 1em}\ \n" ) .unwrap(), "a {\ \n b: 2px;\ \n}\ \n" ); } mod up { #[allow(unused)] use super::rsass; #[test] fn high() { assert_eq!( rsass( "a {b: round(2.9)}\ \n" ) .unwrap(), "a {\ \n b: 3;\ \n}\ \n" ); } #[test] fn negative() { assert_eq!( rsass( "a {b: round(-5.4)}\ \n" ) .unwrap(), "a {\ \n b: -5;\ \n}\ \n" ); } #[test] fn point_five() { assert_eq!( rsass( "a {b: round(16.5)}\ \n" ) .unwrap(), "a {\ \n b: 17;\ \n}\ \n" ); } #[test] fn to_zero() { assert_eq!( rsass( "a {b: round(-0.2)}\ \n" ) .unwrap(), "a {\ \n b: 0;\ \n}\ \n" ); } #[test] #[ignore] // wrong result fn within_precision() { assert_eq!( rsass( "// This is the smallest number that\'s representable as a float and in the\ \n// precision range to be considered equal to 5.\ \na {b: round(0.4999999999900001)}\ \n" ) .unwrap(), "a {\ \n b: 1;\ \n}\ \n" ); } } } // From "sass-spec/spec/core_functions/math/sin.hrx" mod sin { #[allow(unused)] use super::rsass; #[test] fn deg() { assert_eq!( rsass( "@use \"sass:math\" as math;\ \na {b: math.sin(1deg)}\ \n" ) .unwrap(), "a {\ \n b: 0.0174524064;\ \n}\ \n" ); } mod error { #[allow(unused)] 
use super::rsass; // Ignoring "too_many_args", error tests are not supported yet. // Ignoring "test_type", error tests are not supported yet. // Ignoring "unit", error tests are not supported yet. // Ignoring "zero_args", error tests are not supported yet. } #[test] #[ignore] // wrong result fn grad() { assert_eq!( rsass( "@use \"sass:math\" as math;\ \na {b: math.sin(1grad)}\ \n" ) .unwrap(), "a {\ \n b: 0.0157073173;\ \n}\ \n" ); } #[test] fn infinity() { assert_eq!( rsass( "@use \"sass:math\" as math;\ \na {b: math.sin(1 / 0)}\ \n" ) .unwrap(), "a {\ \n b: NaN;\ \n}\ \n" ); } #[test] fn named_arg() { assert_eq!( rsass( "@use \"sass:math\" as math;\ \na {b: math.sin($number: 1)}\ \n" ) .unwrap(), "a {\ \n b: 0.8414709848;\ \n}\ \n" ); } #[test] fn negative_infinity() { assert_eq!( rsass( "@use \"sass:math\" as math;\ \na {b: math.sin(-1 / 0)}\ \n" ) .unwrap(), "a {\ \n b: NaN;\ \n}\ \n" ); } #[test] fn negative_zero() { assert_eq!( rsass( "@use \"sass:math\" as math;\ \na {b: math.sin(-0.0)}\ \n" ) .unwrap(), "a {\ \n b: 0;\ \n}\ \n" ); } #[test] fn negative_zero_fuzzy() { assert_eq!( rsass( "@use \"sass:math\" as math;\ \na {b: math.sin(-0.000000000001)}\ \n" ) .unwrap(), "a {\ \n b: 0;\ \n}\ \n" ); } #[test] fn rad() { assert_eq!( rsass( "@use \"sass:math\" as math;\ \na {b: math.sin(1rad)}\ \n" ) .unwrap(), "a {\ \n b: 0.8414709848;\ \n}\ \n" ); } #[test] fn turn() { assert_eq!( rsass( "@use \"sass:math\" as math;\ \na {b: math.sin(1turn)}\ \n" ) .unwrap(), "a {\ \n b: 0;\ \n}\ \n" ); } #[test] fn unitless() { assert_eq!( rsass( "@use \"sass:math\" as math;\ \na {b: math.sin(1)}\ \n" ) .unwrap(), "a {\ \n b: 0.8414709848;\ \n}\ \n" ); } #[test] fn zero() { assert_eq!( rsass( "@use \"sass:math\" as math;\ \na {b: math.sin(0)}\ \n" ) .unwrap(), "a {\ \n b: 0;\ \n}\ \n" ); } #[test] fn zero_fuzzy() { assert_eq!( rsass( "@use \"sass:math\" as math;\ \na {b: math.sin(0.000000000001)}\ \n" ) .unwrap(), "a {\ \n b: 0;\ \n}\ \n" ); } } // From 
"sass-spec/spec/core_functions/math/sqrt.hrx" mod sqrt { #[allow(unused)] use super::rsass; mod error { #[allow(unused)] use super::rsass; // Ignoring "too_many_args", error tests are not supported yet. // Ignoring "test_type", error tests are not supported yet. // Ignoring "units", error tests are not supported yet. // Ignoring "zero_args", error tests are not supported yet. } #[test] fn infinity() { assert_eq!( rsass( "@use \"sass:math\" as math;\ \na {b: math.sqrt(1 / 0)}\ \n" ) .unwrap(), "a {\ \n b: Infinity;\ \n}\ \n" ); } #[test] fn named_arg() { assert_eq!( rsass( "@use \"sass:math\" as math;\ \na {b: math.sqrt($number: 2)}\ \n" ) .unwrap(), "a {\ \n b: 1.4142135624;\ \n}\ \n" ); } #[test] fn negative() { assert_eq!( rsass( "@use \"sass:math\" as math;\ \na {b: math.sqrt(-1)}\ \n" ) .unwrap(), "a {\ \n b: NaN;\ \n}\ \n" ); } #[test] fn negative_zero() { assert_eq!( rsass( "@use \"sass:math\" as math;\ \na {b: math.sqrt(-0.0)}\ \n" ) .unwrap(), "a {\ \n b: 0;\ \n}\ \n" ); } #[test] fn negative_zero_fuzzy() { assert_eq!( rsass( "@use \"sass:math\" as math;\ \na {b: math.sqrt(-0.000000000001)}\ \n" ) .unwrap(), "a {\ \n b: 0;\ \n}\ \n" ); } #[test] fn positive() { assert_eq!( rsass( "@use \"sass:math\" as math;\ \na {b: math.sqrt(2)}\ \n" ) .unwrap(), "a {\ \n b: 1.4142135624;\ \n}\ \n" ); } #[test] fn zero() { assert_eq!( rsass( "@use \"sass:math\" as math;\ \na {b: math.sqrt(0)}\ \n" ) .unwrap(), "a {\ \n b: 0;\ \n}\ \n" ); } #[test] fn zero_fuzzy() { assert_eq!( rsass( "@use \"sass:math\" as math;\ \na {b: math.sqrt(0.000000000001)}\ \n" ) .unwrap(), "a {\ \n b: 0;\ \n}\ \n" ); } } // From "sass-spec/spec/core_functions/math/tan.hrx" mod tan { #[allow(unused)] use super::rsass; mod asymptote { #[allow(unused)] use super::rsass; #[test] #[ignore] // unexepected error fn radian() { assert_eq!( rsass( "@use \"sass:math\" as math;\ \na {b: math.tan(0.5rad * math.$pi)}\ \n" ) .unwrap(), "a {\ \n b: Infinity;\ \n}\ \n" ); } } #[test] fn deg() { assert_eq!( rsass( 
"@use \"sass:math\" as math;\ \na {b: math.tan(1deg)}\ \n" ) .unwrap(), "a {\ \n b: 0.0174550649;\ \n}\ \n" ); } mod error { #[allow(unused)] use super::rsass; // Ignoring "too_many_args", error tests are not supported yet. // Ignoring "test_type", error tests are not supported yet. // Ignoring "unit", error tests are not supported yet. // Ignoring "zero_args", error tests are not supported yet. } #[test] #[ignore] // wrong result fn grad() { assert_eq!( rsass( "@use \"sass:math\" as math;\ \na {b: math.tan(1grad)}\ \n" ) .unwrap(), "a {\ \n b: 0.0157092553;\ \n}\ \n" ); } #[test] fn infinity() { assert_eq!( rsass( "@use \"sass:math\" as math;\ \na {b: math.tan(1 / 0)}\ \n" ) .unwrap(), "a {\ \n b: NaN;\ \n}\ \n" ); } #[test] fn named_arg() { assert_eq!( rsass( "@use \"sass:math\" as math;\ \na {b: math.tan($number: 1)}\ \n" ) .unwrap(), "a {\ \n b: 1.5574077247;\ \n}\ \n" ); } mod negative_asymptote { #[allow(unused)] use super::rsass; #[test] #[ignore] // unexepected error fn radian() { assert_eq!( rsass( "@use \"sass:math\" as math;\ \na {b: math.tan(-0.5rad * math.$pi)}\ \n" ) .unwrap(), "a {\ \n b: -Infinity;\ \n}\ \n" ); } } #[test] fn negative_infinity() { assert_eq!( rsass( "@use \"sass:math\" as math;\ \na {b: math.tan(-1 / 0)}\ \n" ) .unwrap(), "a {\ \n b: NaN;\ \n}\ \n" ); } #[test] fn negative_zero() { assert_eq!( rsass( "@use \"sass:math\" as math;\ \na {b: math.tan(-0.0)}\ \n" ) .unwrap(), "a {\ \n b: 0;\ \n}\ \n" ); } #[test] fn negative_zero_fuzzy() { assert_eq!( rsass( "@use \"sass:math\" as math;\ \na {b: math.tan(-0.000000000001)}\ \n" ) .unwrap(), "a {\ \n b: 0;\ \n}\ \n" ); } #[test] fn rad() { assert_eq!( rsass( "@use \"sass:math\" as math;\ \na {b: math.tan(1rad)}\ \n" ) .unwrap(), "a {\ \n b: 1.5574077247;\ \n}\ \n" ); } #[test] fn turn() { assert_eq!( rsass( "@use \"sass:math\" as math;\ \na {b: math.tan(1turn)}\ \n" ) .unwrap(), "a {\ \n b: 0;\ \n}\ \n" ); } #[test] fn unitless() { assert_eq!( rsass( "@use \"sass:math\" as math;\ \na {b: 
math.tan(1)}\ \n" ) .unwrap(), "a {\ \n b: 1.5574077247;\ \n}\ \n" ); } #[test] fn zero() { assert_eq!( rsass( "@use \"sass:math\" as math;\ \na {b: math.tan(0)}\ \n" ) .unwrap(), "a {\ \n b: 0;\ \n}\ \n" ); } #[test] fn zero_fuzzy() { assert_eq!( rsass( "@use \"sass:math\" as math;\ \na {b: math.tan(0.000000000001)}\ \n" ) .unwrap(), "a {\ \n b: 0;\ \n}\ \n" ); } } // From "sass-spec/spec/core_functions/math/unit.hrx" mod unit { #[allow(unused)] use super::rsass; mod error { #[allow(unused)] use super::rsass; // Ignoring "too_few_args", error tests are not supported yet. // Ignoring "too_many_args", error tests are not supported yet. // Ignoring "test_type", error tests are not supported yet. } #[test] #[ignore] // wrong result fn multiple_denominators() { assert_eq!( rsass( "a {b: unit(1 / 1px / 3em / 4rad)}\ \n" ) .unwrap(), "a {\ \n b: \"(px*em*rad)^-1\";\ \n}\ \n" ); } #[test] #[ignore] // wrong result fn multiple_numerators() { assert_eq!( rsass( "a {b: unit(1px * 1em * 1rad)}\ \n" ) .unwrap(), "a {\ \n b: \"px*em*rad\";\ \n}\ \n" ); } #[test] fn named() { assert_eq!( rsass( "a {b: unit($number: 1)}\ \n" ) .unwrap(), "a {\ \n b: \"\";\ \n}\ \n" ); } #[test] fn none() { assert_eq!( rsass( "a {b: unit(1)}\ \n" ) .unwrap(), "a {\ \n b: \"\";\ \n}\ \n" ); } mod numerator_and_denominator { #[allow(unused)] use super::rsass; #[test] #[ignore] // wrong result fn multiple() { assert_eq!( rsass( "a {b: unit(1px * 1em / 1rad / 1s)}\ \n" ) .unwrap(), "a {\ \n b: \"px*em/rad*s\";\ \n}\ \n" ); } #[test] #[ignore] // wrong result fn single() { assert_eq!( rsass( "a {b: unit(1px / 1em)}\ \n" ) .unwrap(), "a {\ \n b: \"px/em\";\ \n}\ \n" ); } } #[test] #[ignore] // wrong result fn one_denominator() { assert_eq!( rsass( "a {b: unit(1/1px)}\ \n" ) .unwrap(), "a {\ \n b: \"px^-1\";\ \n}\ \n" ); } #[test] fn one_numerator() { assert_eq!( rsass( "a {b: unit(1px)}\ \n" ) .unwrap(), "a {\ \n b: \"px\";\ \n}\ \n" ); } } // From "sass-spec/spec/core_functions/math/unitless.hrx" mod 
unitless { #[allow(unused)] use super::rsass; #[test] #[ignore] // unexepected error fn denominator() { assert_eq!( rsass( "a {b: unitless(1/1px)}\ \n" ) .unwrap(), "a {\ \n b: false;\ \n}\ \n" ); } mod error { #[allow(unused)] use super::rsass; // Ignoring "too_few_args", error tests are not supported yet. // Ignoring "too_many_args", error tests are not supported yet. // Ignoring "test_type", error tests are not supported yet. } #[test] fn named() { assert_eq!( rsass( "a {b: unitless($number: 100)}\ \n" ) .unwrap(), "a {\ \n b: true;\ \n}\ \n" ); } #[test] fn numerator() { assert_eq!( rsass( "a {b: unitless(1px)}\ \n" ) .unwrap(), "a {\ \n b: false;\ \n}\ \n" ); } #[test] #[ignore] // unexepected error fn numerator_and_denominator() { assert_eq!( rsass( "a {b: unitless(1px/1em)}\ \n" ) .unwrap(), "a {\ \n b: false;\ \n}\ \n" ); } #[test] fn unitless() { assert_eq!( rsass( "a {b: unitless(1)}\ \n" ) .unwrap(), "a {\ \n b: true;\ \n}\ \n" ); } } // From "sass-spec/spec/core_functions/math/variables.hrx" mod variables { #[allow(unused)] use super::rsass; #[test] #[ignore] // unexepected error fn e() { assert_eq!( rsass( "@use \"sass:math\" as math;\ \na {b: math.$e}\ \n" ) .unwrap(), "a {\ \n b: 2.7182818285;\ \n}\ \n" ); } mod error { #[allow(unused)] use super::rsass; mod assignment { #[allow(unused)] use super::rsass; // Ignoring "e", error tests are not supported yet. // Ignoring "pi", error tests are not supported yet. } } #[test] #[ignore] // unexepected error fn pi() { assert_eq!( rsass( "@use \"sass:math\" as math;\ \na {b: math.$pi}\ \n" ) .unwrap(), "a {\ \n b: 3.1415926536;\ \n}\ \n" ); } }
use crate::crypto::hash::{Hashable, H256};
use crate::experiment::performance_counter::PayloadSize;
use bincode::serialize;
use std::cell::RefCell;
use std::hash::Hash;

/// A unique identifier of a transaction output, a.k.a. a coin.
#[derive(Serialize, Deserialize, Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub struct CoinId {
    /// The hash of the transaction that produces this coin.
    pub hash: H256,
    /// The index of the coin in the output list of the transaction that produces this coin.
    pub index: u32,
}

/// An address of a user. It is the SHA256 hash of the user's public key.
pub type Address = H256;

/// An input of a transaction.
#[derive(Serialize, Deserialize, Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub struct Input {
    /// The identifier of the input coin.
    pub coin: CoinId,
    /// The amount of this input.
    // TODO: this is redundant, since it is also stored in the transaction output. We need it to do
    // rollback.
    pub value: u64,
    /// The address of the owner of this input coin.
    // TODO: this is redundant, since it is also stored in the transaction output. We need it to do
    // rollback.
    pub owner: Address,
}

/// An output of a transaction.
// TODO: coinbase output (transaction fee). Maybe we don't need that in this case.
#[derive(Serialize, Deserialize, Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub struct Output {
    /// The amount of this output.
    pub value: u64,
    /// The address of the recipient of this output coin.
    pub recipient: Address,
}

/// A Prism transaction. It takes a set of existing coins (inputs) and transforms them into a set
/// of coins (outputs).
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)]
pub struct Transaction {
    /// The list of inputs put into this transaction.
    pub input: Vec<Input>,
    /// The list of outputs generated by this transaction.
    pub output: Vec<Output>,
    /// Authorization of this transaction by the owners of the inputs.
    pub authorization: Vec<Authorization>,
    /// Memoized SHA256 hash of the serialized transaction. `#[serde(skip)]`
    /// keeps it out of the serialized form, so the cache never affects the
    /// digest computed in `Hashable::hash` below.
    #[serde(skip)]
    pub hash: RefCell<Option<H256>>,
}

impl PayloadSize for Transaction {
    /// Return the size in bytes
    // NOTE(review): this sums size_of::<T>() per element only. For
    // `Authorization` that is just the two Vec headers — the heap bytes of
    // `pubkey`/`signature` are not counted. Confirm this undercount is
    // intentional for the performance counter.
    fn size(&self) -> usize {
        self.input.len() * std::mem::size_of::<Input>()
            + self.output.len() * std::mem::size_of::<Output>()
            + self.authorization.len() * std::mem::size_of::<Authorization>()
    }
}

impl Hashable for Transaction {
    /// SHA256 over the bincode serialization, memoized in `self.hash`.
    fn hash(&self) -> H256 {
        // Fast path: return the cached digest if one was already computed.
        let hash = self.hash.borrow();
        if let Some(h) = *hash {
            return h;
        }
        // Release the shared borrow before taking the mutable one; holding
        // both would make the RefCell panic at runtime.
        drop(hash);
        let mut hash_mut = self.hash.borrow_mut();
        let hash: H256 =
            ring::digest::digest(&ring::digest::SHA256, &serialize(self).unwrap()).into();
        *hash_mut = Some(hash);
        hash
    }
}

/// Authorization of the transaction by the owner of an input coin.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)]
pub struct Authorization {
    /// The public key of the owner.
    pub pubkey: Vec<u8>,
    /// The signature of the transaction input and output
    pub signature: Vec<u8>,
}

#[cfg(any(test))]
pub mod tests {}
use std::io::Read;

// Destructors (the `Drop` trait)
//
// A destructor is the counterpart of a constructor: it runs when an object is
// destroyed. Rust has no unified constructor syntax — objects are built by
// initializing every field, usually wrapped in an ordinary associated
// function. The destructor, however, is invoked automatically by the compiler
// right before an object's lifetime ends, which makes it the natural place to
// release owned resources: not just memory (e.g. Vec frees its heap buffer in
// its destructor) but also files, locks, sockets, and so on. Tying a
// resource's lifetime to a variable's lifetime is the RAII idiom
// (Resource Acquisition Is Initialization), familiar from C++; it largely
// eliminates leaks and use-after-release mistakes.
//
// In Rust a destructor is written by implementing `std::ops::Drop`.
// Multiple locals in one scope are dropped in reverse declaration order
// (stack discipline: first constructed, last destructed).
pub fn first() {
    fn test1() {
        use std::ops::Drop;
        struct D(i32);
        impl Drop for D {
            fn drop(&mut self) {
                println!("destruct {}", self.0);
            }
        }
        let _x = D(1);
        println!("construct 1");
        {
            let _y = D(2);
            println!("construct 2");
            println!("exit inner scope");
            // `_y` is dropped here, when the inner scope closes.
        }
        println!("exit test1 function");
        // `_x` is dropped here.
    }
    test1();
}

pub fn second() {
    // Resource management: acquire in the constructor, release in the
    // destructor. Unlike GC (memory only), RAII manages arbitrary resources —
    // `File` asks the OS to open the file on creation and closes the handle
    // in its destructor; the standard collections (Vec, LinkedList, HashMap,
    // ...) free their heap allocations the same way, so no manual
    // `free`/`close` calls are needed.
    fn test1() {
        use std::fs::File;
        use std::io::Read;
        // Open the file.
        let f = File::open("test.txt");
        if f.is_err() {
            println!("file is not exist.");
            return;
        }
        let mut f = f.unwrap();
        let mut content = String::new();
        // Read the whole file into `content`.
        let result = f.read_to_string(&mut content);
        if result.is_err() {
            println!("read file error.");
            return;
        }
        println!("{}", result.unwrap());
        println!("{}", content);
        // No explicit close: `File`'s destructor closes the handle for us.
    }
    test1();

    // Dropping early: a local's lifetime normally runs from its declaration
    // to the end of the block, but it can be cut short. Calling the
    // `Drop::drop` method directly is illegal; use `std::mem::drop` instead.
    fn test2() {
        let p = Box::new(22);
        // p.drop(); // illegal: destructors cannot be called explicitly
        std::mem::drop(p);
        // println!("{}", p); // error: `p` was already dropped above
        // `std::mem::drop` has an *empty* body. Its only job is to take the
        // argument by value (move semantics), so ownership transfers into the
        // call and the value's destructor runs when the function returns —
        // plain lifetime + move rules, no compiler magic. That is why the
        // parameter must be `T`, not `&T` or another reference type.
        //
        // For `Copy` types, passing to `drop` copies: only the copy is
        // "dropped", the original is unaffected and stays usable.
        let x = 1;
        println!("before drop {}", x);
        std::mem::drop(x);
        println!("after drop {}", x);
        // Shadowing is NOT a drop and does not end a lifetime. Both bindings
        // below live until the end of the function: internally they are two
        // distinct variables that merely share a name.
        struct D(i32);
        impl Drop for D {
            fn drop(&mut self) {
                println!("destructor for {}", self.0);
            }
        }
        let x = D(1);
        println!("construct first variable");
        let x = D(2);
        println!("construct second variable");
        // The underscore wildcard is special: binding a value to `_` (unlike
        // a variable named `_x`) drops it on the spot. The `_x`/`_y` bindings
        // below are ordinary variables, dropped at end of scope in reverse
        // construction order.
        let _x = D(11);
        let _ = D(12); // dropped immediately, right here
        let _y = D(13);
        // std::mem::drop() vs std::ops::Drop::drop():
        // 1. mem::drop is a free function the programmer calls to end a
        //    lifetime early; Drop::drop is called automatically by the
        //    compiler when the lifetime ends and may not be called manually.
        // 2. mem::drop<T>(x: T) takes ownership (move semantics);
        //    Drop::drop(&mut self) takes a mutable borrow, so the destructor
        //    can still read or modify the object's fields while it runs.
    }
    test2();

    // Drop vs. Copy: a type may implement Copy only when a raw memcpy of it
    // is always memory-safe. A type with a destructor cannot guarantee that
    // (both the copy and the original would run the destructor over the same
    // underlying resource), so Drop and Copy are mutually exclusive and the
    // compiler rejects any type implementing both.
    /* compile error
    fn test3() {
        struct T;
        impl Drop for T {
            fn drop(&mut self) {}
        }
        impl Copy for T {}
    }
    test3();
    */

    // Drop flags: since a lifetime can be ended early, the point where a
    // destructor runs may depend on run-time conditions, not just block
    // structure.
    fn test4() {
        use std::ops::Drop;
        use std::mem::drop;
        struct D(&'static str);
        impl Drop for D {
            fn drop(&mut self) {
                println!("destructor {}", self.0);
            }
        }
        // Read the DROP environment variable and parse it as an integer.
        fn condition() -> Option<u32> {
            std::env::var("DROP")
                .map(|s| s.parse::<u32>().unwrap_or(0))
                .ok()
        }
        let var = (D("first"), D("second"), D("third"));
        match condition() {
            Some(1) => drop(var.0),
            Some(2) => drop(var.1),
            Some(3) => drop(var.2),
            _ => {}
        }
        println!("test4 end");
        // How the compiler implements this: when a value might be dropped on
        // several different paths, a hidden bool "drop flag" is placed on the
        // stack. The flag is set when the destructor actually runs, and every
        // potential drop point checks it first — unifying compile-time
        // lifetime analysis with run-time conditions.
    }
    test4();
}
#![allow(overflowing_literals)] use super::decoder::*; use super::*; pub mod rtype; pub mod stype; pub mod itype; pub mod ujtype; pub mod utype; pub mod sbtype; #[cfg(test)] mod implementer_test; pub fn handle_fence(_regfile: &mut [u32], bytes: &[u8], _pc: &mut u32) -> Result<(), ExecutionError> { let opcode = get_opcode(bytes); let _rd = get_rd(bytes); let f3 = get_f3(bytes); let _rs1 = get_rs1(bytes); //TODO: decode immediate if opcode == 0x00FF && f3 == 0x0 { return Err(ExecutionError::Unimplemented("FENCE".into())); } else if opcode == 0x00FF && f3 == 0x1 { return Err(ExecutionError::Unimplemented("FENCE.I".into())); } else { return Err(ExecutionError::InvalidInstruction(encode_hex(bytes))); } }
use itertools::Itertools;
use kurbo::{
    BezPath, Line, ParamCurve, ParamCurveArclen, ParamCurveNearest, PathSeg, Point, Shape,
};

/// Extra geometric operations on [`kurbo::BezPath`].
pub trait BezPathExt {
    /// Splits `self` into sections at its intersections with `other`,
    /// returning the sections and the intersection points.
    fn divide_at_intersections(
        &self,
        other: &BezPath,
    ) -> (Vec<BezPath>, Vec<Point>);
    /// Like `divide_at_intersections`, but each returned section is centered
    /// on an intersection (spans from one section midpoint to the next).
    fn divide_between_intersections(
        &self,
        other: &BezPath,
    ) -> (Vec<BezPath>, Vec<Point>);
    /// Returns a flattened (line-segments-only) copy within `tolerance`.
    fn as_flat(&self, tolerance: f64) -> BezPath;
    /// Splits the path at arc-length fraction `t` (0..1) into two paths.
    fn subsegment(&self, t: f64) -> (BezPath, BezPath);
    /// Splits the path into its closed subpaths (one per `ClosePath`).
    fn regions(&self) -> Vec<BezPath>;
    /// Even-odd point containment via ray-casting intersection count.
    fn contains_by_intersection_count(&self, point: Point) -> bool;
    /// Euclidean distance from `point` to the nearest point on the path.
    fn distance_to_nearest(&self, point: Point) -> f64;
}

impl BezPathExt for BezPath {
    fn divide_at_intersections(
        &self,
        other: &BezPath,
    ) -> (Vec<BezPath>, Vec<Point>) {
        // Sections shorter than this arc length are treated as spurious and
        // merged into their neighbor rather than emitted.
        let min_distance_adjacent = 5.0;
        let mut output_sections: Vec<BezPath> = Vec::new();
        let mut output_points: Vec<Point> = Vec::new();
        let mut current: Vec<PathSeg> = Vec::new();
        // Called for each potential intersection. Makes sure the
        // path length is non-trivial to avoid spurious intersections.
        // On the final call (`is_last`) the trailing chunk is prepended to the
        // first emitted section, so a closed path's seam does not create an
        // artificial split.
        // NOTE(review): `output_sections[0]` panics if no section was emitted
        // earlier (i.e. the paths never intersect, or every candidate chunk
        // was shorter than `min_distance_adjacent`) — confirm callers
        // guarantee at least one intersection.
        let mut flush = |current: &mut Vec<PathSeg>, is_last: bool| {
            let pathlen = current.iter().map(|seg| seg.arclen(1e-3)).sum::<f64>();
            if pathlen > min_distance_adjacent {
                let completed = std::mem::replace(current, Vec::new());
                if is_last {
                    output_sections[0] = BezPath::from_path_segments(
                        completed
                            .into_iter()
                            .chain(output_sections[0].segments()),
                    );
                } else {
                    output_points.push(completed.last().unwrap().eval(1.0));
                    let path = BezPath::from_path_segments(completed.into_iter());
                    output_sections.push(path);
                }
            }
        };
        self.segments().for_each(|seg| {
            // Exclude intersections from the segment itself, or from
            // adjacent segments, in the case of looking for
            // self-intersections. Could cause missed intersections
            // that occur directly at boundary between segments.
            let split_by =
                BezPath::from_path_segments(other.segments().filter(|&os| {
                    (os != seg)
                        && (os.start() != seg.end())
                        && (os.end() != seg.start())
                }))
                .as_flat(0.25);
            // List of intersections with this particular segment.
            // The panic!() arm is unreachable: `as_flat` produces only
            // `PathSeg::Line` segments.
            let mut t_list: Vec<_> = split_by
                .segments()
                .flat_map(|line| {
                    if let PathSeg::Line(line) = line {
                        return seg.intersect_line(line);
                    }
                    panic!();
                })
                .map(|intersection| intersection.segment_t)
                .collect();
            t_list.sort_unstable_by(|a, b| a.partial_cmp(b).unwrap());
            // Push either segment or subsegment to the current chunk.
            if t_list.is_empty() {
                current.push(seg);
            } else {
                current.push(seg.subsegment(0.0..*t_list.first().unwrap()));
                flush(&mut current, false);
                t_list.iter().tuple_windows().for_each(|(&t1, &t2)| {
                    current.push(seg.subsegment(t1..t2));
                    flush(&mut current, false);
                });
                current.push(seg.subsegment(*t_list.last().unwrap()..1.0));
            }
        });
        flush(&mut current, true);
        (output_sections, output_points)
    }

    fn divide_between_intersections(
        &self,
        other: &BezPath,
    ) -> (Vec<BezPath>, Vec<Point>) {
        let (subpaths, intersections) = self.divide_at_intersections(other);
        // Halve every section at its arc-length midpoint...
        // NOTE(review): `first().unwrap()`/`last().unwrap()` panic when
        // `divide_at_intersections` returns no sections — same precondition
        // as above.
        let path_halves: Vec<_> = subpaths
            .into_iter()
            .flat_map(|path| {
                let (a, b) = path.subsegment(0.5);
                vec![a, b].into_iter()
            })
            .collect();
        let mut output: Vec<BezPath> = Vec::new();
        let first = path_halves.first().unwrap().clone();
        // ...then stitch the second half of each section to the first half of
        // the next, so every output path straddles one intersection.
        path_halves
            .iter()
            .skip(1)
            .tuple_windows()
            .step_by(2)
            .for_each(|(a, b)| {
                output.push(BezPath::from_path_segments(
                    a.segments().chain(b.segments()),
                ));
            });
        // Wrap around: last half joins back to the very first half.
        let last = path_halves.last().unwrap().clone();
        output.push(BezPath::from_path_segments(
            last.segments().chain(first.segments()),
        ));
        (output, intersections)
    }

    fn as_flat(&self, tolerance: f64) -> BezPath {
        let mut elements = Vec::new();
        self.flatten(tolerance, |pathel| elements.push(pathel));
        BezPath::from_vec(elements)
    }

    fn subsegment(&self, t: f64) -> (BezPath, BezPath) {
        let accuracy = 1e-3;
        // Total arc length, then the target length at fraction `t`.
        let length = self.segments().map(|s| s.arclen(accuracy)).sum::<f64>();
        let target_length = length * t;
        // Scan cumulative lengths to find the segment containing the target,
        // then split that segment at the matching parameter.
        // NOTE(review): `.unwrap()` panics on an empty path or when rounding
        // leaves no segment with length_post >= target (e.g. t slightly > 1).
        let (split_i, split_seg_a, split_seg_b) = self
            .segments()
            .enumerate()
            .scan(0.0, |state, (i, seg)| {
                let length_pre = *state;
                *state += seg.arclen(accuracy);
                let length_post = *state;
                Some((length_pre, i, seg, length_post))
            })
            .filter(|(_, _, _, length_post)| length_post >= &target_length)
            .next()
            .map(|(length_pre, i, seg, _)| {
                let t = seg.inv_arclen(target_length - length_pre, accuracy);
                (i, seg.subsegment(0.0..t), seg.subsegment(t..1.0))
            })
            .unwrap();
        (
            BezPath::from_path_segments(
                self.segments()
                    .take(split_i)
                    .chain(std::iter::once(split_seg_a)),
            ),
            BezPath::from_path_segments(
                std::iter::once(split_seg_b)
                    .chain(self.segments().skip(split_i + 1)),
            ),
        )
    }

    fn regions(&self) -> Vec<BezPath> {
        // split_inclusive keeps each ClosePath with the subpath it ends.
        self.elements()
            .split_inclusive(|&pathel| pathel == kurbo::PathEl::ClosePath)
            .map(|elements| elements.iter().map(|x| *x).collect())
            .collect()
    }

    fn contains_by_intersection_count(&self, point: Point) -> bool {
        let bbox = self.bounding_box();
        if bbox.contains(point) {
            // Cast a ray to a point guaranteed outside the bounding box; an
            // odd number of crossings means the point is inside (even-odd
            // rule).
            let outside_point = Point::new(bbox.min_x() - 1.0, bbox.min_y());
            let line = Line::new(point, outside_point);
            let num_intersections = self
                .segments()
                .map(|seg| seg.intersect_line(line).len())
                .sum::<usize>();
            num_intersections % 2 != 0
        } else {
            false
        }
    }

    fn distance_to_nearest(&self, point: Point) -> f64 {
        // Minimize squared distance per segment, take sqrt once at the end.
        // NOTE(review): panics on an empty path (min over empty iterator).
        self.segments()
            .map(|seg| seg.nearest(point, 1e-3).distance_sq)
            .min_by(|a, b| a.partial_cmp(b).unwrap())
            .unwrap()
            .sqrt()
    }
}
extern crate slackbot;
extern crate bearbot;
extern crate regex;
extern crate dotenv;
extern crate iron;

use slackbot::{SlackBot, Sender};
use bearbot::handlers;
use dotenv::dotenv;
use std::env;
use std::thread;
use iron::prelude::*;
use iron::status;

/// Entry point: configures the Slack bot from environment variables, wires up
/// its message handlers, optionally starts a tiny HTTP server in a background
/// thread, and then runs the bot on the main thread.
fn main() {
    // Load environment variables from a local .env file, if present.
    dotenv().ok();
    // Required configuration; `unwrap` aborts with a panic if either is unset.
    let username = env::var("USERNAME").unwrap();
    let token = env::var("SLACK_API_TOKEN").unwrap();
    let mut bot = SlackBot::new(username.to_owned(), token);
    // Message handlers, matched against incoming messages by regex with named
    // capture groups the handlers read.
    bot.on(r"login (?P<email>[^\s]*) (?P<password>.*)", Box::new(handlers::SessionHandler));
    bot.on(r"search (?P<keywords>.*)", Box::new(handlers::FindTalentsHandler));
    // Greeting handler: replies in-channel when the bot is greeted by name.
    bot.on(format!(r"(hi|hey|hello|hallo) {}", username), Box::new(|sender: &mut Sender, _: &regex::Captures| {
        sender.respond_in_channel("Hey <3").unwrap();
    }));
    // Optional HTTP listener (presumably to satisfy a PaaS health check —
    // note the Heroku-style PORT fallback for HTTP_PORT). Only started when
    // both host and port are configured; runs on a background thread so it
    // does not block the bot.
    let host = env::var("HTTP_HOST");
    let port = env::var("HTTP_PORT").or(env::var("PORT"));
    if host.is_ok() && port.is_ok() {
        thread::spawn(move || {
            let host = format!("{}:{}", host.unwrap(), port.unwrap());
            let server = Iron::new(|_: &mut Request| {
                Ok(Response::with((status::Ok, "Hello world!")))
            }).http(&*host);
            match server {
                Ok(_) => {
                    println!("Listening on {}", host);
                },
                Err(e) => {
                    panic!("{:?}", e);
                }
            }
        });
    }
    // Run the bot on the main thread; abort on failure.
    match bot.run() {
        Ok(_) => {},
        Err(e) => {
            panic!("{:?}", e);
        }
    };
}
pub use {Deserialize, Removed, ReprC, Serialize, WithSchema, Serializer, Deserializer, Introspect, introspect_item, IntrospectItem, SavefileError, load, save, load_noschema, save_noschema, Introspector, IntrospectionResult,IntrospectorNavCommand,IntrospectedElementKey, Schema, SchemaStruct, SchemaPrimitive, SchemaEnum, load_from_mem, save_to_mem, Variant, Field, CryptoWriter, CryptoReader, load_encrypted_file, save_encrypted_file, save_file,load_file,save_file_noschema,load_file_noschema,Canary1};
use crate::validator::config::Config; use crate::validator::Validator; use clap::{App, Arg, ArgMatches, SubCommand}; use dotenv; use log::{error, trace}; use std::process; use toml; mod validator; fn main() { // load environment variables from .env file if dotenv::dotenv().is_err() { eprint!("failed to read .env file - the logging is unlikely to work correctly") } // if we want to log to file or use different logger, we'd need to replace it here. // a better alternative, but way more complex would be `slog` crate - we should // perhaps research it at some point. pretty_env_logger::init(); let arg_matches = App::new("Nym Validator") .version(built_info::PKG_VERSION) .author("Nymtech") .about("Implementation of Nym Validator") .subcommand( SubCommand::with_name("run") .about("Starts the validator") .arg( Arg::with_name("config") .long("config") .help("Location of the validator configuration file") .takes_value(true) .required(true), ), ) .get_matches(); if let Err(e) = execute(arg_matches) { error!("{:?}", e); process::exit(1); } } fn run(matches: &ArgMatches) { let config = parse_config(matches); trace!("read config: {:?}", config); let validator = Validator::new(config); validator.start() } fn parse_config(matches: &ArgMatches) -> Config { let config_file_path = matches.value_of("config").unwrap(); // since this is happening at the very startup, it's fine to panic if file doesn't exist let config_content = std::fs::read_to_string(config_file_path).unwrap(); toml::from_str(&config_content).unwrap() } pub mod built_info { // The file has been placed there by the build script. 
include!(concat!(env!("OUT_DIR"), "/built.rs")); } fn execute(matches: ArgMatches) -> Result<(), String> { match matches.subcommand() { ("run", Some(m)) => Ok(run(m)), _ => Err(usage()), } } fn usage() -> String { banner() + "usage: --help to see available options.\n\n" } fn banner() -> String { format!( r#" _ __ _ _ _ __ ___ | '_ \| | | | '_ \ _ \ | | | | |_| | | | | | | |_| |_|\__, |_| |_| |_| |___/ (validator - version {:}) "#, built_info::PKG_VERSION ) }
#[doc = "Register `ECCR` reader"] pub type R = crate::R<ECCR_SPEC>; #[doc = "Register `ECCR` writer"] pub type W = crate::W<ECCR_SPEC>; #[doc = "Field `ADDR_ECC` reader - ECC fail address"] pub type ADDR_ECC_R = crate::FieldReader<u32>; #[doc = "Field `BK_ECC` reader - ECC fail bank"] pub type BK_ECC_R = crate::BitReader<BK_ECC_A>; #[doc = "ECC fail bank\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq, Eq)] pub enum BK_ECC_A { #[doc = "0: Bank 1"] Bank1 = 0, #[doc = "1: Bank 2"] Bank2 = 1, } impl From<BK_ECC_A> for bool { #[inline(always)] fn from(variant: BK_ECC_A) -> Self { variant as u8 != 0 } } impl BK_ECC_R { #[doc = "Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> BK_ECC_A { match self.bits { false => BK_ECC_A::Bank1, true => BK_ECC_A::Bank2, } } #[doc = "Bank 1"] #[inline(always)] pub fn is_bank1(&self) -> bool { *self == BK_ECC_A::Bank1 } #[doc = "Bank 2"] #[inline(always)] pub fn is_bank2(&self) -> bool { *self == BK_ECC_A::Bank2 } } #[doc = "Field `SYSF_ECC` reader - System Flash ECC fail"] pub type SYSF_ECC_R = crate::BitReader<SYSF_ECC_A>; #[doc = "System Flash ECC fail\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq, Eq)] pub enum SYSF_ECC_A { #[doc = "1: This bit indicates that the ECC error correction or double ECC error detection is located in the System Flash"] InSystemFlash = 1, } impl From<SYSF_ECC_A> for bool { #[inline(always)] fn from(variant: SYSF_ECC_A) -> Self { variant as u8 != 0 } } impl SYSF_ECC_R { #[doc = "Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> Option<SYSF_ECC_A> { match self.bits { true => Some(SYSF_ECC_A::InSystemFlash), _ => None, } } #[doc = "This bit indicates that the ECC error correction or double ECC error detection is located in the System Flash"] #[inline(always)] pub fn is_in_system_flash(&self) -> bool { *self == SYSF_ECC_A::InSystemFlash } } #[doc = "Field `ECCIE` reader - ECC correction interrupt enable"] pub type ECCIE_R = 
crate::BitReader<ECCIE_A>; #[doc = "ECC correction interrupt enable\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq, Eq)] pub enum ECCIE_A { #[doc = "0: ECCC interrupt disabled"] Disabled = 0, #[doc = "1: ECCC interrupt enabled"] Enabled = 1, } impl From<ECCIE_A> for bool { #[inline(always)] fn from(variant: ECCIE_A) -> Self { variant as u8 != 0 } } impl ECCIE_R { #[doc = "Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> ECCIE_A { match self.bits { false => ECCIE_A::Disabled, true => ECCIE_A::Enabled, } } #[doc = "ECCC interrupt disabled"] #[inline(always)] pub fn is_disabled(&self) -> bool { *self == ECCIE_A::Disabled } #[doc = "ECCC interrupt enabled"] #[inline(always)] pub fn is_enabled(&self) -> bool { *self == ECCIE_A::Enabled } } #[doc = "Field `ECCIE` writer - ECC correction interrupt enable"] pub type ECCIE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, ECCIE_A>; impl<'a, REG, const O: u8> ECCIE_W<'a, REG, O> where REG: crate::Writable + crate::RegisterSpec, { #[doc = "ECCC interrupt disabled"] #[inline(always)] pub fn disabled(self) -> &'a mut crate::W<REG> { self.variant(ECCIE_A::Disabled) } #[doc = "ECCC interrupt enabled"] #[inline(always)] pub fn enabled(self) -> &'a mut crate::W<REG> { self.variant(ECCIE_A::Enabled) } } #[doc = "Field `ECCC2` reader - ECC2 correction"] pub type ECCC2_R = crate::BitReader<ECCC2R_A>; #[doc = "ECC2 correction\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq, Eq)] pub enum ECCC2R_A { #[doc = "0: No ECC error detected on MSB"] NoError = 0, #[doc = "1: Set by hardware when one ECC errors have been detected and corrected on MSB"] Error = 1, } impl From<ECCC2R_A> for bool { #[inline(always)] fn from(variant: ECCC2R_A) -> Self { variant as u8 != 0 } } impl ECCC2_R { #[doc = "Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> ECCC2R_A { match self.bits { false => ECCC2R_A::NoError, true => ECCC2R_A::Error, } } #[doc = "No ECC error detected on 
MSB"] #[inline(always)] pub fn is_no_error(&self) -> bool { *self == ECCC2R_A::NoError } #[doc = "Set by hardware when one ECC errors have been detected and corrected on MSB"] #[inline(always)] pub fn is_error(&self) -> bool { *self == ECCC2R_A::Error } } #[doc = "ECC2 correction\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq, Eq)] pub enum ECCC2W_AW { #[doc = "1: Cleared by writing 1"] Clear = 1, } impl From<ECCC2W_AW> for bool { #[inline(always)] fn from(variant: ECCC2W_AW) -> Self { variant as u8 != 0 } } #[doc = "Field `ECCC2` writer - ECC2 correction"] pub type ECCC2_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, ECCC2W_AW>; impl<'a, REG, const O: u8> ECCC2_W<'a, REG, O> where REG: crate::Writable + crate::RegisterSpec, { #[doc = "Cleared by writing 1"] #[inline(always)] pub fn clear(self) -> &'a mut crate::W<REG> { self.variant(ECCC2W_AW::Clear) } } #[doc = "Field `ECCD2` reader - ECC2 detection"] pub type ECCD2_R = crate::BitReader<ECCD2R_A>; #[doc = "ECC2 detection\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq, Eq)] pub enum ECCD2R_A { #[doc = "0: No double ECC errors detected on MSB"] NoError = 0, #[doc = "1: Set by hardware when two ECC errors have been detected on MSB"] Error = 1, } impl From<ECCD2R_A> for bool { #[inline(always)] fn from(variant: ECCD2R_A) -> Self { variant as u8 != 0 } } impl ECCD2_R { #[doc = "Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> ECCD2R_A { match self.bits { false => ECCD2R_A::NoError, true => ECCD2R_A::Error, } } #[doc = "No double ECC errors detected on MSB"] #[inline(always)] pub fn is_no_error(&self) -> bool { *self == ECCD2R_A::NoError } #[doc = "Set by hardware when two ECC errors have been detected on MSB"] #[inline(always)] pub fn is_error(&self) -> bool { *self == ECCD2R_A::Error } } #[doc = "ECC2 detection\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq, Eq)] pub enum ECCD2W_AW { #[doc = "1: Cleared by writing 1"] Clear = 1, } impl 
From<ECCD2W_AW> for bool { #[inline(always)] fn from(variant: ECCD2W_AW) -> Self { variant as u8 != 0 } } #[doc = "Field `ECCD2` writer - ECC2 detection"] pub type ECCD2_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, ECCD2W_AW>; impl<'a, REG, const O: u8> ECCD2_W<'a, REG, O> where REG: crate::Writable + crate::RegisterSpec, { #[doc = "Cleared by writing 1"] #[inline(always)] pub fn clear(self) -> &'a mut crate::W<REG> { self.variant(ECCD2W_AW::Clear) } } #[doc = "Field `ECCC` reader - ECC correction"] pub type ECCC_R = crate::BitReader<ECCCR_A>; #[doc = "ECC correction\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq, Eq)] pub enum ECCCR_A { #[doc = "0: No ECC error detected on LSB"] NoError = 0, #[doc = "1: Set by hardware when one ECC errors have been detected and corrected on LSB"] Error = 1, } impl From<ECCCR_A> for bool { #[inline(always)] fn from(variant: ECCCR_A) -> Self { variant as u8 != 0 } } impl ECCC_R { #[doc = "Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> ECCCR_A { match self.bits { false => ECCCR_A::NoError, true => ECCCR_A::Error, } } #[doc = "No ECC error detected on LSB"] #[inline(always)] pub fn is_no_error(&self) -> bool { *self == ECCCR_A::NoError } #[doc = "Set by hardware when one ECC errors have been detected and corrected on LSB"] #[inline(always)] pub fn is_error(&self) -> bool { *self == ECCCR_A::Error } } #[doc = "ECC correction\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq, Eq)] pub enum ECCCW_AW { #[doc = "1: Cleared by writing 1"] Clear = 1, } impl From<ECCCW_AW> for bool { #[inline(always)] fn from(variant: ECCCW_AW) -> Self { variant as u8 != 0 } } #[doc = "Field `ECCC` writer - ECC correction"] pub type ECCC_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, ECCCW_AW>; impl<'a, REG, const O: u8> ECCC_W<'a, REG, O> where REG: crate::Writable + crate::RegisterSpec, { #[doc = "Cleared by writing 1"] #[inline(always)] pub fn clear(self) -> &'a mut crate::W<REG> { 
self.variant(ECCCW_AW::Clear) } } #[doc = "Field `ECCD` reader - ECC detection"] pub type ECCD_R = crate::BitReader<ECCDR_A>; #[doc = "ECC detection\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq, Eq)] pub enum ECCDR_A { #[doc = "0: No double ECC errors detected on LSB"] NoError = 0, #[doc = "1: Set by hardware when two ECC errors have been detected on LSB"] Error = 1, } impl From<ECCDR_A> for bool { #[inline(always)] fn from(variant: ECCDR_A) -> Self { variant as u8 != 0 } } impl ECCD_R { #[doc = "Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> ECCDR_A { match self.bits { false => ECCDR_A::NoError, true => ECCDR_A::Error, } } #[doc = "No double ECC errors detected on LSB"] #[inline(always)] pub fn is_no_error(&self) -> bool { *self == ECCDR_A::NoError } #[doc = "Set by hardware when two ECC errors have been detected on LSB"] #[inline(always)] pub fn is_error(&self) -> bool { *self == ECCDR_A::Error } } #[doc = "ECC detection\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq, Eq)] pub enum ECCDW_AW { #[doc = "1: Cleared by writing 1"] Clear = 1, } impl From<ECCDW_AW> for bool { #[inline(always)] fn from(variant: ECCDW_AW) -> Self { variant as u8 != 0 } } #[doc = "Field `ECCD` writer - ECC detection"] pub type ECCD_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, ECCDW_AW>; impl<'a, REG, const O: u8> ECCD_W<'a, REG, O> where REG: crate::Writable + crate::RegisterSpec, { #[doc = "Cleared by writing 1"] #[inline(always)] pub fn clear(self) -> &'a mut crate::W<REG> { self.variant(ECCDW_AW::Clear) } } impl R { #[doc = "Bits 0:20 - ECC fail address"] #[inline(always)] pub fn addr_ecc(&self) -> ADDR_ECC_R { ADDR_ECC_R::new(self.bits & 0x001f_ffff) } #[doc = "Bit 19 - ECC fail bank"] #[inline(always)] pub fn bk_ecc(&self) -> BK_ECC_R { BK_ECC_R::new(((self.bits >> 19) & 1) != 0) } #[doc = "Bit 20 - System Flash ECC fail"] #[inline(always)] pub fn sysf_ecc(&self) -> SYSF_ECC_R { SYSF_ECC_R::new(((self.bits >> 20) & 
1) != 0) } #[doc = "Bit 24 - ECC correction interrupt enable"] #[inline(always)] pub fn eccie(&self) -> ECCIE_R { ECCIE_R::new(((self.bits >> 24) & 1) != 0) } #[doc = "Bit 28 - ECC2 correction"] #[inline(always)] pub fn eccc2(&self) -> ECCC2_R { ECCC2_R::new(((self.bits >> 28) & 1) != 0) } #[doc = "Bit 29 - ECC2 detection"] #[inline(always)] pub fn eccd2(&self) -> ECCD2_R { ECCD2_R::new(((self.bits >> 29) & 1) != 0) } #[doc = "Bit 30 - ECC correction"] #[inline(always)] pub fn eccc(&self) -> ECCC_R { ECCC_R::new(((self.bits >> 30) & 1) != 0) } #[doc = "Bit 31 - ECC detection"] #[inline(always)] pub fn eccd(&self) -> ECCD_R { ECCD_R::new(((self.bits >> 31) & 1) != 0) } } impl W { #[doc = "Bit 24 - ECC correction interrupt enable"] #[inline(always)] #[must_use] pub fn eccie(&mut self) -> ECCIE_W<ECCR_SPEC, 24> { ECCIE_W::new(self) } #[doc = "Bit 28 - ECC2 correction"] #[inline(always)] #[must_use] pub fn eccc2(&mut self) -> ECCC2_W<ECCR_SPEC, 28> { ECCC2_W::new(self) } #[doc = "Bit 29 - ECC2 detection"] #[inline(always)] #[must_use] pub fn eccd2(&mut self) -> ECCD2_W<ECCR_SPEC, 29> { ECCD2_W::new(self) } #[doc = "Bit 30 - ECC correction"] #[inline(always)] #[must_use] pub fn eccc(&mut self) -> ECCC_W<ECCR_SPEC, 30> { ECCC_W::new(self) } #[doc = "Bit 31 - ECC detection"] #[inline(always)] #[must_use] pub fn eccd(&mut self) -> ECCD_W<ECCR_SPEC, 31> { ECCD_W::new(self) } #[doc = "Writes raw bits to the register."] #[inline(always)] pub unsafe fn bits(&mut self, bits: u32) -> &mut Self { self.bits = bits; self } } #[doc = "Flash ECC register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`eccr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`eccr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. 
See [API](https://docs.rs/svd2rust/#read--modify--write-api)."] pub struct ECCR_SPEC; impl crate::RegisterSpec for ECCR_SPEC { type Ux = u32; } #[doc = "`read()` method returns [`eccr::R`](R) reader structure"] impl crate::Readable for ECCR_SPEC {} #[doc = "`write(|w| ..)` method takes [`eccr::W`](W) writer structure"] impl crate::Writable for ECCR_SPEC { const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0; const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0; } #[doc = "`reset()` method sets ECCR to value 0"] impl crate::Resettable for ECCR_SPEC { const RESET_VALUE: Self::Ux = 0; }
// Runtime implementation of the interpreter's `range` type: an arithmetic
// progression `[start:stop:step]` over arbitrary-precision `IntVar`s, plus its
// two iterator flavors (`RangeIter`, exposed to the VM; `RangeValueIter`, for
// native Rust callers).

use crate::custom_types::exceptions::{index_error, value_error};
use crate::custom_var::{downcast_var, CustomVar};
use crate::int_var::IntVar;
use crate::looping::{self, TypicalIterator};
use crate::method::{NativeMethod, StdMethod};
use crate::name::Name;
use crate::operator::Operator;
use crate::runtime::Runtime;
use crate::std_type::Type;
use crate::string_var::StringVar;
use crate::variable::{FnResult, Variable};
use crate::{first, first_n};
use num::{One, Signed, Zero};
use std::cell::RefCell;
use std::mem::replace;
use std::ops::Neg;
use std::rc::Rc;

/// An arithmetic progression from `start` (inclusive) to `stop` (exclusive),
/// advancing by `step`. `create` rejects a zero step; the step's sign decides
/// the direction of iteration.
#[derive(Debug, Eq, PartialEq)]
pub struct Range {
    start: IntVar,
    stop: IntVar,
    step: IntVar,
}

impl Range {
    pub fn new(start: IntVar, stop: IntVar, step: IntVar) -> Range {
        Range { start, stop, step }
    }

    pub fn get_start(&self) -> &IntVar {
        &self.start
    }

    pub fn get_stop(&self) -> &IntVar {
        &self.stop
    }

    pub fn get_step(&self) -> &IntVar {
        &self.step
    }

    /// Native (non-VM) iterator over the progression's values.
    pub fn values(&self) -> impl Iterator<Item = IntVar> + '_ {
        RangeValueIter {
            current: self.start.clone(),
            value: self,
        }
    }

    /// Converts an arbitrary VM value into a `Range` by invoking its `toRange`
    /// method with `len`, then downcasting the value it pushed.
    pub fn from_slice(len: usize, runtime: &mut Runtime, arg: Variable) -> Result<Rc<Range>, ()> {
        runtime.call_attr(arg, "toRange", vec![len.into()])?;
        Result::Ok(downcast_var(runtime.pop_return()).expect("Expected a range"))
    }

    /// True while `value` has not yet crossed `stop` for our direction.
    fn before_end(&self, value: &IntVar) -> bool {
        if self.step.is_positive() {
            value < &self.stop
        } else {
            value > &self.stop
        }
    }

    /// Operator dispatch table for the VM.
    /// NOTE(review): `Operator::GetAttr` routes to `index`; presumably the VM
    /// uses GetAttr for subscripting here -- verify against the operator table.
    fn op_fn(o: Operator) -> NativeMethod<Rc<Range>> {
        match o {
            Operator::Str => Self::str,
            Operator::Repr => Self::str,
            Operator::Equals => Self::eq,
            Operator::Iter => Self::iter,
            Operator::GetAttr => Self::index,
            Operator::In => Self::contains,
            Operator::Reversed => Self::reversed,
            _ => unimplemented!("range.{}", o.name()),
        }
    }

    /// `str(range)` / `repr(range)`: returns the `[start:stop(:step)]` form.
    fn str(self: Rc<Self>, args: Vec<Variable>, runtime: &mut Runtime) -> FnResult {
        debug_assert!(args.is_empty());
        runtime.return_1(self.to_str().into())
    }

    /// Equality: true only when the argument downcasts to a `Range` with the
    /// same start/stop/step (derived `PartialEq`).
    fn eq(self: Rc<Self>, args: Vec<Variable>, runtime: &mut Runtime) -> FnResult {
        debug_assert!(args.len() == 1);
        let is_eq = match downcast_var::<Range>(first(args)) {
            Result::Err(_) => false,
            Result::Ok(other) => self == other,
        };
        runtime.return_1(is_eq.into())
    }

    /// Returns a VM-visible iterator over this range.
    fn iter(self: Rc<Self>, args: Vec<Variable>, runtime: &mut Runtime) -> FnResult {
        debug_assert!(args.is_empty());
        runtime.return_1(Rc::new(RangeIter::new(self)).into())
    }

    /// Subscript: value at position `index` (start + index * step), throwing
    /// an index error when that lands past `stop`.
    /// NOTE(review): a negative index is not rejected here and would pass
    /// `before_end` for positive steps -- confirm intended semantics.
    fn index(self: Rc<Self>, args: Vec<Variable>, runtime: &mut Runtime) -> FnResult {
        debug_assert!(args.len() == 1);
        let index = IntVar::from(first(args));
        let result = &self.start + &(&index * &self.step);
        if !self.before_end(&result) {
            let max_index = (&self.stop - &self.start) / self.step.clone();
            runtime.throw_quick(
                index_error(),
                format!(
                    "Index {} out of bounds for {} (max index is {})",
                    result,
                    self.to_str(),
                    max_index,
                ),
            )
        } else {
            runtime.return_1(result.into())
        }
    }

    /// Membership: value lies between the bounds (direction-aware) AND is
    /// reachable from `start` in whole steps (difference divisible by step).
    fn contains(self: Rc<Self>, args: Vec<Variable>, runtime: &mut Runtime) -> FnResult {
        debug_assert!(args.len() == 1);
        let value = IntVar::from(first(args));
        let result = if self.step.is_positive() {
            value >= self.start && value < self.stop
        } else {
            value <= self.start && value > self.stop
        } && (&(&value - &self.start) % &self.step).is_zero();
        runtime.return_1(result.into())
    }

    /// Reversed range: walks the same lattice points in the opposite
    /// direction (bounds shifted by one step, step negated).
    fn reversed(self: Rc<Self>, args: Vec<Variable>, runtime: &mut Runtime) -> FnResult {
        debug_assert!(args.is_empty());
        let new_stop = &self.start - &self.step;
        let new_start = &self.stop - &self.step;
        let new_step = (&self.step).neg(); // Turn into -(&self.step) when IDE stops making it an error
        runtime.return_1(Rc::new(Self::new(new_start, new_stop, new_step)).into())
    }

    /// Non-throwing subscript: `Some(value)` in bounds, `None` past the end.
    fn get(self: Rc<Self>, args: Vec<Variable>, runtime: &mut Runtime) -> FnResult {
        debug_assert_eq!(args.len(), 1);
        let index = IntVar::from(first(args));
        let result = &self.start + &(&index * &self.step);
        if !self.before_end(&result) {
            runtime.return_1(Option::None.into())
        } else {
            runtime.return_1(Option::Some(Variable::from(result)).into())
        }
    }

    /// `[start:stop]`, with `:step` appended only when step != 1.
    fn to_str(&self) -> StringVar {
        if self.step.is_one() {
            format!("[{}:{}]", self.start, self.stop).into()
        } else {
            format!("[{}:{}:{}]", self.start, self.stop, self.step).into()
        }
    }

    /// Length as (stop - start) / |step| with the bounds swapped for negative
    /// steps.
    /// NOTE(review): if IntVar division truncates, this undercounts whenever
    /// the span is not an exact multiple of the step (e.g. [0:10:3] yields the
    /// 4 values 0,3,6,9 but this returns 3) -- confirm IntVar `/` semantics.
    fn len(&self) -> IntVar {
        let (start, stop) = if self.step.is_negative() {
            (&self.stop, &self.start)
        } else {
            (&self.start, &self.stop)
        };
        (stop - start) / self.step.abs()
    }

    /// VM constructor: `range(start, stop, step)`; rejects step == 0.
    fn create(args: Vec<Variable>, runtime: &mut Runtime) -> FnResult {
        debug_assert!(args.len() == 3);
        let [start, stop, step] = first_n(args);
        let range = Range::new(start.into(), stop.into(), step.into());
        if range.step.is_zero() {
            runtime.throw_quick(value_error(), "Step cannot be 0")
        } else {
            runtime.return_1(Rc::new(range).into())
        }
    }

    pub fn range_type() -> Type {
        custom_class!(Range, create, "range")
    }
}

impl CustomVar for Range {
    // Ranges are immutable; attribute assignment is not supported.
    fn set(self: Rc<Self>, _name: Name, _object: Variable) {
        unimplemented!()
    }

    fn get_type(&self) -> Type {
        Self::range_type()
    }

    fn get_operator(self: Rc<Self>, op: Operator) -> Variable {
        let func = Range::op_fn(op);
        StdMethod::new_native(self, func).into()
    }

    // `length` is a computed property; `get` is a bound method; anything else
    // is unimplemented.
    fn get_attribute(self: Rc<Self>, attr: &str) -> Variable {
        let func = match attr {
            "length" => return self.len().into(),
            "get" => Self::get,
            x => unimplemented!("Range.{}", x),
        };
        StdMethod::new_native(self, func).into()
    }

    fn call_op(
        self: Rc<Self>,
        operator: Operator,
        args: Vec<Variable>,
        runtime: &mut Runtime,
    ) -> FnResult {
        runtime.call_native_method(Range::op_fn(operator), self, args)
    }

    fn call_op_or_goto(
        self: Rc<Self>,
        operator: Operator,
        args: Vec<Variable>,
        runtime: &mut Runtime,
    ) -> FnResult {
        runtime.call_native_method(Range::op_fn(operator), self, args)
    }

    fn str(self: Rc<Self>, _runtime: &mut Runtime) -> Result<StringVar, ()> {
        Result::Ok(self.to_str())
    }

    fn repr(self: Rc<Self>, _runtime: &mut Runtime) -> Result<StringVar, ()> {
        Result::Ok(self.to_str())
    }

    fn iter(self: Rc<Self>, _runtime: &mut Runtime) -> Result<looping::Iterator, ()> {
        Result::Ok(Rc::new(RangeIter::new(self)).into())
    }
}

/// VM-facing iterator over a shared `Range`; interior mutability because the
/// VM holds it behind an `Rc`.
#[derive(Debug)]
struct RangeIter {
    current: RefCell<IntVar>,
    value: Rc<Range>,
}

impl RangeIter {
    pub fn new(value: Rc<Range>) -> RangeIter {
        RangeIter {
            current: RefCell::new(value.start.clone()),
            value,
        }
    }

    /// Returns the current value and advances by `step`, or `None` at the end.
    fn true_next(&self) -> Option<IntVar> {
        if self.value.before_end(&*self.current.borrow()) {
            Option::Some(self.current.replace_with(|x| self.value.get_step() + &*x))
        } else {
            Option::None
        }
    }

    // RangeIter cannot be constructed from user code.
    fn create(_args: Vec<Variable>, _runtime: &mut Runtime) -> FnResult {
        unimplemented!()
    }

    fn range_iter_type() -> Type {
        custom_class!(RangeIter, create, "RangeIter")
    }
}

impl TypicalIterator for RangeIter {
    fn inner_next(&self) -> Option<Variable> {
        self.true_next().map(Into::into)
    }

    fn get_type() -> Type {
        Self::range_iter_type()
    }
}

/// Borrowing native iterator used by `Range::values`.
#[derive(Debug, Clone)]
struct RangeValueIter<'a> {
    current: IntVar,
    value: &'a Range,
}

impl Iterator for RangeValueIter<'_> {
    type Item = IntVar;

    fn next(&mut self) -> Option<Self::Item> {
        if self.value.before_end(&self.current) {
            let new = &self.current + self.value.get_step();
            Option::Some(replace(&mut self.current, new))
        } else {
            Option::None
        }
    }
}
extern crate rayon; use rayon::prelude::*; use super::{Chunk, Chunks}; use crate::internal_data_structure::raw_bit_vector::RawBitVector; impl super::Chunks { /// Constructor. pub fn new(rbv: &RawBitVector) -> Chunks { let n = rbv.len(); let chunk_size: u16 = Chunks::calc_chunk_size(n); let chunks_cnt: u64 = Chunks::calc_chunks_cnt(n); // In order to use chunks.par_iter_mut(), chunks should have len first. // So fill meaning less None value. let mut opt_chunks: Vec<Option<Chunk>> = vec![None; chunks_cnt as usize]; // Parallel - Each chunk has its popcount. // Actually, chunk should have total popcount from index 0 but it is calculated later in sequential manner. opt_chunks .par_iter_mut() .enumerate() .for_each(|(i_chunk, chunk)| { let this_chunk_size: u16 = if i_chunk as u64 == chunks_cnt - 1 { // When `chunk_size == 6`: // // 000 111 000 11 : rbv // | | | : chunks // // Here, when `i_chunk == 1` (targeting on last '00011' chunk), // `this_chunk_size == 5` let chunk_size_or_0 = (n % chunk_size as u64) as u16; if chunk_size_or_0 == 0 { chunk_size } else { chunk_size_or_0 } } else { chunk_size }; let chunk_rbv = rbv.clone_sub(i_chunk as u64 * chunk_size as u64, this_chunk_size as u64); let popcnt_in_chunk = chunk_rbv.popcount(); *chunk = Some(Chunk::new( popcnt_in_chunk, this_chunk_size, rbv, i_chunk as u64, )); }); // Sequential - Each chunk has total popcount from index 0. let mut chunks: Vec<Chunk> = opt_chunks.into_iter().map(|v| v.unwrap()).collect(); for i_chunk in 0..(chunks_cnt as usize) { chunks[i_chunk].value += if i_chunk == 0 { 0 } else { chunks[i_chunk - 1].value } } Chunks { chunks, chunks_cnt } } /// Returns size of 1 chunk: _(log N)^2_. pub fn calc_chunk_size(n: u64) -> u16 { let lg2 = (n as f64).log2() as u16; let sz = lg2 * lg2; if sz == 0 { 1 } else { sz } } /// Returns count of chunks: _N / (log N)^2_. 
/// /// At max: N / (log N)^2 = 2^64 / 64^2 = 2^(64-12) pub fn calc_chunks_cnt(n: u64) -> u64 { let chunk_size = Chunks::calc_chunk_size(n); n / (chunk_size as u64) + if n % (chunk_size as u64) == 0 { 0 } else { 1 } } /// Returns i-th chunk. /// /// # Panics /// When _`i` >= `self.chunks_cnt()`_. pub fn access(&self, i: u64) -> &Chunk { assert!( i <= self.chunks_cnt, "i = {} must be smaller then {} (self.chunks_cnt())", i, self.chunks_cnt ); &self.chunks[i as usize] } } #[cfg(test)] mod new_success_tests { use super::Chunks; use crate::internal_data_structure::raw_bit_vector::RawBitVector; struct Input<'a> { byte_slice: &'a [u8], last_byte_len: u8, expected_chunk_size: u16, expected_chunks: &'a Vec<u64>, } macro_rules! parameterized_tests { ($($name:ident: $value:expr,)*) => { $( #[test] fn $name() { let input: Input = $value; let rbv = RawBitVector::new(input.byte_slice, 0, input.last_byte_len); let n = rbv.len(); let chunks = Chunks::new(&rbv); assert_eq!(Chunks::calc_chunk_size(n), input.expected_chunk_size); assert_eq!(Chunks::calc_chunks_cnt(n), input.expected_chunks.len() as u64); for (i, expected_chunk) in input.expected_chunks.iter().enumerate() { let chunk = chunks.access(i as u64); assert_eq!(chunk.value(), *expected_chunk); } } )* } } parameterized_tests! 
{ t1: Input { // N = 1, (log_2(N))^2 = 1 byte_slice: &[0b0000_0000], last_byte_len: 1, expected_chunk_size: 1, expected_chunks: &vec!(0) }, t2: Input { // N = 1, (log_2(N))^2 = 1 byte_slice: &[0b1000_0000], last_byte_len: 1, expected_chunk_size: 1, expected_chunks: &vec!(1) }, t3: Input { // N = 2^2, (log_2(N))^2 = 4 byte_slice: &[0b0111_0000], last_byte_len: 4, expected_chunk_size: 4, expected_chunks: &vec!(3) }, t4: Input { // N = 2^3, (log_2(N))^2 = 9 byte_slice: &[0b0111_1101], last_byte_len: 8, expected_chunk_size: 9, expected_chunks: &vec!(6) }, t5: Input { // N = 2^3 + 1, (log_2(N))^2 = 9 byte_slice: &[0b0111_1101, 0b1000_0000], last_byte_len: 1, expected_chunk_size: 9, expected_chunks: &vec!(7) }, t6: Input { // N = 2^3 + 2, (log_2(N))^2 = 9 byte_slice: &[0b0111_1101, 0b1100_0000], last_byte_len: 2, expected_chunk_size: 9, expected_chunks: &vec!(7, 8) }, bugfix_11: Input { // N = 2^1, (log_2(N))^2 = 4 byte_slice: &[0b1100_0000], last_byte_len: 2, expected_chunk_size: 1, expected_chunks: &vec!(1, 2) }, bugfix_11110110_11010101_01000101_11101111_10101011_10100101_01100011_00110100_01010101_10010000_01001100_10111111_00110011_00111110_01110101_11011100: Input { // N = 8 * 16 = 2^7, (log_2(N))^2 = 49 byte_slice: &[0b11110110, 0b11010101, 0b01000101, 0b11101111, 0b10101011, 0b10100101, 0b0_1100011, 0b00110100, 0b01010101, 0b10010000, 0b01001100, 0b10111111, 0b00_110011, 0b00111110, 0b01110101, 0b11011100], last_byte_len: 8, expected_chunk_size: 49, expected_chunks: &vec!(30, 53, 72) }, } }
fn main() { let arr = [1, 2, 3]; let bb = bbQQ(); println!("{}", arr[2]); println!("{}", bb); } fn bbQQ() -> i32 { // warning help: convert the identifier to snake case: `bb_qq` 303 }
use super::ValueDef;
use crate::{qjs, Map};
use serde::{Deserialize, Serialize};
use std::{
    fmt,
    fmt::{Display, Formatter, Result as FmtResult},
};

/// JSON-like dynamic configuration value, bridged both to serde (untagged)
/// and to QuickJS via the `qjs` derive macros.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, qjs::IntoJs, qjs::FromJs)]
#[serde(untagged, rename_all = "lowercase")]
#[quickjs(untagged, rename_all = "lowercase")]
pub enum Value {
    None,
    Bool(bool),
    Int(i64),
    Float(f64),
    String(String),
    List(Vec<Value>),
    Dict(Map<String, Value>),
}

/// Borrowed conversion: clones, then defers to the owned `IntoJs` impl.
impl<'js> qjs::IntoJs<'js> for &Value {
    fn into_js(self, ctx: qjs::Ctx<'js>) -> qjs::Result<qjs::Value<'js>> {
        self.clone().into_js(ctx)
    }
}

impl Default for Value {
    fn default() -> Self {
        Value::None
    }
}

impl Value {
    /// Builds the smallest value that satisfies the schema `def`:
    /// numbers are clamped up to the schema minimum (but never below 0),
    /// strings/lists/dicts are padded to their minimum length, and
    /// either/enum schemas take their first option.
    pub fn default_for(def: &ValueDef) -> Self {
        match def {
            ValueDef::Any {} | ValueDef::Option { .. } => Value::None,
            ValueDef::Bool {} => Value::Bool(false),
            ValueDef::Int { min, .. } => Value::Int((*min).max(0)),
            ValueDef::Float { min, .. } => Value::Float((*min).max(0.0)),
            // Filler string of `min` digit characters, cycling '0'..'9'.
            ValueDef::String { min, .. } => {
                Value::String((0..*min).map(|n| ((n % 10) as u8 + b'0') as char).collect())
            }
            ValueDef::Either { options } => options
                .iter()
                .next()
                .map(Value::default_for)
                .unwrap_or(Value::None),
            ValueDef::Enum { options, .. } => options.iter().next().cloned().unwrap_or(Value::None),
            ValueDef::Tuple { values } => {
                Value::List(values.iter().map(Value::default_for).collect())
            }
            ValueDef::Record { fields } => Value::Dict(
                fields
                    .iter()
                    .map(|(field, value)| (field.clone(), Value::default_for(value)))
                    .collect(),
            ),
            ValueDef::List { value, min, .. } => {
                Value::List((0..*min).map(|_| Value::default_for(value)).collect())
            }
            // Dict keys are synthesized as "0", "1", ... up to `min` entries.
            ValueDef::Dict { value, min, .. } => Value::Dict(
                (0..*min)
                    .map(|n| (n.to_string(), Value::default_for(value)))
                    .collect(),
            ),
        }
    }
}

impl Display for Value {
    /// Renders a JSON-ish literal. Strings go through `fmt::Debug`, so they
    /// appear quoted and escaped; lists/dicts use `[..]` / `{..}` with
    /// comma-separated elements.
    fn fmt(&self, f: &mut Formatter) -> FmtResult {
        match self {
            Value::None => "none".fmt(f),
            Value::Bool(value) => if *value { "true" } else { "false" }.fmt(f),
            Value::Int(value) => value.fmt(f),
            Value::Float(value) => value.fmt(f),
            Value::String(value) => fmt::Debug::fmt(value, f),
            Value::List(values) => {
                '['.fmt(f)?;
                let mut iter = values.iter();
                if let Some(value) = iter.next() {
                    value.fmt(f)?;
                    for value in iter {
                        ", ".fmt(f)?;
                        value.fmt(f)?;
                    }
                }
                ']'.fmt(f)
            }
            Value::Dict(values) => {
                '{'.fmt(f)?;
                let mut iter = values.iter();
                if let Some((field, value)) = iter.next() {
                    field.fmt(f)?;
                    ": ".fmt(f)?;
                    value.fmt(f)?;
                    for (field, value) in iter {
                        ", ".fmt(f)?;
                        field.fmt(f)?;
                        ": ".fmt(f)?;
                        value.fmt(f)?;
                    }
                }
                '}'.fmt(f)
            }
        }
    }
}
use futures::{Future, Stream}; use netlink_packet_core::{ header::flags::{NLM_F_ACK, NLM_F_CREATE, NLM_F_EXCL, NLM_F_REQUEST}, NetlinkFlags, NetlinkMessage, NetlinkPayload, }; use netlink_packet_route::RtnlMessage; use super::AddressHandle; use crate::{Error, ErrorKind, Handle}; lazy_static! { // Flags for `ip addr del` static ref DEL_FLAGS: NetlinkFlags = NetlinkFlags::from(NLM_F_REQUEST | NLM_F_ACK | NLM_F_EXCL | NLM_F_CREATE); } pub struct AddressFlushRequest { handle: Handle, index: u32, } impl AddressFlushRequest { pub(crate) fn new(handle: Handle, index: u32) -> Self { AddressFlushRequest { handle, index } } /// Execute the request pub fn execute(self) -> impl Future<Item = (), Error = Error> { let handle = self.handle.clone(); let index = self.index; AddressHandle::new(self.handle.clone()) .get() .execute() .filter(move |msg| msg.header.index == index) .map(move |msg| { let mut req = NetlinkMessage::from(RtnlMessage::DelAddress(msg)); req.header.flags = *DEL_FLAGS; handle.clone().request(req).for_each(|message| { if let NetlinkPayload::Error(err) = message.payload { Err(ErrorKind::NetlinkError(err).into()) } else { Ok(()) } }) }) // 0xff is arbitrary. It is the max amount of futures that will be // buffered. .buffer_unordered(0xff) // turn the stream into a future. .for_each(|()| Ok(())) } }
// Copyright 2015 The GeoRust Developers // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use rustc_serialize::json::{Json, Object}; use ::{Bbox, Crs, Error, Feature, FromObject, Geometry, Position}; pub fn get_coords_value<'a>(object: &Object) -> Result<&Json, Error> { return Ok(expect_property!(object, "coordinates", "Encountered Geometry object without 'coordinates' member")); } /// Used by FeatureCollection, Feature, Geometry pub fn get_bbox(object: &Object) -> Result<Option<Bbox>, Error> { let bbox_json = match object.get("bbox") { Some(b) => b, None => return Ok(None), }; let bbox_array = match bbox_json.as_array() { Some(b) => b, None => return Err(Error::new("Encountered 'bbox' with non-array value")), }; let mut bbox = vec![]; for item_json in bbox_array { match item_json.as_f64() { Some(item_f64) => bbox.push(item_f64), None => return Err(Error::new("Encountered non numeric value in 'bbox' array")), } } return Ok(Some(bbox)); } /// Used by FeatureCollection, Feature, Geometry pub fn get_crs(object: &Object) -> Result<Option<Crs>, Error> { let crs_json = match object.get("crs") { Some(b) => b, None => return Ok(None), }; let crs_object = match crs_json.as_object() { Some(c) => c, None => return Err(Error::new("Encountered 'crs' with non-object value")), }; return Crs::from_object(crs_object).map(Some); } /// Used by Feature pub fn get_properties(object: &Object) -> Result<Option<Object>, Error> { let properties = expect_property!(object, "properties", "missing 
'properties' field"); return match *properties { Json::Object(ref x) => Ok(Some(x.clone())), Json::Null => Ok(None), _ => return Err(Error::new("expected an Object or Null value for feature properties")), }; } /// Retrieve a single Position from the value of the "coordinates" key /// /// Used by Value::Point pub fn get_coords_one_pos(object: &Object) -> Result<Position, Error> { let coords_json = try!(get_coords_value(object)); return json_to_position(&coords_json); } /// Retrieve a one dimensional Vec of Positions from the value of the "coordinates" key /// /// Used by Value::MultiPoint and Value::LineString pub fn get_coords_1d_pos(object: &Object) -> Result<Vec<Position>, Error> { let coords_json = try!(get_coords_value(object)); return json_to_1d_positions(&coords_json); } /// Retrieve a two dimensional Vec of Positions from the value of the "coordinates" key /// /// Used by Value::MultiLineString and Value::Polygon pub fn get_coords_2d_pos(object: &Object) -> Result<Vec<Vec<Position>>, Error> { let coords_json = try!(get_coords_value(object)); return json_to_2d_positions(&coords_json); } /// Retrieve a three dimensional Vec of Positions from the value of the "coordinates" key /// /// Used by Value::MultiPolygon pub fn get_coords_3d_pos(object: &Object) -> Result<Vec<Vec<Vec<Position>>>, Error> { let coords_json = try!(get_coords_value(object)); return json_to_3d_positions(&coords_json); } /// Used by Value::GeometryCollection pub fn get_geometries(object: &Object) -> Result<Vec<Geometry>, Error> { let geometries_json = expect_property!(object, "geometries", "Encountered GeometryCollection without 'geometries' property"); let geometries_array = expect_array!(geometries_json); let mut geometries = vec![]; for json in geometries_array { let obj = expect_object!(json); let geometry = try!(Geometry::from_object(obj)); geometries.push(geometry); } return Ok(geometries); } /// Used by Feature pub fn get_id(object: &Object) -> Result<Option<Json>, Error> { return 
Ok(object.get("id").map(Clone::clone)); } /// Used by Feature pub fn get_geometry(object: &Object) -> Result<Geometry, Error> { let geometry = expect_object!(expect_property!(object, "geometry", "Missing 'geometry' field")); return Geometry::from_object(geometry); } /// Used by FeatureCollection pub fn get_features(object: &Object) -> Result<Vec<Feature>, Error> { let mut features = vec![]; let features_json = expect_array!(expect_property!(object, "features", "Missing 'features' field")); for feature in features_json { let feature: &Object = expect_object!(feature); let feature: Feature = try!(Feature::from_object(feature)); features.push(feature); } return Ok(features); } fn json_to_position(json: &Json) -> Result<Position, Error> { let coords_array = expect_array!(json); let mut coords = vec![]; for position in coords_array { coords.push(expect_f64!(position)); } return Ok(coords); } fn json_to_1d_positions(json: &Json) -> Result<Vec<Position>, Error> { let coords_array = expect_array!(json); let mut coords = vec![]; for item in coords_array { coords.push(try!(json_to_position(item))); } return Ok(coords); } fn json_to_2d_positions(json: &Json) -> Result<Vec<Vec<Position>>, Error> { let coords_array = expect_array!(json); let mut coords = vec![]; for item in coords_array { coords.push(try!(json_to_1d_positions(item))); } return Ok(coords); } fn json_to_3d_positions(json: &Json) -> Result<Vec<Vec<Vec<Position>>>, Error> { let coords_array = expect_array!(json); let mut coords = vec![]; for item in coords_array { coords.push(try!(json_to_2d_positions(item))); } return Ok(coords); }
use super::TokenCredential;
use azure_core::TokenResponse;
use chrono::{DateTime, TimeZone, Utc};
use oauth2::AccessToken;
use serde::{
    de::{self, Deserializer},
    Deserialize,
};
use std::str;
use url::Url;

const MSI_ENDPOINT_ENV_KEY: &str = "IDENTITY_ENDPOINT";
const MSI_SECRET_ENV_KEY: &str = "IDENTITY_HEADER";
const MSI_API_VERSION: &str = "2019-08-01";

/// Attempts authentication using a managed identity that has been assigned to the deployment environment.
///
/// This authentication type works in Azure VMs, App Service and Azure Functions applications, as well as the Azure Cloud Shell
///
/// Built up from docs at [https://docs.microsoft.com/azure/app-service/overview-managed-identity#using-the-rest-protocol](https://docs.microsoft.com/azure/app-service/overview-managed-identity#using-the-rest-protocol)
pub struct ImdsManagedIdentityCredential;

/// Errors surfaced while acquiring a token from the managed-identity endpoint.
#[non_exhaustive]
#[derive(Debug, thiserror::Error)]
pub enum ManagedIdentityCredentialError {
    #[error("Error parsing url for MSI endpoint: {0}")]
    MsiEndpointParseUrlError(url::ParseError),
    #[error(
        "Missing MSI secret set in {} environment variable",
        MSI_SECRET_ENV_KEY
    )]
    MissingMsiSecret(std::env::VarError),
    #[error("Refresh token send error: {0}")]
    SendError(reqwest::Error),
    #[error("Error deserializing refresh token: {0}")]
    DeserializeError(reqwest::Error),
    #[error("The requested identity has not been assigned to this resource.")]
    IdentityUnavailableError,
    #[error("The request failed due to a gateway error.")]
    GatewayError,
}

#[async_trait::async_trait]
impl TokenCredential for ImdsManagedIdentityCredential {
    type Error = ManagedIdentityCredentialError;

    // Flow: resolve the MSI endpoint from IDENTITY_ENDPOINT (falling back to
    // the fixed IMDS address), add api-version/resource query parameters,
    // authenticate with the IDENTITY_HEADER secret, then map HTTP statuses:
    // 400 -> IdentityUnavailableError, 502/504 -> GatewayError; any other
    // status is deserialized as an MsiTokenResponse.
    // NOTE(review): other failure statuses (e.g. 500) fall through to the JSON
    // parse and surface as DeserializeError rather than a dedicated variant --
    // confirm that is intended.
    async fn get_token(&self, resource: &str) -> Result<TokenResponse, Self::Error> {
        let msi_endpoint = std::env::var(MSI_ENDPOINT_ENV_KEY)
            .unwrap_or_else(|_| "http://169.254.169.254/metadata/identity/oauth2/token".to_owned());
        let query_items = vec![("api-version", MSI_API_VERSION), ("resource", resource)];
        let msi_endpoint_url = Url::parse_with_params(&msi_endpoint, &query_items)
            .map_err(ManagedIdentityCredentialError::MsiEndpointParseUrlError)?;
        let msi_secret = std::env::var(MSI_SECRET_ENV_KEY)
            .map_err(ManagedIdentityCredentialError::MissingMsiSecret)?;
        let client = reqwest::Client::new();
        let response = client
            .get(msi_endpoint_url)
            .header("Metadata", "true")
            .header("X-IDENTITY-HEADER", msi_secret)
            .send()
            .await
            .map_err(ManagedIdentityCredentialError::SendError)?;
        match response.status().as_u16() {
            400 => Err(ManagedIdentityCredentialError::IdentityUnavailableError),
            502 | 504 => Err(ManagedIdentityCredentialError::GatewayError),
            _ => {
                let token_response = response
                    .json::<MsiTokenResponse>()
                    .await
                    .map_err(ManagedIdentityCredentialError::DeserializeError)?;
                Ok(TokenResponse::new(
                    token_response.access_token,
                    token_response.expires_on,
                ))
            }
        }
    }
}

/// Adapter so this credential can be used wherever an
/// `azure_core::TokenCredential` is expected; concrete errors are boxed.
#[async_trait::async_trait]
impl azure_core::TokenCredential for ImdsManagedIdentityCredential {
    async fn get_token(
        &self,
        resource: &str,
    ) -> Result<azure_core::TokenResponse, azure_core::Error> {
        TokenCredential::get_token(self, resource)
            .await
            .map_err(|error| azure_core::Error::GetTokenError(Box::new(error)))
    }
}

/// Deserializes an `expires_on` value sent as a *string* of unix epoch
/// seconds into a `DateTime<Utc>` (see the NOTE below).
pub fn expires_on_string<'de, D>(deserializer: D) -> Result<DateTime<Utc>, D::Error>
where
    D: Deserializer<'de>,
{
    let v = String::deserialize(deserializer)?;
    let as_i64 = v.parse::<i64>().map_err(de::Error::custom)?;
    Ok(Utc.timestamp(as_i64, 0))
}

// NOTE: expires_on is a String version of unix epoch time, not an integer. 
// https://docs.microsoft.com/en-us/azure/app-service/overview-managed-identity?tabs=dotnet#rest-protocol-examples #[derive(Debug, Clone, Deserialize)] struct MsiTokenResponse { pub access_token: AccessToken, #[serde(deserialize_with = "expires_on_string")] pub expires_on: DateTime<Utc>, pub token_type: String, pub resource: String, } #[cfg(test)] mod tests { use super::*; #[derive(Debug, Deserialize)] struct TestExpires { #[serde(deserialize_with = "expires_on_string")] date: DateTime<Utc>, } #[test] fn check_expires_on_string() { let as_string = r#"{"date": "1586984735"}"#; let expected = Utc.ymd(2020, 4, 15).and_hms(21, 5, 35); let parsed: TestExpires = serde_json::from_str(as_string).expect("deserialize should succeed"); assert_eq!(expected, parsed.date); } }
use proconio::{input, marker::*}; use std::*; fn main() { input! { chars: Chars, } let mut isYes = false; for i in 0..chars.len() { if i % 2 == 0 { if !chars[i].is_uppercase() { isYes = true; }else{ isYes = false; } } else { if chars[i].is_uppercase() { isYes = true; }else{ isYes = false; } } if !isYes { break; } } if isYes { println!("Yes"); } else { println!("No"); } }
use crate::async_message_handler_with_span; use crate::{ db::{ self, session::{InternalSession, SessionId}, DbExecutor, }, span::{AsyncSpanHandler, SpanMessage}, }; use actix::prelude::*; use color_eyre::eyre::Report; use db::user::UserId; use tracing::info; use tracing::{debug, span, Level}; #[derive(Default)] pub struct SessionActor {} impl Actor for SessionActor { type Context = Context<Self>; fn started(&mut self, _ctx: &mut Self::Context) { info!("Session actor started"); } } impl SystemService for SessionActor {} impl Supervised for SessionActor {} #[derive(Message, Clone)] #[rtype(result = "Result<Option<InternalSession>, Report>")] pub struct SessionById(pub SessionId); async_message_handler_with_span!({ impl AsyncSpanHandler<SessionById> for SessionActor { async fn handle(msg: SessionById) -> Result<Option<InternalSession>, Report> { debug!("Handling session by id"); DbExecutor::from_registry() .send(SpanMessage::new(db::session::SessionById(msg.0))) .await? } } }); #[derive(Message, Clone)] #[rtype(result = "Result<InternalSession, Report>")] pub struct SaveSession(pub UserId); async_message_handler_with_span!({ impl AsyncSpanHandler<SaveSession> for SessionActor { async fn handle(msg: SaveSession) -> Result<InternalSession, Report> { let user_id = msg.0.clone(); let span = span!( Level::DEBUG, "save session", user_id = user_id.as_string().as_str() ); let _enter = span.enter(); debug!("Saving session"); DbExecutor::from_registry() .send(SpanMessage::new(db::session::SaveSession(msg.0))) .await? } } });
use regex::Regex;
use std::collections::{HashMap, HashSet};
use std::num::ParseIntError;
use std::str::FromStr;

/// Splits the puzzle input on the blank section separator and returns
/// (part-1 answer, opcode-number -> operation map, part-2 program text).
fn input_gen(input: &str) -> (u32, HashMap<u32, OPCODE>, &str) {
    let mut parts = input.split("\n\n\n");
    let part1 = parts.next().unwrap();
    let (part1_answer, op_map) = run_samples(
        part1
            .split("\n\n")
            .map(|s| s.trim().parse::<Sample>().unwrap())
            .collect(),
    );
    (part1_answer, op_map, parts.next().unwrap())
}

#[aoc(day16, part1)]
fn solve_part1(input: &str) -> u32 {
    input_gen(input).0
}

/// Part 2: execute the sample-free program with the deduced opcode mapping
/// and report register 0.
#[aoc(day16, part2)]
fn solve_part2(input: &str) -> usize {
    let (_, op_map, part2_input) = input_gen(input);
    let mut model = Model::new(op_map);
    for line in part2_input.trim().lines() {
        // NOTE(review): `mut` is unnecessary here (the Vec is never mutated)
        // and triggers an unused-mut lint.
        let mut instruction: Vec<_> = line
            .split_whitespace()
            .map(|s| s.parse::<usize>().unwrap())
            .collect();
        model.execute(&instruction);
    }
    println!("registers: {:?}", &model.registers);
    model.registers[0]
}

/// Tries all 16 operations against every before/after sample.
/// Returns (count of samples matching >= 3 candidate ops, per-opcode candidate
/// sets narrowed by intersection across samples and then resolved by
/// elimination).
/// NOTE(review): `result` never needs `mut`, and `possible_op` duplicates
/// `opcode_candidates.len()` — both are lint fodder but harmless.
fn run_samples(samples: Vec<Sample>) -> (u32, HashMap<u32, OPCODE>) {
    use self::OPCODE::*;
    let mut three_or_more = 0;
    let mut op_map: HashMap<u32, HashSet<OPCODE>> = HashMap::new();
    for s in &samples {
        let mut possible_op = 0;
        let mut result = s.after[s.output];
        let mut opcode_candidates = HashSet::new();
        // check addr
        if s.before[s.input_a] + s.before[s.input_b] == result {
            opcode_candidates.insert(Addr);
            possible_op += 1;
        }
        // check addi
        if s.before[s.input_a] + s.input_b == result {
            opcode_candidates.insert(Addi);
            possible_op += 1;
        }
        // check mulr
        if s.before[s.input_a] * s.before[s.input_b] == result {
            opcode_candidates.insert(Mulr);
            possible_op += 1;
        }
        // check muli
        if s.before[s.input_a] * s.input_b == result {
            opcode_candidates.insert(Muli);
            possible_op += 1;
        }
        // check banr
        if s.before[s.input_a] & s.before[s.input_b] == result {
            opcode_candidates.insert(Banr);
            possible_op += 1;
        }
        // check bani
        if s.before[s.input_a] & s.input_b == result {
            opcode_candidates.insert(Bani);
            possible_op += 1;
        }
        // check borr
        if s.before[s.input_a] | s.before[s.input_b] == result {
            opcode_candidates.insert(Borr);
            possible_op += 1;
        }
        // check bori
        if s.before[s.input_a] | s.input_b == result {
            opcode_candidates.insert(Bori);
            possible_op += 1;
        }
        // check setr
        if s.before[s.input_a] == result {
            opcode_candidates.insert(Setr);
            possible_op += 1;
        }
        // check seti
        if s.input_a == result {
            opcode_candidates.insert(Seti);
            possible_op += 1;
        }
        // Comparison ops can only ever produce 0 or 1, so skip them otherwise.
        if result == 0 || result == 1 {
            // check gtir
            if (s.input_a > s.before[s.input_b]) as usize == result {
                opcode_candidates.insert(Gtir);
                possible_op += 1;
            }
            // check gtri
            if (s.before[s.input_a] > s.input_b) as usize == result {
                opcode_candidates.insert(Gtri);
                possible_op += 1;
            }
            // check gtrr
            if (s.before[s.input_a] > s.before[s.input_b]) as usize == result {
                opcode_candidates.insert(Gtrr);
                possible_op += 1;
            }
            // check eqir
            if (s.input_a == s.before[s.input_b]) as usize == result {
                opcode_candidates.insert(Eqir);
                possible_op += 1;
            }
            // check eqri
            if (s.before[s.input_a] == s.input_b) as usize == result {
                opcode_candidates.insert(Eqri);
                possible_op += 1;
            }
            // check eqrr
            if (s.before[s.input_a] == s.before[s.input_b]) as usize == result {
                opcode_candidates.insert(Eqrr);
                possible_op += 1;
            }
        }
        if possible_op >= 3 {
            three_or_more += 1;
        }
        // Intersect with candidates from earlier samples of the same opcode.
        if let Some(opcodes) = op_map.get(&s.opcode) {
            op_map.insert(
                s.opcode,
                opcodes.intersection(&opcode_candidates).cloned().collect(),
            );
        } else {
            op_map.insert(s.opcode, opcode_candidates);
        }
    }
    (three_or_more, final_opcode_map(op_map))
}

/// Resolves the candidate sets by elimination: repeatedly pin every opcode
/// whose set is a singleton and remove that operation from all other sets.
/// NOTE(review): if the samples ever leave an ambiguity (no singleton left
/// before all 16 are resolved) this loops forever — fine for well-formed AoC
/// input, but worth knowing.
fn final_opcode_map(mut m: HashMap<u32, HashSet<OPCODE>>) -> HashMap<u32, OPCODE> {
    let mut result = HashMap::new();
    while result.len() < 16 {
        let mut removed = Vec::new();
        for (opcode, names) in &m {
            if names.len() == 1 {
                let name = names.iter().next().unwrap().clone();
                result.insert(*opcode, name.clone());
                removed.push(name);
            }
        }
        for (opcode, names) in &mut m {
            for r in &removed {
                names.remove(r);
            }
        }
    }
    result
}

/// The 16 operations of the wrist-device CPU (r = register operand,
/// i = immediate operand).
#[derive(Hash, Eq, PartialEq, Debug, Copy, Clone)]
enum OPCODE {
    Addr,
    Addi,
    Mulr,
    Muli,
    Banr,
    Bani,
    Bori,
    Borr,
    Setr,
    Seti,
    Gtir,
    Gtri,
    Gtrr,
    Eqir,
    Eqri,
    Eqrr,
}

/// Four-register machine plus the deduced numeric-opcode -> operation map.
struct Model {
    op_map: HashMap<u32, OPCODE>,
    registers: [usize; 4],
}

impl Model {
    fn new(op_map: HashMap<u32, OPCODE>) -> Self {
        Model {
            op_map,
            registers: [0, 0, 0, 0],
        }
    }

    /// Executes one `[opcode, a, b, c]` instruction, writing to register `c`.
    fn execute(&mut self, instruction: &[usize]) {
        use self::OPCODE::*;
        let (opcode, a, b, c) = (
            instruction[0] as u32,
            instruction[1],
            instruction[2],
            instruction[3],
        );
        match self.op_map[&opcode] {
            Addr => {
                self.registers[c] = self.registers[a] + self.registers[b];
            }
            Addi => {
                self.registers[c] = self.registers[a] + b;
            }
            Mulr => {
                self.registers[c] = self.registers[a] * self.registers[b];
            }
            Muli => {
                self.registers[c] = self.registers[a] * b;
            }
            Banr => {
                self.registers[c] = self.registers[a] & self.registers[b];
            }
            Bani => {
                self.registers[c] = self.registers[a] & b;
            }
            Borr => {
                self.registers[c] = self.registers[a] | self.registers[b];
            }
            Bori => {
                self.registers[c] = self.registers[a] | b;
            }
            Setr => {
                self.registers[c] = self.registers[a];
            }
            Seti => {
                self.registers[c] = a;
            }
            Gtir => {
                self.registers[c] = (a > self.registers[b]) as usize;
            }
            Gtri => {
                self.registers[c] = (self.registers[a] > b) as usize;
            }
            Gtrr => {
                self.registers[c] = (self.registers[a] > self.registers[b]) as usize;
            }
            Eqir => {
                self.registers[c] = (a == self.registers[b]) as usize;
            }
            Eqri => {
                self.registers[c] = (self.registers[a] == b) as usize;
            }
            Eqrr => {
                self.registers[c] = (self.registers[a] == self.registers[b]) as usize;
            }
        }
    }
}

/// One before/instruction/after observation from the puzzle input.
#[derive(Debug)]
struct Sample {
    before: [usize; 4],
    after: [usize; 4],
    input_a: usize,
    input_b: usize,
    output: usize,
    opcode: u32,
}

impl FromStr for Sample {
    type Err = ParseIntError;

    // Parses the three-line "Before: [..] / OP A B C / After: [..]" form with
    // a shared, lazily-compiled regex ((?x)/(?-x) toggles keep the literal
    // sections whitespace-exact).
    // NOTE(review): the lowercase `regexep` name trips the
    // non-upper-case-globals lint on the lazy_static-generated static.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        lazy_static! {
            static ref regexep: Regex = Regex::new(
                r"(?x) (?-x)Before: \[(?P<b0>\d), (?P<b1>\d), (?P<b2>\d), (?P<b3>\d)\](?x)\n (?-x)(?P<OP>\d+) (?P<A>\d) (?P<B>\d) (?P<C>\d)(?x)\n (?-x)After: \[(?P<a0>\d), (?P<a1>\d), (?P<a2>\d), (?P<a3>\d)\]"
            )
            .unwrap();
        }
        assert!(regexep.is_match(s), "regex does not match string");
        let caps = regexep.captures(s).unwrap();
        let before = [
            caps["b0"].parse()?,
            caps["b1"].parse()?,
            caps["b2"].parse()?,
            caps["b3"].parse()?,
        ];
        let after = [
            caps["a0"].parse()?,
            caps["a1"].parse()?,
            caps["a2"].parse()?,
            caps["a3"].parse()?,
        ];
        Ok(Sample {
            before,
            after,
            opcode: caps["OP"].parse()?,
            input_a: caps["A"].parse()?,
            input_b: caps["B"].parse()?,
            output: caps["C"].parse()?,
        })
    }
}
use std::env; use std::process; use std::time::SystemTime; use minigrep::Config; fn main() { let config = Config::new(env::args()).unwrap_or_else(|err| { eprintln!("Problem parsing arguments: {}", err); process::exit(1); }); println!("\t Searching for \"{}\" in file: {}\n\n\n", config.query, config.filename); let start = std::time::SystemTime::now(); if let Err(e) = minigrep::run(config) { eprintln!("Application error: {}", e); process::exit(1); } let end = std::time::SystemTime::now(); let time = end.duration_since(start).unwrap().as_millis(); println!("{}", time); }
use _rustgrimp::importgraph::ImportGraph;
use _rustgrimp::layers::{find_illegal_dependencies, Level};
use serde_json::{Map, Value};
use std::collections::{HashMap, HashSet};
use std::fs;

/// Smoke test: the layer analysis must run to completion over a large
/// real-world import graph loaded from a JSON fixture.
#[test]
fn test_large_graph() {
    let data = fs::read_to_string("tests/large_graph.json").expect("Unable to read file");
    let value: Value = serde_json::from_str(&data).unwrap();
    let items: &Map<String, Value> = value.as_object().unwrap();

    // Reshape the {importer: [imported, ...]} JSON object into borrowed sets.
    let importeds_by_importer: HashMap<&str, HashSet<&str>> = items
        .iter()
        .map(|(importer, importeds_value)| {
            let importeds: HashSet<&str> = importeds_value
                .as_array()
                .unwrap()
                .iter()
                .map(|imported| imported.as_str().unwrap())
                .collect();
            (importer.as_str(), importeds)
        })
        .collect();
    let graph = ImportGraph::new(importeds_by_importer);

    // Highest layer first, one layer per level.
    let levels: Vec<Level> = ["plugins", "interfaces", "application", "domain", "data"]
        .iter()
        .map(|&name| Level { layers: vec![name] })
        .collect();
    let containers = HashSet::from(["mypackage"]);

    find_illegal_dependencies(&graph, &levels, &containers);
}
use std::{ collections::HashMap, os::raw::c_void, sync::{ atomic::{AtomicBool, AtomicUsize, Ordering::SeqCst}, Mutex, }, };

// Install the recording allocator into tree-sitter before main() runs.
#[ctor::ctor]
unsafe fn initialize_allocation_recording() {
    tree_sitter::set_allocator(
        Some(ts_record_malloc),
        Some(ts_record_calloc),
        Some(ts_record_realloc),
        Some(ts_record_free),
    );
}

// Newtype so a raw pointer can be used as a HashMap key behind a Mutex.
#[derive(Debug, PartialEq, Eq, Hash)]
struct Allocation(*const c_void);
// NOTE(review): the pointer is only compared/hashed here, never dereferenced,
// which is why these impls appear sound.
unsafe impl Send for Allocation {}
unsafe impl Sync for Allocation {}

#[derive(Default)]
struct AllocationRecorder {
    // Recording happens only between record()'s start/stop toggles.
    enabled: AtomicBool,
    // Monotonically increasing index assigned to each allocation.
    allocation_count: AtomicUsize,
    // Live pointer -> allocation index; entries removed on free.
    outstanding_allocations: Mutex<HashMap<Allocation, usize>>,
}

thread_local! {
    // One recorder per thread: only allocations made on the thread that
    // called record() are tracked. NOTE(review): assumes tree-sitter
    // allocates on the calling thread — confirm if tests spawn threads.
    static RECORDER: AllocationRecorder = Default::default();
}

// The real libc allocator entry points that the recording shims delegate to.
extern "C" {
    fn malloc(size: usize) -> *mut c_void;
    fn calloc(count: usize, size: usize) -> *mut c_void;
    fn realloc(ptr: *mut c_void, size: usize) -> *mut c_void;
    fn free(ptr: *mut c_void);
}

/// Run `f` with allocation tracking enabled; panics if any tree-sitter
/// allocation made during `f` is still outstanding when it returns.
pub fn record<T>(f: impl FnOnce() -> T) -> T {
    // Reset state and switch recording on.
    RECORDER.with(|recorder| {
        recorder.enabled.store(true, SeqCst);
        recorder.allocation_count.store(0, SeqCst);
        recorder.outstanding_allocations.lock().unwrap().clear();
    });
    let value = f();
    // Switch recording off and drain whatever was never freed.
    let outstanding_allocation_indices = RECORDER.with(|recorder| {
        recorder.enabled.store(false, SeqCst);
        recorder.allocation_count.store(0, SeqCst);
        recorder
            .outstanding_allocations
            .lock()
            .unwrap()
            .drain()
            .map(|e| e.1)
            .collect::<Vec<_>>()
    });
    if !outstanding_allocation_indices.is_empty() {
        panic!(
            "Leaked allocation indices: {:?}",
            outstanding_allocation_indices
        );
    }
    value
}

// Note a new allocation (no-op unless recording is enabled).
fn record_alloc(ptr: *mut c_void) {
    RECORDER.with(|recorder| {
        if recorder.enabled.load(SeqCst) {
            let count = recorder.allocation_count.fetch_add(1, SeqCst);
            recorder
                .outstanding_allocations
                .lock()
                .unwrap()
                .insert(Allocation(ptr), count);
        }
    });
}

// Forget a freed allocation (no-op unless recording is enabled).
fn record_dealloc(ptr: *mut c_void) {
    RECORDER.with(|recorder| {
        if recorder.enabled.load(SeqCst) {
            recorder
                .outstanding_allocations
                .lock()
                .unwrap()
                .remove(&Allocation(ptr));
        }
    });
}

// Shims handed to tree_sitter::set_allocator: delegate to libc, then record.
unsafe extern "C" fn ts_record_malloc(size: usize) -> *mut c_void {
    let result = malloc(size);
    record_alloc(result);
    result
}

unsafe extern "C" fn ts_record_calloc(count: usize, size: usize) -> *mut c_void {
    let result = calloc(count, size);
    record_alloc(result);
    result
}

unsafe extern "C" fn ts_record_realloc(ptr: *mut c_void, size: usize) -> *mut c_void {
    // The old pointer may be freed or moved by realloc, so unregister it
    // before delegating and register whatever comes back.
    record_dealloc(ptr);
    let result = realloc(ptr, size);
    record_alloc(result);
    result
}

unsafe extern "C" fn ts_record_free(ptr: *mut c_void) {
    record_dealloc(ptr);
    free(ptr);
}
/* * Copyright (c) 2013, David Renshaw (dwrenshaw@gmail.com) * * See the LICENSE file in the capnproto-rust root directory. */

// NOTE(review): this file is pre-1.0 Rust (circa 2013): `@mut` managed
// pointers, `do`-blocks, `&fn` closures, `'self` lifetimes and `uint` were
// all removed from the language long ago. It will not compile on any modern
// toolchain; it is kept verbatim as historical reference.
use std;
use common::*;
use message;

pub type SegmentId = u32;

// A read-only view of one message segment together with its owning reader.
pub struct SegmentReader<'self> {
    messageReader : &'self message::MessageReader<'self>,
    segment : &'self [u8]
}

// Bump-allocator state for one writable segment of a message under
// construction: `pos` is the next free word, `size` the capacity in words.
pub struct SegmentBuilder {
    messageBuilder : @mut message::MessageBuilder,
    id : SegmentId,
    pos : WordCount,
    size : WordCount
}

impl SegmentBuilder {
    // The segment id is simply its position in the builder's segment list.
    pub fn new(messageBuilder : @mut message::MessageBuilder,
               size : WordCount) -> SegmentBuilder {
        SegmentBuilder {
            messageBuilder : messageBuilder,
            id : messageBuilder.segments.len() as SegmentId,
            pos : 0,
            size : size
        }
    }

    // Borrow this segment's bytes mutably for the duration of `f`.
    pub fn withMutSegment<T>(@mut self, f : &fn(&mut [u8]) -> T) -> T {
        f(self.messageBuilder.segments[self.id])
    }

    // Reserve `amount` words; returns the starting word index, or None when
    // the segment does not have enough space left.
    pub fn allocate(@mut self, amount : WordCount) -> Option<WordCount> {
        if (amount > self.size - self.pos) {
            return None;
        } else {
            let result = self.pos;
            self.pos += amount;
            return Some(result);
        }
    }

    // Words still unallocated in this segment.
    pub fn available(@mut self) -> WordCount {
        self.size - self.pos
    }

    // Fill `count` bytes starting at byte offset `ptr` with `c`.
    #[inline]
    pub fn memset(@mut self, ptr: uint, c: u8, count: uint) {
        do self.withMutSegment |segment| {
            unsafe {
                let p = segment.unsafe_mut_ref(ptr);
                std::ptr::set_memory(p, c, count)
            }
        }
    }

    // Expose a read-only SegmentReader view of this segment to `f`.
    pub fn asReader<T>(@mut self, f : &fn(SegmentReader) -> T) -> T {
        do self.messageBuilder.asReader |messageReader| {
            f(SegmentReader {
                messageReader : &messageReader,
                segment : messageReader.segments[self.id]
            })
        }
    }
}
//! An easier-to-use interface for the `uritemplate` crate.

use std::fmt;

use uritemplate::{IntoTemplateVar, TemplateVar};

/// A URI Template.
///
/// See IETF RFC 6570.
pub struct UriTemplate(uritemplate::UriTemplate);

/// What to substitute for a list variable that turns out to be empty.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum IfEmpty {
    /// Assign empty value.
    Set,
    /// Assign comma (`,`).
    Comma,
    /// Omit the whole variable.
    Skip,
}

impl UriTemplate {
    /// Prepare a new template for evaluation.
    /// Takes a template string and returns a new [`UriTemplate`].
    #[must_use]
    pub fn new(template: &str) -> Self {
        Self(uritemplate::UriTemplate::new(template))
    }

    /// Bind a variable to a scalar value.
    #[allow(clippy::needless_pass_by_value)] // clippy bug
    pub fn set_scalar(&mut self, varname: impl AsRef<str>, var: impl ToString) -> &mut Self {
        self.0
            .set(varname.as_ref(), TemplateVar::Scalar(var.to_string()));
        self
    }

    /// Bind a variable to a scalar value, if there is a value.
    /// If the value is `None`, omit the variable.
    pub fn set_optional_scalar(
        &mut self,
        varname: impl AsRef<str>,
        var: Option<impl ToString>,
    ) -> &mut Self {
        match var {
            Some(var) => self.set_scalar(varname, var),
            None => self,
        }
    }

    /// Bind a variable to a list value.
    pub fn set_list<T: ToString>(
        &mut self,
        varname: impl AsRef<str>,
        var: impl IntoIterator<Item = T>,
    ) -> &mut Self {
        self.0.set(
            varname.as_ref(),
            TemplateVar::List(var.into_iter().map(|val| val.to_string()).collect()),
        );
        self
    }

    /// Bind a variable to a list value, specifying what happens if the value
    /// is empty.
    pub fn set_list_with_if_empty<T: ToString>(
        &mut self,
        varname: impl AsRef<str>,
        var: impl IntoIterator<Item = T>,
        if_empty: IfEmpty,
    ) -> &mut Self {
        // Pull the first element to learn whether the iterator is empty,
        // then stitch it back onto the front when it is not.
        let mut var = var.into_iter();
        match var.next() {
            Some(first) => self.set_list(varname, std::iter::once(first).chain(var)),
            None => match if_empty {
                IfEmpty::Set => self.set_list(varname, std::iter::empty::<String>()),
                IfEmpty::Comma => self.set_scalar(varname, ","),
                IfEmpty::Skip => self,
            },
        }
    }

    /// Bind a variable to an associative array value.
    pub fn set_assoc(
        &mut self,
        varname: impl AsRef<str>,
        var: impl IntoIterator<Item = (String, String)>,
    ) -> &mut Self {
        self.0.set(
            varname.as_ref(),
            TemplateVar::AssociativeArray(var.into_iter().collect()),
        );
        self
    }

    /// Bind the variable to a raw [`IntoTemplateVar`] implementor.
    ///
    /// This is a lower-level API, suitable for utilizing [`uritemplate`]
    /// crate API.
    pub fn set_template_var(
        &mut self,
        varname: impl AsRef<str>,
        var: impl IntoTemplateVar,
    ) -> &mut Self {
        self.0.set(varname.as_ref(), var);
        self
    }

    /// Apply a function to the value and return self.
    ///
    /// Useful for maintaining method chains.
    pub fn tap(&mut self, f: impl FnOnce(&mut Self)) -> &mut Self {
        f(self);
        self
    }

    /// Delete a variable binding set before.
    pub fn delete(&mut self, varname: impl AsRef<str>) -> bool {
        self.0.delete(varname.as_ref())
    }

    /// Delete all variable bindings.
    pub fn delete_all(&mut self) {
        self.0.delete_all()
    }

    /// Build a URL from the template and bound variable values.
    pub fn build(&mut self) -> String {
        self.0.build()
    }
}

impl fmt::Debug for UriTemplate {
    // NOTE(review): placeholder output — presumably the wrapped
    // uritemplate::UriTemplate does not implement Debug itself; confirm.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str("UriTemplate")
    }
}
/* * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT license. */
use crate::common::ANNResult;

use super::ArcConcurrentBoxedQueue;
use super::{scratch_traits::Scratch};

use std::time::Duration;

/// RAII-style checkout of one scratch buffer from a shared pool: the buffer
/// is held for this manager's lifetime and returned (cleared) on drop.
pub struct ScratchStoreManager<T: Scratch> {
    // The checked-out buffer; always Some after new() succeeds, taken on drop.
    scratch: Option<Box<T>>,
    // Pool the buffer is returned to.
    scratch_pool: ArcConcurrentBoxedQueue<T>,
}

impl<T: Scratch> ScratchStoreManager<T> {
    /// Pop a scratch buffer from the pool, waiting in `wait_time` increments
    /// until one becomes available.
    pub fn new(scratch_pool: ArcConcurrentBoxedQueue<T>, wait_time: Duration) -> ANNResult<Self> {
        let mut scratch = scratch_pool.pop()?;
        while scratch.is_none() {
            // Pool is empty: block until another thread pushes a buffer back.
            scratch_pool.wait_for_push_notify(wait_time)?;
            scratch = scratch_pool.pop()?;
        }

        Ok(ScratchStoreManager {
            scratch,
            scratch_pool,
        })
    }

    /// Mutable access to the checked-out scratch space.
    pub fn scratch_space(&mut self) -> Option<&mut T> {
        self.scratch.as_deref_mut()
    }
}

impl<T: Scratch> Drop for ScratchStoreManager<T> {
    // Clear the buffer and hand it back to the pool; a failed push is
    // deliberately ignored (the buffer is simply dropped).
    fn drop(&mut self) {
        if let Some(mut scratch) = self.scratch.take() {
            scratch.clear();
            let _ = self.scratch_pool.push(scratch);
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[derive(Debug)]
    struct MyScratch {
        data: Vec<i32>,
    }

    impl Scratch for MyScratch {
        fn clear(&mut self) {
            self.data.clear();
        }
    }

    #[test]
    fn test_scratch_store_manager() {
        let wait_time = Duration::from_millis(100);

        let scratch_pool = ArcConcurrentBoxedQueue::new();
        for i in 1..3 {
            scratch_pool.push(Box::new(MyScratch {
                data: vec![i, 2 * i, 3 * i],
            })).unwrap();
        }

        let mut manager = ScratchStoreManager::new(scratch_pool.clone(), wait_time).unwrap();
        let scratch_space = manager.scratch_space().unwrap();
        assert_eq!(scratch_space.data, vec![1, 2, 3]);

        // At this point, the ScratchStoreManager will go out of scope,
        // causing the Drop implementation to be called, which should
        // call the clear method on MyScratch.
        drop(manager);

        let current_scratch = scratch_pool.pop().unwrap().unwrap();
        assert_eq!(current_scratch.data, vec![2, 4, 6]);
    }
}
/// A simple look-at perspective camera.
///
/// NOTE: `zfar` plays no part in the infinite-reverse-Z projection built
/// below; the field is kept for API compatibility.
pub struct Camera {
    pub eye: glam::Vec3,
    pub target: glam::Vec3,
    pub up: glam::Vec3,
    pub aspect: f32,
    pub fovy: f32,
    pub znear: f32,
    pub zfar: f32,
}

impl Camera {
    /// Compose the view and projection transforms into a single
    /// clip-from-world matrix.
    pub fn build_view_projection_matrix(&self) -> glam::Mat4 {
        let world_to_view = glam::Mat4::look_at_rh(self.eye, self.target, self.up);
        let view_to_clip =
            glam::Mat4::perspective_infinite_reverse_rh(self.fovy, self.aspect, self.znear);
        view_to_clip * world_to_view
    }
}
#[macro_use]
extern crate quick_error;

use itertools::Itertools;
use rayon::prelude::*;

use intcode::Intcode;

use std::borrow::Cow;
use std::env;
use std::io;
use std::num::ParseIntError;

quick_error! {
    // Unified error type so `?` works over both I/O and parse failures.
    #[derive(Debug)]
    pub enum SuperError {
        IoError(err: io::Error) { from() }
        ParseIntError(err: ParseIntError) { from() }
    }
}

type SuperResult<T> = Result<T, SuperError>;

/// AoC 2019 day 7: find the best amplifier phase settings.
/// Reads the Intcode program from the file named by argv[1] ("input" by default).
fn main() -> SuperResult<()> {
    let input = {
        let name: Cow<'static, str> = env::args().nth(1)
            .map(|s| s.into()).unwrap_or_else(|| "input".into());
        std::fs::read_to_string(name.as_ref())?
    };
    let memory = input.trim().split(',')
        .map(str::parse)
        .collect::<Result<Vec<isize>, ParseIntError>>()?;
    println!("Part 1: {}", run_part1_rayon(&memory)?);
    println!("Part 2: {}", run_part2_rayon(&memory)?);
    Ok(())
}

// Sequential part 1: best output over every phase ordering of 0..5.
#[allow(dead_code)]
fn run_part1(memory: &[isize]) -> SuperResult<isize> {
    (0..5).permutations(5).unique()
        .map(|sequence| run_amplifier(memory, &sequence))
        //.try_fold(0, |a, b| Ok(std::cmp::max(a, b?)))
        .fold_results(0, std::cmp::max) // More convenient alternative to try_fold from Itertools
}

// Parallel part 1: enumerate permutations by index so rayon can split them.
fn run_part1_rayon(memory: &[isize]) -> SuperResult<isize> {
    // Rayon doesn't play nicely with Itertools, so roll our own permutation
    (0..factorial(5)).into_par_iter()
        .map(|i| {
            let mut sequence = [0, 1, 2, 3, 4];
            nth_permutation(&mut sequence, i);
            run_amplifier(memory, &sequence)
        })
        .try_reduce(|| 0, |a, b| Ok(std::cmp::max(a, b)))
}

// Sequential part 2: same idea with phases 5..10 and the feedback loop.
#[allow(dead_code)]
fn run_part2(memory: &[isize]) -> SuperResult<isize> {
    (5..10).permutations(5).unique()
        .map(|sequence| run_feedback_amplifier(memory, &sequence))
        .fold_results(0, std::cmp::max)
}

fn run_part2_rayon(memory: &[isize]) -> SuperResult<isize> {
    // Rayon doesn't play nicely with Itertools, so roll our own permutation
    (0..factorial(5)).into_par_iter()
        .map(|i| {
            let mut sequence = [5, 6, 7, 8, 9];
            nth_permutation(&mut sequence, i);
            run_feedback_amplifier(memory, &sequence)
        })
        .try_reduce(|| 0, |a, b| Ok(std::cmp::max(a, b)))
}

// i! — the number of permutations nth_permutation can index.
fn factorial(i: usize) -> usize {
    (1..=i).product()
}

// Decompose `value` into factorial-base digits, most significant first.
fn to_factoradic(mut value: usize, n: usize) -> Vec<usize> {
    let mut factoradic = vec![0; n];
    for i in 1..=n {
        factoradic[n - i] = value % i;
        value /= i;
    }
    factoradic
}

// Permute `sequence` in place into its `n`th permutation, in the same order
// Itertools::permutations enumerates them (verified by test_permutation).
fn nth_permutation(sequence: &mut [isize], n: usize) {
    // For each digit in the factoradic representation...
    for (i, &offset) in to_factoradic(n, sequence.len()).iter().enumerate() {
        // ...rotate the selected element into the ith position
        sequence[i ..= i + offset].rotate_right(1);
    }
}

#[test]
fn test_permutation() {
    (0..5).permutations(5).unique().enumerate()
        .for_each(|(i, expected)| {
            let mut sequence = [0, 1, 2, 3, 4];
            nth_permutation(&mut sequence, i);
            assert_eq!(&sequence[..], &expected[..]);
        });
}

// Run the amplifiers once in series: each program receives its phase setting
// and then the previous stage's output; the first stage starts from 0.
fn run_amplifier(memory: &[isize], sequence: &[isize]) -> SuperResult<isize> {
    let mut input = 0;
    for &phase in sequence {
        let mut program = Intcode::new(memory);
        program.run()?;
        program.resume(phase)?;
        program.resume(input)?;
        input = *program.output.first().ok_or_else(|| io::Error::new(io::ErrorKind::Other, "Program failed to produce output"))?;
    }
    Ok(input)
}

#[cfg(test)]
fn test_amplifier_helper(memory: &[isize], input: &[isize], expected: isize) {
    let output = run_amplifier(memory, input).unwrap();
    assert_eq!(output, expected);
}

// Example programs and expected best outputs from the puzzle statement.
#[test]
fn test_amplifier() {
    test_amplifier_helper(&[3,15,3,16,1002,16,10,16,1,16,15,15,4,15,99,0,0], &[4,3,2,1,0], 43210);
    test_amplifier_helper(&[3,23,3,24,1002,24,10,24,1002,23,-1,23,101,5,23,23,1,24,23,23,4,23,99,0,0], &[0,1,2,3,4], 54321);
    test_amplifier_helper(&[3,31,3,32,1002,32,10,32,1001,31,-2,31,1007,31,0,33,1002,33,7,33,1,33,31,31,1,32,31,31,4,31,99,0,0,0], &[1,0,4,3,2], 65210);
}

// Part 2: the five amplifiers run in a feedback loop — keep feeding the last
// output around the chain until a program stops accepting input.
fn run_feedback_amplifier(memory: &[isize], sequence: &[isize]) -> SuperResult<isize> {
    let mut input = 0;
    let mut amps = Vec::new();
    for &phase in sequence {
        let mut amp = Intcode::new(memory);
        amp.run()?;
        amp.resume(phase)?;
        amps.push(amp);
    }
    let mut halted = false;
    while !halted {
        for amp in &mut amps {
            // NOTE(review): a false return from resume() marks the loop
            // halted, but the remaining amps in this pass still get pumped
            // once before the while condition is re-checked.
            if !amp.resume(input)? {
                halted = true;
            }
            input = *amp.output.last().ok_or_else(|| io::Error::new(io::ErrorKind::Other, "Program failed to produce output"))?;
        }
    }
    Ok(input)
}

#[cfg(test)]
fn test_feedback_amplifier_helper(memory: &[isize], input: &[isize], expected: isize) {
    let output = run_feedback_amplifier(memory, input).unwrap();
    assert_eq!(output, expected);
}

#[test]
fn test_feedback_amplifier() {
    test_feedback_amplifier_helper(
        &[3,26,1001,26,-4,26,3,27,1002,27,2,27,1,27,26, 27,4,27,1001,28,-1,28,1005,28,6,99,0,0,5],
        &[9,8,7,6,5], 139629729);
    test_feedback_amplifier_helper(
        &[3,52,1001,52,-5,52,3,53,1,52,56,54,1007,54,5,55,1005,55,26,1001,54, -5,54,1105,1,12,1,53,54,53,1008,54,0,55,1001,55,1,55,2,53,55,53,4, 53,1001,56,-1,56,1005,56,6,99,0,0,0,0,10],
        &[9,7,8,5,6], 18216);
}
use config::Config;
use demangle::demangle as demangle_tww;
use failure::{Error, ResultExt};
use linker::{LinkedSection, SectionKind};
use regex::{Captures, Regex};
use rustc_demangle::demangle as demangle_rust;
use std::collections::HashMap;
use std::fs::File;
use std::io::{prelude::*, BufWriter};
use std::str;

/// Write a symbol-map listing for `sections` to the path configured at
/// `config.build.map`; silently does nothing when no path is configured.
/// When `original` is provided, its lines are appended after the generated
/// listing with their second field demangled.
pub fn create(
    config: &Config,
    original: Option<&[u8]>,
    sections: &[LinkedSection],
) -> Result<(), Error> {
    let path = match &config.build.map {
        Some(path) => path,
        None => return Ok(()),
    };
    let mut file = BufWriter::new(File::create(path).context("Couldn't create the symbol map")?);
    writeln!(file, ".text section layout")?;
    for section in sections {
        let mut section_name_buf;
        let section_name = section.section_name;
        // Rust text sections get demangled, and a trailing 19-char "::h<hash>"
        // disambiguator suffix is stripped when present.
        let section_name = if section_name.starts_with(".text.")
            && section.kind == SectionKind::TextSection
        {
            section_name_buf = demangle_rust(&section_name[".text.".len()..]).to_string();
            let mut section_name: &str = &section_name_buf;
            if section_name.len() >= 19 && &section_name[section_name.len() - 19..][..3] == "::h"
            {
                section_name = &section_name[..section_name.len() - 19];
            }
            section_name
        } else {
            section_name
        };
        writeln!(
            file,
            " 00000000 {:06x} {:08x} 4 {} \t{}",
            section.len - section.sym_offset,
            section.address + section.sym_offset,
            section_name,
            section.member_name
        )?;
    }
    if let Some(original) = original {
        // Capture groups: (1) leading field, (2) the symbol name to demangle,
        // (3) trailing remainder — only group 2 is rewritten.
        let regex = Regex::new(r"(\s{2}\d\s)(.*)(\s{2}.*)").unwrap();
        writeln!(file)?;
        writeln!(file)?;
        for line in str::from_utf8(original)?.lines() {
            let line = regex.replace(&line, |c: &Captures| {
                let demangled = demangle_tww(&c[2]);
                // Fall back to the original text when demangling fails.
                format!("{}{}{}", &c[1], demangled.unwrap_or(c[2].into()), &c[3])
            });
            writeln!(file, "{}", line)?;
        }
    }
    Ok(())
}

/// Parse a symbol map back into name -> address pairs, demangling names and
/// skipping section entries (names starting with '.').
pub fn parse(buf: &[u8]) -> Result<HashMap<String, u32>, Error> {
    let mut symbols = HashMap::new();
    // Group 1: 8-hex-digit address; group 2: the symbol name.
    let regex = Regex::new(r"\s{2}\w{8}\s\w{6}\s(\w{8}).{4}(.*)\s{2}").unwrap();
    let text = str::from_utf8(buf).context("The symbol map has invalid UTF-8")?;
    for line in text.lines() {
        if let Some(captures) = regex.captures(line) {
            let name = captures.get(2).unwrap().as_str();
            if !name.starts_with('.') {
                let address = u32::from_str_radix(captures.get(1).unwrap().as_str(), 16)
                    .context("Couldn't parse the address")?;
                symbols.insert(
                    // Keep the mangled name when demangling fails.
                    demangle_tww(name)
                        .map(|n| n.into_owned())
                        .unwrap_or_else(|_| name.to_owned()),
                    address,
                );
            }
        }
    }
    Ok(symbols)
}
use crate::{
    core::Policy,
    persistence::{Persistence, PersistenceResult},
};
use log::trace;
use rusqlite::InterruptHandle;
use serde_json::Value;
use std::collections::BTreeMap;

/// Decorator that forwards every `Persistence` call to the wrapped
/// implementation and trace-logs how long each call took.
pub struct Timed<P: Persistence>(P);

impl<P: Persistence> Timed<P> {
    pub fn new(inner: P) -> Timed<P> {
        Timed(inner)
    }
}

// Evaluate `$v`, trace-log the elapsed microseconds tagged with the file and
// line of the timed! invocation, then yield the value unchanged.
macro_rules! timed {
    ($v:expr) => {{
        let start = std::time::Instant::now();
        let ans = $v;
        trace!(
            "[{}:{}] {}us",
            file!(),
            line!(),
            start.elapsed().as_micros()
        );
        ans
    }};
}

impl<P: Persistence> Persistence for Timed<P> {
    // NOTE(review): #[track_caller] appears only on this one method, and
    // file!()/line!() inside timed! already expand at each invocation site —
    // the attribute looks vestigial; confirm before removing.
    #[track_caller]
    fn query_named(
        &self,
        name: String,
        params: BTreeMap<String, Value>,
    ) -> PersistenceResult<Value> {
        timed!(self.0.query_named(name, params))
    }

    fn mutate_named(&self, name: String, params: BTreeMap<String, Value>) -> PersistenceResult<()> {
        timed!(self.0.mutate_named(name, params))
    }

    fn query_raw(&self, query: String) -> PersistenceResult<Value> {
        timed!(self.0.query_raw(query))
    }

    fn mutate_raw(&self, stmt: String) -> PersistenceResult<()> {
        timed!(self.0.mutate_raw(stmt))
    }

    fn fetch_policy(&self) -> PersistenceResult<Policy> {
        timed!(self.0.fetch_policy())
    }

    fn set_policy(&self, policy: Policy) -> PersistenceResult<()> {
        timed!(self.0.set_policy(policy))
    }

    fn get_interrupt_handle(&self) -> InterruptHandle {
        timed!(self.0.get_interrupt_handle())
    }
}
use hound;

/// Render a MOD tracker file to a 32-bit float stereo WAV ("out.wav").
fn main() {
    let spec = hound::WavSpec {
        channels: 2,
        // NOTE(review): 48100 Hz is unusual (48_000 is the common rate) —
        // kept as-is, since changing it would alter playback speed.
        sample_rate: 48100,
        bits_per_sample: 32,
        sample_format: hound::SampleFormat::Float,
    };
    let mut writer = hound::WavWriter::create("out.wav", spec).unwrap();

    let song = mod_player::read_mod_file("mod_files/CHIP_SLAYER!.MOD");
    mod_player::textout::print_song_info(&song);
    let mut player_state: mod_player::PlayerState =
        mod_player::PlayerState::new(song.format.num_channels, spec.sample_rate);

    // Pump interleaved samples until the song ends or loops back.
    loop {
        let (left, right) = mod_player::next_sample(&song, &mut player_state);
        // write_sample returns a Result; the original dropped it, silently
        // producing a truncated file on I/O errors.
        writer.write_sample(left).unwrap();
        writer.write_sample(right).unwrap();
        if player_state.song_has_ended || player_state.has_looped {
            break;
        }
    }

    // finalize() patches the WAV header lengths; relying on Drop would
    // swallow any error it hits.
    writer.finalize().unwrap();
}
use std::cmp::Ordering;
use itertools::Itertools;
use std::collections::VecDeque;
use amethyst::{ core::{ alga::linear::EuclideanSpace, math::{Point2, Vector2}, Time, Transform, Named }, derive::SystemDesc, ecs::{Entities, Join, Read, ReadExpect, Write, ReadStorage, System, SystemData, WriteStorage}, input::{InputHandler, StringBindings, VirtualKeyCode}, window::ScreenDimensions, winit::MouseButton, renderer::Camera, };
use crate::{ age_of_sail::{Notifications, point_mouse_to_world, DISTANCE_THRESHOLD}, components::{Action, Ai, Cargo, Controllable, Course, Patrol, Port, Selected, Ship}, map::Map, };

// Distance within which a right-clicked point snaps to an existing entity.
pub const SNAP_THRESHOLD: f32 = 5.0;

/// Advances every ship along its Course: inserts A*-detour waypoints around
/// land, pops waypoints as they are reached, and moves the ship toward the
/// current waypoint scaled by base speed and frame time.
#[derive(SystemDesc)]
pub struct MoveShipsSystem;

impl<'s> System<'s> for MoveShipsSystem {
    type SystemData = ( ReadStorage<'s, Ship>, WriteStorage<'s, Course>, WriteStorage<'s, Transform>, Read<'s, Map>, Read<'s, Time>, );

    fn run(&mut self, (ships, mut courses, mut locals, map, time): Self::SystemData) {
        for (ship, course, local) in (&ships, &mut courses, &mut locals).join() {
            let ship_x = local.translation().x;
            let ship_y = local.translation().y;
            let ship_location = Point2::new(ship_x, ship_y);

            // Route around land: A* between ship and next waypoint; any
            // intermediate nodes get pushed in front of the course.
            if let Some(next_waypoint) = course.waypoints.front() {
                let graph = map.nodes_and_edges_connected(vec![ship_location, *next_waypoint]);
                // The last two nodes of the graph are the ship and the
                // waypoint themselves (appended by nodes_and_edges_connected
                // — NOTE(review): inferred from the index arithmetic; confirm).
                let mut points = VecDeque::from(graph.a_star(graph.nodes.len()-2, graph.nodes.len()-1));
                if points.len() > 2 {
                    // Remove first and last point
                    points.pop_front();
                    points.pop_back();
                    while let Some(point) = points.pop_back() {
                        course.waypoints.push_front(point);
                    }
                }
            }

            if let Some(next_waypoint) = course.waypoints.front() {
                if ship_location.distance(next_waypoint) < DISTANCE_THRESHOLD {
                    // Close enough: consume this waypoint.
                    course.waypoints.pop_front();
                } else {
                    let direction = Vector2::new(next_waypoint.x - ship_x, next_waypoint.y - ship_y).normalize();
                    let distance = ship_location.distance(&next_waypoint);
                    // Damp the step near the target so a fast time scale
                    // cannot overshoot the waypoint in one frame.
                    let closeness_modifier = if distance < DISTANCE_THRESHOLD * time.time_scale() {
                        distance / (time.time_scale() + f32::EPSILON)
                    } else {
                        1.0
                    };
                    local.prepend_translation_x( closeness_modifier * ship.base_speed * direction.x * time.delta_seconds(), );
                    local.prepend_translation_y( closeness_modifier * ship.base_speed * direction.y * time.delta_seconds(), );
                }
            }
        }
    }
}

/// Drives AI entities whose current action is Patrol around their waypoint
/// ring; on entering patrol mode the nearest waypoint is chosen first.
#[derive(SystemDesc)]
pub struct PatrolSystem;

impl<'s> System<'s> for PatrolSystem {
    type SystemData = ( Entities<'s>, ReadStorage<'s, Ai>, ReadStorage<'s, Transform>, WriteStorage<'s, Patrol>, WriteStorage<'s, Course>, );

    fn run(&mut self, (entities, ais, locals, mut patrols, mut courses): Self::SystemData) {
        for (e, ai, patrol, local) in (&entities, &ais, &mut patrols, &locals).join() {
            if ai.current_state().action == Action::Patrol {
                let e_location = Point2::new(local.translation().x, local.translation().y);
                let previous_action_is_patrol = ai.previous_state().action == Action::Patrol;
                // Continuing a patrol advances cyclically; (re)starting one
                // picks the closest waypoint instead.
                let new_next_waypoint_index = if previous_action_is_patrol {
                    (patrol.next_waypoint_index + 1) % patrol.waypoints.len()
                } else {
                    patrol .waypoints .iter() .enumerate() .min_by(|(_, a), (_, b)| { a.distance(&e_location) .partial_cmp(&(b.distance(&e_location))) .unwrap_or(Ordering::Equal) }) .map(|(index, _)| index) .unwrap()
                };
                // Re-plot when the current waypoint is reached, no course
                // exists yet, or patrol has just (re)started.
                if e_location.distance(&patrol.waypoints[patrol.next_waypoint_index]) < DISTANCE_THRESHOLD || courses.get(e).is_none() || !previous_action_is_patrol {
                    patrol.next_waypoint_index = new_next_waypoint_index;
                    courses .insert( e, Course { waypoints: VecDeque::from(vec![patrol.waypoints[new_next_waypoint_index]]), }, ) .unwrap();
                }
            }
        }
    }
}

/// Steers AI entities whose current action is Chase straight at the nearest
/// other ship, re-plotting the course every frame.
#[derive(SystemDesc)]
pub struct ChaseSystem;

impl<'s> System<'s> for ChaseSystem {
    type SystemData = ( Entities<'s>, ReadStorage<'s, Ai>, ReadStorage<'s, Transform>, ReadStorage<'s, Ship>, WriteStorage<'s, Course>, );

    fn run(&mut self, (entities, ais, locals, ships, mut courses): Self::SystemData) {
        for (e, ai, local) in (&entities, &ais, &locals).join() {
            if ai.current_state().action == Action::Chase {
                let e_location = Point2::new(local.translation().x, local.translation().y);
                // Chase closest ship
                let closest_ship = (&entities, &locals, &ships) .join() .filter(|(other_e, _, _)| e != *other_e) .min_by(|(_, a_local, _), (_, b_local, _)| { let a_location = Point2::new(a_local.translation().x, a_local.translation().y); let b_location = Point2::new(b_local.translation().x, b_local.translation().y); a_location .distance(&e_location) .partial_cmp(&(b_location.distance(&e_location))) .unwrap_or(Ordering::Equal) });
                if let Some((_, other_local, _)) = closest_ship {
                    let other_location = Point2::new(other_local.translation().x, other_local.translation().y);
                    courses .insert( e, Course { waypoints: VecDeque::from(vec![other_location]), }, ) .unwrap();
                }
            }
        }
    }
}

/// Turns right-clicks into Courses for every selected, controllable ship:
/// the clicked point is snapped to nearby entities / off land, and LShift
/// appends to the existing course instead of replacing it.
pub struct PlotCourseSystem;

impl<'s> System<'s> for PlotCourseSystem {
    type SystemData = ( Entities<'s>, ReadStorage<'s, Camera>, ReadStorage<'s, Ship>, ReadStorage<'s, Transform>, ReadStorage<'s, Selected>, ReadStorage<'s, Controllable>, WriteStorage<'s, Course>, Read<'s, InputHandler<StringBindings>>, Read<'s, Map>, ReadExpect<'s, ScreenDimensions>, );

    fn run( &mut self, (entities, cameras, ships, locals, selecteds, controllables, mut courses, input, map, screen_dimensions): Self::SystemData, ) {
        for (_, camera_local) in (&cameras, &locals).join() {
            for (e, _, _, _, _) in (&entities, &locals, &ships, &selecteds, &controllables).join() {
                if let Some((mouse_x, mouse_y)) = input.mouse_position() {
                    if input.mouse_button_is_down(MouseButton::Right) {
                        let point_in_world = point_mouse_to_world(mouse_x, mouse_y, &*screen_dimensions, camera_local.translation());
                        // Snap to any entity if close enough
                        let point = &locals .join() .map(|l| Point2::new(l.translation().x, l.translation().y)) .filter(|p| point_in_world.distance(p) < SNAP_THRESHOLD) .next() .unwrap_or(point_in_world);
                        // Snap to edge of land if on land
                        let point = if map.on_land(*point) { map.closest_point_on_edge(*point) } else { *point };
                        if !input.key_is_down(VirtualKeyCode::LShift) {
                            // Replace any existing course with the new point.
                            courses .insert( e, Course { waypoints: VecDeque::from(vec![point]), }, ) .unwrap();
                        } else {
                            // Shift held: extend the current course (or start
                            // one if the ship has none).
                            match courses.get_mut(e) {
                                Some(c) => c.waypoints.push_back(point),
                                None => {
                                    courses .insert( e, Course { waypoints: VecDeque::from(vec![point]), }, ) .unwrap();
                                }
                            }
                        }
                    }
                }
            }
        }
    }
}

/// Transfers a port's whole cargo onto the first ship found within docking
/// range, and pushes a notification describing what was loaded.
pub struct DockingSystem;

impl<'s> System<'s> for DockingSystem {
    // NOTE(review): Transform is requested as WriteStorage but only ever
    // joined immutably below — ReadStorage would likely do; confirm.
    type SystemData = ( Entities<'s>, ReadStorage<'s, Ship>, ReadStorage<'s, Port>, ReadStorage<'s, Named>, WriteStorage<'s, Cargo>, WriteStorage<'s, Transform>, Write<'s, Notifications>, );

    fn run(&mut self, (entities, ships, ports, names, mut cargos, locals, mut notifications): Self::SystemData) {
        for (p, _, port_local) in (&entities, &ports, &locals).join() {
            let port_location = Point2::new(port_local.translation().x, port_local.translation().y);
            // If a ship is nearby prepare to load ship
            let suitable_ship = (&entities, &ships, &locals) .join() .filter(|(_, _, l)| { let ship_location = Point2::new(l.translation().x, l.translation().y); ship_location.distance(&port_location) < DISTANCE_THRESHOLD }) .map(|(e, _, _)| e) .next();
            if let Some(ship) = suitable_ship {
                // Clone the port manifest first so the port's storage borrow
                // ends before mutably borrowing the ship's cargo.
                let port_cargo = cargos.get(p).unwrap().items.clone();
                let ship_cargo = cargos.get_mut(ship).unwrap();
                for (item, amount) in &port_cargo {
                    *ship_cargo.items.entry(*item).or_insert(0) += amount;
                }
                cargos.get_mut(p).unwrap().items.clear();
                if !port_cargo.is_empty() {
                    // Sorted for a deterministic notification order.
                    let items_notification = port_cargo .iter() .sorted_by_key(|(&item, _)| item) .map(|(item, amount)| format!("{} tons of {}", amount, item)) .collect::<Vec<_>>() .join(", ");
                    notifications.push_back(format!( "{} loaded onto ship at {}.", items_notification, names.get(p).unwrap().name.to_string() ));
                }
            }
        }
    }
}

// NOTE(review): the test module below continues past the end of the visible
// region; its interior is left untouched.
#[cfg(test)]
mod tests {
    use super::*;
    use crate::components::{AiState, Cargo, ItemType};
    use amethyst::{ecs::Entity, prelude::*, Result};
    use amethyst_test::prelude::*;
    use std::collections::HashMap;

    #[test]
    fn moves_ships_adds_new_waypoints_if_choosing_the_current_one_passes_through_land() -> Result<()> {
        let waypoints = VecDeque::from(vec![Point2::new(120.0, 0.0)]);
        let
num_waypoints = waypoints.len(); AmethystApplication::blank() .with_setup(|world| { world.insert(Map::new(vec![vec![Point2::new(50, 0), Point2::new(100, 75), Point2::new(100, -25)]] )) }) .with_system(MoveShipsSystem, "move_ships", &[]) .with_effect(move |world| { let ship = world .create_entity() .with(Ship {base_speed: 1.0 }) .with(Course {waypoints: waypoints}) .with(Transform::default()) .build(); world.insert(EffectReturn(ship)); }) .with_assertion(move |world| { let ship_entity = world.read_resource::<EffectReturn<Entity>>().0.clone(); let courses = world.read_storage::<Course>(); let course = courses.get(ship_entity).unwrap(); assert!(course.waypoints.len() > num_waypoints, "Number of waypoints more than originally"); }) .run() } #[test] fn moves_ships_does_not_adds_new_waypoints_if_choosing_the_current_does_not_pass_through_land() -> Result<()> { let waypoints = VecDeque::from(vec![Point2::new(120.0, -50.0)]); let num_waypoints = waypoints.len(); AmethystApplication::blank() .with_setup(|world| { world.insert(Map::new(vec![vec![Point2::new(50, 0), Point2::new(100, 75), Point2::new(100, -25)]] )) }) .with_system(MoveShipsSystem, "move_ships", &[]) .with_effect(move |world| { let ship = world .create_entity() .with(Ship {base_speed: 1.0 }) .with(Course {waypoints: waypoints}) .with(Transform::default()) .build(); world.insert(EffectReturn(ship)); }) .with_assertion(move |world| { let ship_entity = world.read_resource::<EffectReturn<Entity>>().0.clone(); let courses = world.read_storage::<Course>(); let course = courses.get(ship_entity).unwrap(); assert_eq!(num_waypoints, course.waypoints.len(), "Number of waypoints"); }) .run() } #[test] fn moves_ships_chooses_next_waypoint_if_close_enough_to_current_one() -> Result<()> { let waypoints = VecDeque::from(vec![Point2::new(0.00001, 0.0002), Point2::new(2.0, 3.0), Point2::new(20.0, -5.0)]); AmethystApplication::blank() .with_system(MoveShipsSystem, "move_ships", &[]) .with_effect(move |world| { let ship = world 
.create_entity() .with(Ship {base_speed: 1.0 }) .with(Course {waypoints: waypoints}) .with(Transform::default()) .build(); world.insert(EffectReturn(ship)); }) .with_assertion(|world| { let ship_entity = world.read_resource::<EffectReturn<Entity>>().0.clone(); let courses = world.read_storage::<Course>(); let course = courses.get(ship_entity).unwrap(); assert_eq!(Point2::new(2.0, 3.0), *course.waypoints.front().unwrap(), "Next waypoint"); }) .run() } #[test] fn moves_ships_keeps_current_waypoint_if_not_close_enough_to_current_one() -> Result<()> { let waypoints = VecDeque::from(vec![Point2::new(2.0, 3.0), Point2::new(10.0, 20.0), Point2::new(20.0, -5.0)]); AmethystApplication::blank() .with_system(MoveShipsSystem, "move_ships", &[]) .with_effect(move |world| { let ship = world .create_entity() .with(Ship {base_speed: 1.0 }) .with(Course {waypoints: waypoints}) .with(Transform::default()) .build(); world.insert(EffectReturn(ship)); }) .with_assertion(|world| { let ship_entity = world.read_resource::<EffectReturn<Entity>>().0.clone(); let courses = world.read_storage::<Course>(); let course = courses.get(ship_entity).unwrap(); assert_eq!(Point2::new(2.0, 3.0), *course.waypoints.front().unwrap(), "Next waypoint"); }) .run() } #[test] fn moves_ships_moves_closer_to_current_waypoint() -> Result<()> { let current_waypoint = Point2::new(10.0, 20.0); let waypoints = VecDeque::from(vec![current_waypoint, Point2::new(2.0, 3.0), Point2::new(20.0, -5.0)]); let original_local = Transform::default(); let original_ship_location = Point2::new(original_local.translation().x, original_local.translation().y); AmethystApplication::blank() .with_system(MoveShipsSystem, "move_ships", &[]) .with_effect(move |world| { let ship = world .create_entity() .with(Ship {base_speed: 1.0 }) .with(Course {waypoints: waypoints}) .with(original_local) .build(); world.insert(EffectReturn(ship)); }) .with_assertion(move |world| { let ship_entity = 
world.read_resource::<EffectReturn<Entity>>().0.clone();
            let locals = world.read_storage::<Transform>();
            let ship_local = locals.get(ship_entity).unwrap();
            let ship_location =
                Point2::new(ship_local.translation().x, ship_local.translation().y);
            assert!(
                ship_location.distance(&current_waypoint)
                    < original_ship_location.distance(&current_waypoint),
                "Ship closer to waypoint"
            );
        })
        .run()
}

/// A ship with a higher base speed must end up closer to the current
/// waypoint than a slower ship that starts from the same transform.
#[test]
fn moves_ships_higher_base_speed_more() -> Result<()> {
    let current_waypoint = Point2::new(10.0, 20.0);
    let waypoints = VecDeque::from(vec![
        current_waypoint,
        Point2::new(2.0, 3.0),
        Point2::new(20.0, -5.0),
    ]);
    let original_local = Transform::default();

    AmethystApplication::blank()
        .with_system(MoveShipsSystem, "move_ships", &[])
        .with_effect(move |world| {
            let ship = world
                .create_entity()
                .with(Ship { base_speed: 1.0 })
                .with(Course { waypoints: waypoints.clone() })
                .with(original_local.clone())
                .build();
            let faster_ship = world
                .create_entity()
                .with(Ship { base_speed: 2.0 })
                .with(Course { waypoints: waypoints.clone() })
                .with(original_local.clone())
                .build();
            world.insert(EffectReturn((ship, faster_ship)));
        })
        .with_assertion(move |world| {
            let locals = world.read_storage::<Transform>();
            let ship_entity =
                world.read_resource::<EffectReturn<(Entity, Entity)>>().0.0.clone();
            let ship_local = locals.get(ship_entity).unwrap();
            let ship_location =
                Point2::new(ship_local.translation().x, ship_local.translation().y);
            let faster_ship_entity =
                world.read_resource::<EffectReturn<(Entity, Entity)>>().0.1.clone();
            let faster_ship_local = locals.get(faster_ship_entity).unwrap();
            let faster_ship_location = Point2::new(
                faster_ship_local.translation().x,
                faster_ship_local.translation().y,
            );
            assert!(
                faster_ship_location.distance(&current_waypoint)
                    < ship_location.distance(&current_waypoint),
                "Faster ship closer to waypoint"
            );
        })
        .run()
}

/// With an empty waypoint queue the move system must leave the ship's
/// transform untouched.
#[test]
fn moves_ships_does_not_move_if_no_next_waypoint() -> Result<()> {
    let waypoints = VecDeque::from(vec![]);
    let original_local = Transform::default();
    let original_local_cloned = original_local.clone();

    AmethystApplication::blank()
        .with_system(MoveShipsSystem, "move_ships", &[])
        .with_effect(move |world| {
            let ship = world
                .create_entity()
                .with(Ship { base_speed: 1.0 })
                .with(Course { waypoints: waypoints })
                .with(original_local)
                .build();
            world.insert(EffectReturn(ship));
        })
        .with_assertion(move |world| {
            let ship_entity = world.read_resource::<EffectReturn<Entity>>().0.clone();
            let locals = world.read_storage::<Transform>();
            let ship_local = locals.get(ship_entity).unwrap();
            assert_eq!(original_local_cloned, *ship_local, "Ship transform");
        })
        .run()
}

/// An entity that has a `Course` but no `Ship` component must not be moved.
#[test]
fn moves_ships_does_not_move_non_ship_with_course() -> Result<()> {
    let waypoints = VecDeque::from(vec![
        Point2::new(2.0, 3.0),
        Point2::new(20.0, -5.0),
        Point2::new(25.0, 30.0),
    ]);
    let original_local = Transform::default();
    let original_local_cloned = original_local.clone();

    AmethystApplication::blank()
        .with_system(MoveShipsSystem, "move_ships", &[])
        .with_effect(move |world| {
            let entity = world
                .create_entity()
                .with(Course { waypoints: waypoints })
                .with(original_local)
                .build();
            world.insert(EffectReturn(entity));
        })
        .with_assertion(move |world| {
            let entity = world.read_resource::<EffectReturn<Entity>>().0.clone();
            let locals = world.read_storage::<Transform>();
            let local = locals.get(entity).unwrap();
            assert_eq!(original_local_cloned, *local, "Ship transform");
        })
        .run()
}

/// When the AI just (re)entered the patrol state (previous state differs),
/// it must pick the patrol waypoint nearest to its current position.
#[test]
fn ai_chooses_nearest_waypoint_if_previously_was_not_patrolling() -> Result<()> {
    let target_location = Point2::new(1.0, 5.0);
    let waypoints = vec![
        target_location.clone(),
        Point2::new(-10.0, -20.0),
        Point2::new(20.0, -5.0),
    ];

    AmethystApplication::blank()
        .with_system(PatrolSystem, "patrol", &[])
        .with_effect(move |world| {
            let ai = world
                .create_entity()
                .with(Ai {
                    states: vec![
                        AiState {
                            transitions: HashMap::new(),
                            action: Action::Patrol,
                        },
                        AiState {
                            transitions: HashMap::new(),
                            action: Action::Chase,
                        },
                    ],
                    current_state_index: 0,
                    previous_state_index: 1,
                })
                .with(Patrol { waypoints: waypoints, next_waypoint_index: 1 })
                .with(Transform::default())
                .build();
            world.insert(EffectReturn(ai));
        })
        .with_assertion(move |world| {
            let ai_entity = world.read_resource::<EffectReturn<Entity>>().0.clone();
            let patrols = world.read_storage::<Patrol>();
            let ai_patrol = patrols.get(ai_entity).unwrap();
            assert_eq!(0, ai_patrol.next_waypoint_index);
            let courses = world.read_storage::<Course>();
            let ai_course = courses.get(ai_entity).unwrap();
            assert_eq!(1, ai_course.waypoints.len(), "Number of waypoints in course");
            assert_eq!(target_location, ai_course.waypoints[0], "Waypoint location");
        })
        .run()
}

/// Once the AI is near its current patrol waypoint, it must advance to the
/// next waypoint and plot a course towards it.
#[test]
fn ai_plots_a_course_to_next_waypoint_if_close_enough_to_current_one() -> Result<()> {
    let target_location = Point2::new(20.0, -5.0);
    // Waypoint 1 is almost exactly at the AI's default (origin) transform.
    let waypoints = vec![
        Point2::new(10.0, 5.0),
        Point2::new(0.0001, 0.0002),
        target_location.clone(),
    ];

    AmethystApplication::blank()
        .with_system(PatrolSystem, "patrol", &[])
        .with_effect(move |world| {
            let ai = world
                .create_entity()
                .with(Ai {
                    states: vec![AiState {
                        transitions: HashMap::new(),
                        action: Action::Patrol,
                    }],
                    current_state_index: 0,
                    previous_state_index: 0,
                })
                .with(Patrol { waypoints: waypoints, next_waypoint_index: 1 })
                .with(Transform::default())
                .build();
            world.insert(EffectReturn(ai));
        })
        .with_assertion(move |world| {
            let ai_entity = world.read_resource::<EffectReturn<Entity>>().0.clone();
            let patrols = world.read_storage::<Patrol>();
            let ai_patrol = patrols.get(ai_entity).unwrap();
            assert_eq!(2, ai_patrol.next_waypoint_index);
            let courses = world.read_storage::<Course>();
            let ai_course = courses.get(ai_entity).unwrap();
            assert_eq!(1, ai_course.waypoints.len(), "Number of waypoints in course");
            assert_eq!(target_location, ai_course.waypoints[0], "Waypoint location");
        })
        .run()
}

/// While still far from the current waypoint, the patrol index and the
/// existing course must stay unchanged.
#[test]
fn ai_does_not_plot_a_course_to_next_waypoint_if_not_close_enough_to_current() -> Result<()> {
    let current_waypoint = Point2::new(1.0, 2.0);
    let waypoints = vec![
        Point2::new(10.0, 5.0),
        current_waypoint,
        Point2::new(20.0, 3.0),
    ];

    AmethystApplication::blank()
        .with_system(PatrolSystem, "patrol", &[])
        .with_effect(move |world| {
            let ai = world
                .create_entity()
                .with(Ai {
                    states: vec![AiState {
                        transitions: HashMap::new(),
                        action: Action::Patrol,
                    }],
                    current_state_index: 0,
                    previous_state_index: 0,
                })
                .with(Course { waypoints: VecDeque::from(vec![current_waypoint]) })
                .with(Patrol { waypoints: waypoints, next_waypoint_index: 1 })
                .with(Transform::default())
                .build();
            world.insert(EffectReturn(ai));
        })
        .with_assertion(move |world| {
            let ai_entity = world.read_resource::<EffectReturn<Entity>>().0.clone();
            let patrols = world.read_storage::<Patrol>();
            let ai_patrol = patrols.get(ai_entity).unwrap();
            assert_eq!(1, ai_patrol.next_waypoint_index);
            let courses = world.read_storage::<Course>();
            let ai_course = courses.get(ai_entity).unwrap();
            assert_eq!(1, ai_course.waypoints.len(), "Number of waypoints in course");
            assert_eq!(current_waypoint, ai_course.waypoints[0], "Waypoint location");
        })
        .run()
}

/// The patrol system must ignore AIs whose current action is not `Patrol`.
#[test]
fn ai_does_not_patrol_if_action_is_not_to_patrol() -> Result<()> {
    let waypoints = vec![
        Point2::new(10.0, 5.0),
        Point2::new(1.0, 2.0),
        Point2::new(20.0, 3.0),
    ];

    AmethystApplication::blank()
        .with_system(PatrolSystem, "patrol", &[])
        .with_effect(move |world| {
            let ai = world
                .create_entity()
                .with(Ai {
                    states: vec![AiState {
                        transitions: HashMap::new(),
                        action: Action::Chase,
                    }],
                    current_state_index: 0,
                    previous_state_index: 0,
                })
                .with(Patrol { waypoints: waypoints, next_waypoint_index: 1 })
                .with(Transform::default())
                .build();
            world.insert(EffectReturn(ai));
        })
        .with_assertion(move |world| {
            let ai_entity = world.read_resource::<EffectReturn<Entity>>().0.clone();
            let courses = world.read_storage::<Course>();
            assert!(courses.get(ai_entity).is_none(), "Ai course does not exist");
        })
        .run()
}

/// An AI in the patrol state but without a `Patrol` component must not get
/// a course.
#[test]
fn ai_does_not_patrol_if_it_has_no_patrol() -> Result<()> {
    AmethystApplication::blank()
        .with_system(PatrolSystem, "patrol", &[])
        .with_effect(move |world| {
            let ai = world
                .create_entity()
                .with(Ai {
                    states: vec![AiState {
                        transitions: HashMap::new(),
                        action: Action::Patrol,
                    }],
                    current_state_index: 0,
                    previous_state_index: 0,
                })
                .with(Transform::default())
                .build();
            world.insert(EffectReturn(ai));
        })
        .with_assertion(move |world| {
            let ai_entity = world.read_resource::<EffectReturn<Entity>>().0.clone();
            let courses = world.read_storage::<Course>();
            assert!(courses.get(ai_entity).is_none(), "Ai course does not exist");
        })
        .run()
}

/// Of two ships, the chase system must plot a course to the nearer one.
#[test]
fn ai_chases_nearest_ship() -> Result<()> {
    AmethystApplication::blank()
        .with_system(ChaseSystem, "chase", &[])
        .with_effect(|world| {
            let ai = world
                .create_entity()
                .with(Ai {
                    states: vec![AiState {
                        transitions: HashMap::new(),
                        action: Action::Chase,
                    }],
                    current_state_index: 0,
                    previous_state_index: 0,
                })
                .with(Transform::default())
                .build();
            let mut target_transform = Transform::default();
            target_transform.set_translation_xyz(2.0, 1.0, 0.0);
            let target = world
                .create_entity()
                .with(Ship { base_speed: 1.0 })
                .with(target_transform)
                .build();
            // A second ship that is farther away and must be ignored.
            let mut other_transform = Transform::default();
            other_transform.set_translation_xyz(6.0, 5.0, 0.0);
            world
                .create_entity()
                .with(Ship { base_speed: 1.0 })
                .with(other_transform)
                .build();
            world.insert(EffectReturn((ai, target)));
        })
        .with_assertion(move |world| {
            let ai_entity =
                world.read_resource::<EffectReturn<(Entity, Entity)>>().0.0.clone();
            let target_entity =
                world.read_resource::<EffectReturn<(Entity, Entity)>>().0.1.clone();
            let locals = world.read_storage::<Transform>();
            let target_transform = locals.get(target_entity).unwrap();
            let target_location = Point2::new(
                target_transform.translation().x,
                target_transform.translation().y,
            );
            let courses = world.read_storage::<Course>();
            let ai_course = courses.get(ai_entity).unwrap();
            assert_eq!(1, ai_course.waypoints.len(), "Number of waypoints in course");
            assert_eq!(target_location, ai_course.waypoints[0], "Waypoint location");
        })
        .run()
}

/// With no other entities present the AI must not acquire a course (i.e.
/// it never targets itself).
#[test]
fn ai_does_not_chase_itself() -> Result<()> {
    AmethystApplication::blank()
        .with_system(ChaseSystem, "chase", &[])
        .with_effect(|world| {
            let ai = world
                .create_entity()
                .with(Ai {
                    states: vec![AiState {
                        transitions: HashMap::new(),
                        action: Action::Chase,
                    }],
                    current_state_index: 0,
                    previous_state_index: 0,
                })
                .with(Transform::default())
                .build();
            world.insert(EffectReturn(ai));
        })
        .with_assertion(|world| {
            let ai_entity = world.read_resource::<EffectReturn<Entity>>().0.clone();
            let courses = world.read_storage::<Course>();
            assert!(courses.get(ai_entity).is_none(), "Ai course does not exist");
        })
        .run()
}

/// A nearby entity without a `Ship` component must not be chased.
#[test]
fn ai_only_chases_ships() -> Result<()> {
    AmethystApplication::blank()
        .with_system(ChaseSystem, "chase", &[])
        .with_effect(|world| {
            let ai = world
                .create_entity()
                .with(Ai {
                    states: vec![AiState {
                        transitions: HashMap::new(),
                        action: Action::Chase,
                    }],
                    current_state_index: 0,
                    previous_state_index: 0,
                })
                .with(Transform::default())
                .build();
            let mut target_transform = Transform::default();
            target_transform.set_translation_xyz(2.0, 1.0, 0.0);
            world
                .create_entity()
                .with(target_transform)
                .build();
            world.insert(EffectReturn(ai));
        })
        .with_assertion(|world| {
            let ai_entity = world.read_resource::<EffectReturn<Entity>>().0.clone();
            let courses = world.read_storage::<Course>();
            assert!(courses.get(ai_entity).is_none(), "Ai course does not exist");
        })
        .run()
}

/// The chase system must ignore AIs whose current action is not `Chase`.
#[test]
fn ai_does_not_chase_if_action_is_not_to_chase() -> Result<()> {
    AmethystApplication::blank()
        .with_system(ChaseSystem, "chase", &[])
        .with_effect(|world| {
            let ai = world
                .create_entity()
                .with(Ai {
                    states: vec![AiState {
                        transitions: HashMap::new(),
                        action: Action::Patrol,
                    }],
                    current_state_index: 0,
                    previous_state_index: 0,
                })
                .with(Transform::default())
                .build();
            let mut target_transform = Transform::default();
            target_transform.set_translation_xyz(2.0, 1.0, 0.0);
            world
                .create_entity()
                .with(Ship { base_speed: 1.0 })
                .with(target_transform)
                .build();
            world.insert(EffectReturn(ai));
        })
        .with_assertion(|world| {
            let ai_entity = world.read_resource::<EffectReturn<Entity>>().0.clone();
            let courses = world.read_storage::<Course>();
            assert!(courses.get(ai_entity).is_none(), "Ai course does not exist");
        })
        .run()
}

/// Docking a ship right next to a port must move the port's entire cargo
/// onto the ship, summing per-item quantities.
#[test]
fn cargo_transferred_if_ship_nearby() -> Result<()> {
    let goods_in_port: HashMap<ItemType, u32> = [(ItemType::Sugar, 10), (ItemType::Rum, 5)]
        .iter()
        .cloned()
        .collect();
    let original_goods_on_ship: HashMap<ItemType, u32> = [
        (ItemType::Sugar, 10),
        (ItemType::Whiskey, 5),
        (ItemType::Rum, 10),
    ]
    .iter()
    .cloned()
    .collect();
    // Expected result: ship cargo plus everything that was in the port.
    let mut expected_goods_on_ship = original_goods_on_ship.clone();
    for (k, v) in goods_in_port.iter() {
        *expected_goods_on_ship.entry(*k).or_insert(0) += v;
    }

    AmethystApplication::blank()
        .with_system(DockingSystem, "docking", &[])
        .with_effect(move |world| {
            let port = world
                .create_entity()
                .with(Port)
                .named("A port")
                .with(Cargo {
                    items: goods_in_port.clone(),
                })
                .with(Transform::default())
                .build();
            let mut ship_transform = Transform::default();
            ship_transform.set_translation_xyz(0.0001, 0.0002, 0.0);
            let ship = world
                .create_entity()
                .with(Ship { base_speed: 1.0 })
                .with(Cargo {
                    items: original_goods_on_ship.clone(),
                })
                .with(ship_transform)
                .build();
            world.insert(EffectReturn((ship, port)));
        })
        .with_assertion(move |world| {
            let ship_entity =
                world.read_resource::<EffectReturn<(Entity, Entity)>>().0.0.clone();
            let port_entity =
                world.read_resource::<EffectReturn<(Entity, Entity)>>().0.1.clone();
            let cargos = world.read_storage::<Cargo>();
            let port_cargo = cargos.get(port_entity).unwrap();
            assert_eq!(HashMap::new(), port_cargo.items, "Cargo on port");
            let ship_cargo = cargos.get(ship_entity).unwrap();
            // Fixed assertion-message typo: was "Cargon on ship".
            assert_eq!(expected_goods_on_ship, ship_cargo.items, "Cargo on ship");
        })
        .run()
}

/// Loading cargo must enqueue exactly one human-readable notification.
#[test]
fn notification_sent_if_cargo_loaded() -> Result<()> {
    const PORT: &str = "London";
    let goods_in_port: HashMap<ItemType, u32> = [(ItemType::Sugar, 10), (ItemType::Rum, 5)]
        .iter()
        .cloned()
        .collect();

    AmethystApplication::blank()
        .with_system(DockingSystem, "docking", &[])
        .with_effect(move |world| {
            world
                .create_entity()
                .with(Port)
                .named(PORT)
                .with(Cargo {
                    items: goods_in_port.clone(),
                })
                .with(Transform::default())
                .build();
            let mut ship_transform = Transform::default();
            ship_transform.set_translation_xyz(0.0001, 0.0002, 0.0);
            world
                .create_entity()
                .with(Ship { base_speed: 1.0 })
                .with(Cargo {
                    items: HashMap::new(),
                })
                .with(ship_transform)
                .build();
        })
        .with_assertion(move |world| {
            let notifications = world.read_resource::<Notifications>().clone();
            assert_eq!(1, notifications.len(), "Number of notifications");
            assert_eq!(
                &format!(
                    "5 tons of Rum, 10 tons of Sugar loaded onto ship at {}.",
                    PORT
                ),
                notifications.front().unwrap(),
                "Notification"
            );
        })
        .run()
}

/// A ship too far from the port must leave both cargos untouched.
#[test]
fn cargo_not_transferred_if_ship_not_nearby() -> Result<()> {
    let goods_in_port: HashMap<ItemType, u32> = [(ItemType::Sugar, 10), (ItemType::Rum, 5)]
        .iter()
        .cloned()
        .collect();
    let goods_in_port_cloned = goods_in_port.clone();
    let goods_on_ship: HashMap<ItemType, u32> = [
        (ItemType::Sugar, 10),
        (ItemType::Whiskey, 5),
        (ItemType::Rum, 10),
    ]
    .iter()
    .cloned()
    .collect();
    let goods_on_ship_cloned = goods_on_ship.clone();

    AmethystApplication::blank()
        .with_system(DockingSystem, "docking", &[])
        .with_effect(move |world| {
            let port = world
                .create_entity()
                .with(Port)
                .named("A port")
                .with(Cargo {
                    items: goods_in_port.clone(),
                })
                .with(Transform::default())
                .build();
            let mut ship_transform = Transform::default();
            ship_transform.set_translation_xyz(10.0, 0.0, 0.0);
            let ship = world
                .create_entity()
                .with(Ship { base_speed: 1.0 })
                .with(Cargo {
                    items: goods_on_ship.clone(),
                })
                .with(ship_transform)
                .build();
            world.insert(EffectReturn((ship, port)));
        })
        .with_assertion(move |world| {
            let ship_entity =
                world.read_resource::<EffectReturn<(Entity, Entity)>>().0.0.clone();
            let port_entity =
                world.read_resource::<EffectReturn<(Entity, Entity)>>().0.1.clone();
            let cargos = world.read_storage::<Cargo>();
            let port_cargo = cargos.get(port_entity).unwrap();
            assert_eq!(goods_in_port_cloned, port_cargo.items, "Cargo on port");
            let ship_cargo = cargos.get(ship_entity).unwrap();
            assert_eq!(goods_on_ship_cloned, ship_cargo.items, "Cargo on ship");
        })
        .run()
}
}
#![feature(test)] extern crate test; #[cfg(test)] mod tests { use super::*; use test::Bencher; use std::sync::Arc; use std::rc::Rc; // 3ns #[bench] fn bench_rc_string_clone(b: &mut Bencher) { let s = Rc::new(String::from("Hello")); b.iter(|| { test::black_box(s.clone()); }); } // 10ns #[bench] fn bench_arc_string_clone(b: &mut Bencher) { let s = Arc::new(String::from("Hello")); b.iter(|| { test::black_box(s.clone()); }); } // 23ns #[bench] fn bench_string_clone(b: &mut Bencher) { let s = String::from("Hello"); b.iter(|| { test::black_box(s.clone()); }); } }
// NOTE(review): this file is machine-generated ("generated by AutoRust
// 0.1.0"). Prefer regenerating over hand-editing so changes are not lost.
// All `rename` attributes carry the REST wire names; optional fields are
// omitted from serialization when `None`/empty.
#![doc = "generated by AutoRust 0.1.0"]
#![allow(non_camel_case_types)]
#![allow(unused_imports)]
use serde::{Deserialize, Serialize};

/// Trial availability information.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Trial {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub status: Option<trial::Status>,
    #[serde(rename = "availableHosts", default, skip_serializing_if = "Option::is_none")]
    pub available_hosts: Option<i32>,
}
pub mod trial {
    use super::*;
    /// Possible trial states.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Status {
        TrialAvailable,
        TrialUsed,
        TrialDisabled,
    }
}
/// Host quota information.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Quota {
    // NOTE(review): typed as an opaque JSON value by the generator —
    // the concrete shape is defined by the service, not visible here.
    #[serde(rename = "hostsRemaining", default, skip_serializing_if = "Option::is_none")]
    pub hosts_remaining: Option<serde_json::Value>,
    #[serde(rename = "quotaEnabled", default, skip_serializing_if = "Option::is_none")]
    pub quota_enabled: Option<quota::QuotaEnabled>,
}
pub mod quota {
    use super::*;
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum QuotaEnabled {
        Enabled,
        Disabled,
    }
}
/// Common ARM resource envelope (id / name / type).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Resource {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    // `type` is a Rust keyword, hence the trailing underscore + rename.
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
}
/// `Resource` plus location and tags (flattened on the wire).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct TrackedResource {
    #[serde(flatten)]
    pub resource: Resource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub location: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tags: Option<serde_json::Value>,
}
/// Standard error wrapper.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct CloudError {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub error: Option<ErrorResponse>,
}
/// Paged list of `Operation`; `next_link` points at the next page.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct OperationList {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<Operation>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
/// A single REST operation exposed by the provider.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Operation {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub display: Option<operation::Display>,
    #[serde(rename = "isDataAction", default, skip_serializing_if = "Option::is_none")]
    pub is_data_action: Option<bool>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub origin: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<OperationProperties>,
}
pub mod operation {
    use super::*;
    /// Human-readable description of an operation.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub struct Display {
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub provider: Option<String>,
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub resource: Option<String>,
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub operation: Option<String>,
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub description: Option<String>,
    }
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct OperationProperties {
    #[serde(rename = "serviceSpecification", default, skip_serializing_if = "Option::is_none")]
    pub service_specification: Option<ServiceSpecification>,
}
/// Log and metric specifications for monitoring.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ServiceSpecification {
    #[serde(rename = "logSpecifications", default, skip_serializing_if = "Vec::is_empty")]
    pub log_specifications: Vec<LogSpecification>,
    #[serde(rename = "metricSpecifications", default, skip_serializing_if = "Vec::is_empty")]
    pub metric_specifications: Vec<MetricSpecification>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct LogSpecification {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(rename = "displayName", default, skip_serializing_if = "Option::is_none")]
    pub display_name: Option<String>,
    #[serde(rename = "blobDuration", default, skip_serializing_if = "Option::is_none")]
    pub blob_duration: Option<String>,
}
/// Metric metadata exposed to Azure Monitor.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct MetricSpecification {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(rename = "displayName", default, skip_serializing_if = "Option::is_none")]
    pub display_name: Option<String>,
    #[serde(rename = "displayDescription", default, skip_serializing_if = "Option::is_none")]
    pub display_description: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub unit: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub category: Option<String>,
    #[serde(rename = "aggregationType", default, skip_serializing_if = "Option::is_none")]
    pub aggregation_type: Option<String>,
    #[serde(rename = "supportedAggregationTypes", default, skip_serializing_if = "Vec::is_empty")]
    pub supported_aggregation_types: Vec<String>,
    #[serde(rename = "supportedTimeGrainTypes", default, skip_serializing_if = "Vec::is_empty")]
    pub supported_time_grain_types: Vec<String>,
    #[serde(rename = "fillGapWithZero", default, skip_serializing_if = "Option::is_none")]
    pub fill_gap_with_zero: Option<bool>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub dimensions: Vec<MetricDimension>,
    // NOTE(review): typed `Option<String>` by the generator although it
    // reads like a flag — keep as-is; the wire schema defines it.
    #[serde(rename = "enableRegionalMdmAccount", default, skip_serializing_if = "Option::is_none")]
    pub enable_regional_mdm_account: Option<String>,
    #[serde(rename = "sourceMdmAccount", default, skip_serializing_if = "Option::is_none")]
    pub source_mdm_account: Option<String>,
    #[serde(rename = "sourceMdmNamespace", default, skip_serializing_if = "Option::is_none")]
    pub source_mdm_namespace: Option<String>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct MetricDimension {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(rename = "displayName", default, skip_serializing_if = "Option::is_none")]
    pub display_name: Option<String>,
    #[serde(rename = "internalName", default, skip_serializing_if = "Option::is_none")]
    pub internal_name: Option<String>,
    #[serde(rename = "toBeExportedForShoebox", default, skip_serializing_if = "Option::is_none")]
    pub to_be_exported_for_shoebox: Option<bool>,
}
/// ExpressRoute circuit authorization resource.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ExpressRouteAuthorization {
    #[serde(flatten)]
    pub resource: Resource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<ExpressRouteAuthorizationProperties>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ExpressRouteAuthorizationProperties {
    #[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")]
    pub provisioning_state: Option<express_route_authorization_properties::ProvisioningState>,
    #[serde(rename = "expressRouteAuthorizationId", default, skip_serializing_if = "Option::is_none")]
    pub express_route_authorization_id: Option<String>,
    #[serde(rename = "expressRouteAuthorizationKey", default, skip_serializing_if = "Option::is_none")]
    pub express_route_authorization_key: Option<String>,
}
pub mod express_route_authorization_properties {
    use super::*;
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum ProvisioningState {
        Succeeded,
        Failed,
        Updating,
    }
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ExpressRouteAuthorizationList {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<ExpressRouteAuthorization>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
/// ExpressRoute circuit details for a private cloud.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Circuit {
    #[serde(rename = "primarySubnet", default, skip_serializing_if = "Option::is_none")]
    pub primary_subnet: Option<String>,
    #[serde(rename = "secondarySubnet", default, skip_serializing_if = "Option::is_none")]
    pub secondary_subnet: Option<String>,
    #[serde(rename = "expressRouteID", default, skip_serializing_if = "Option::is_none")]
    pub express_route_id: Option<String>,
    #[serde(rename = "expressRoutePrivatePeeringID", default, skip_serializing_if = "Option::is_none")]
    pub express_route_private_peering_id: Option<String>,
}
/// Management endpoints (NSX-T, vCenter appliance, HCX) of a private cloud.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Endpoints {
    #[serde(rename = "nsxtManager", default, skip_serializing_if = "Option::is_none")]
    pub nsxt_manager: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub vcsa: Option<String>,
    #[serde(rename = "hcxCloudManager", default, skip_serializing_if = "Option::is_none")]
    pub hcx_cloud_manager: Option<String>,
}
/// LDAP identity-source configuration (carries credentials in `username`
/// and `password` — handle with care when logging/serializing).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct IdentitySource {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub alias: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub domain: Option<String>,
    #[serde(rename = "baseUserDN", default, skip_serializing_if = "Option::is_none")]
    pub base_user_dn: Option<String>,
    #[serde(rename = "baseGroupDN", default, skip_serializing_if = "Option::is_none")]
    pub base_group_dn: Option<String>,
    #[serde(rename = "primaryServer", default, skip_serializing_if = "Option::is_none")]
    pub primary_server: Option<String>,
    #[serde(rename = "secondaryServer", default, skip_serializing_if = "Option::is_none")]
    pub secondary_server: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub ssl: Option<identity_source::Ssl>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub username: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub password: Option<String>,
}
pub mod identity_source {
    use super::*;
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Ssl {
        Enabled,
        Disabled,
    }
}
/// A private cloud resource; `sku` and `properties` are required.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PrivateCloud {
    #[serde(flatten)]
    pub tracked_resource: TrackedResource,
    pub sku: Sku,
    pub properties: PrivateCloudProperties,
}
/// PATCH payload for a private cloud.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PrivateCloudUpdate {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tags: Option<serde_json::Value>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<PrivateCloudUpdateProperties>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PrivateCloudUpdateProperties {
    #[serde(rename = "managementCluster", default, skip_serializing_if = "Option::is_none")]
    pub management_cluster: Option<ManagementCluster>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub internet: Option<private_cloud_update_properties::Internet>,
    #[serde(rename = "identitySources", default, skip_serializing_if = "Vec::is_empty")]
    pub identity_sources: Vec<IdentitySource>,
}
pub mod private_cloud_update_properties {
    use super::*;
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Internet {
        Enabled,
        Disabled,
    }
}
/// Full (read) properties of a private cloud; extends the update shape.
/// `network_block` is the only required field.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PrivateCloudProperties {
    #[serde(flatten)]
    pub private_cloud_update_properties: PrivateCloudUpdateProperties,
    #[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")]
    pub provisioning_state: Option<private_cloud_properties::ProvisioningState>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub circuit: Option<Circuit>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub endpoints: Option<Endpoints>,
    #[serde(rename = "networkBlock")]
    pub network_block: String,
    #[serde(rename = "managementNetwork", default, skip_serializing_if = "Option::is_none")]
    pub management_network: Option<String>,
    #[serde(rename = "provisioningNetwork", default, skip_serializing_if = "Option::is_none")]
    pub provisioning_network: Option<String>,
    #[serde(rename = "vmotionNetwork", default, skip_serializing_if = "Option::is_none")]
    pub vmotion_network: Option<String>,
    // Credential fields — avoid logging serialized forms of this struct.
    #[serde(rename = "vcenterPassword", default, skip_serializing_if = "Option::is_none")]
    pub vcenter_password: Option<String>,
    #[serde(rename = "nsxtPassword", default, skip_serializing_if = "Option::is_none")]
    pub nsxt_password: Option<String>,
    #[serde(rename = "vcenterCertificateThumbprint", default, skip_serializing_if = "Option::is_none")]
    pub vcenter_certificate_thumbprint: Option<String>,
    #[serde(rename = "nsxtCertificateThumbprint", default, skip_serializing_if = "Option::is_none")]
    pub nsxt_certificate_thumbprint: Option<String>,
}
pub mod private_cloud_properties {
    use super::*;
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum ProvisioningState {
        Succeeded,
        Failed,
        Cancelled,
        Pending,
        Building,
        Deleting,
        Updating,
    }
}
/// A cluster resource; `sku` and `properties` are required.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Cluster {
    #[serde(flatten)]
    pub resource: Resource,
    pub sku: Sku,
    pub properties: ClusterProperties,
}
/// PATCH payload for a cluster.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ClusterUpdate {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<ClusterUpdateProperties>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ClusterUpdateProperties {
    #[serde(rename = "clusterSize", default, skip_serializing_if = "Option::is_none")]
    pub cluster_size: Option<i32>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ManagementCluster {
    #[serde(flatten)]
    pub cluster_update_properties: ClusterUpdateProperties,
    #[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")]
    pub provisioning_state: Option<ClusterProvisioningState>,
    #[serde(rename = "clusterId", default, skip_serializing_if = "Option::is_none")]
    pub cluster_id: Option<i32>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub hosts: Vec<String>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ClusterProperties {
    #[serde(flatten)]
    pub management_cluster: ManagementCluster,
    #[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")]
    pub provisioning_state: Option<ClusterProvisioningState>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum ClusterProvisioningState {
    Succeeded,
    Failed,
    Cancelled,
    Deleting,
    Updating,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PrivateCloudList {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<PrivateCloud>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ClusterList {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<Cluster>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
/// Administrative credentials for NSX-T and vCenter — sensitive data.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct AdminCredentials {
    #[serde(rename = "nsxtUsername", default, skip_serializing_if = "Option::is_none")]
    pub nsxt_username: Option<String>,
    #[serde(rename = "nsxtPassword", default, skip_serializing_if = "Option::is_none")]
    pub nsxt_password: Option<String>,
    #[serde(rename = "vcenterUsername", default, skip_serializing_if = "Option::is_none")]
    pub vcenter_username: Option<String>,
    #[serde(rename = "vcenterPassword", default, skip_serializing_if = "Option::is_none")]
    pub vcenter_password: Option<String>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Sku {
    pub name: String,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct HcxEnterpriseSiteList {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<HcxEnterpriseSite>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
/// An HCX Enterprise Site resource.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct HcxEnterpriseSite {
    #[serde(flatten)]
    pub resource: Resource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<HcxEnterpriseSiteProperties>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct HcxEnterpriseSiteProperties {
    #[serde(rename = "activationKey", default, skip_serializing_if = "Option::is_none")]
    pub activation_key: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub status: Option<hcx_enterprise_site_properties::Status>,
}
pub mod hcx_enterprise_site_properties {
    use super::*;
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Status {
        Available,
        Consumed,
        Deactivated,
        Deleted,
    }
}
/// ARM error body; `details` may nest further `ErrorResponse` values.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ErrorResponse {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub code: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub message: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub target: Option<String>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub details: Vec<ErrorResponse>,
    #[serde(rename = "additionalInfo", default, skip_serializing_if = "Vec::is_empty")]
    pub additional_info: Vec<ErrorAdditionalInfo>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ErrorAdditionalInfo {
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub info: Option<serde_json::Value>,
}
// Module layout: `edge`, `graph_store`, and `vertex` are public;
// `storage` and `graph_store_test` are crate-private.
pub mod edge;
pub mod graph_store;
// NOTE(review): if this module contains only tests, consider gating it with
// `#[cfg(test)]` so it is excluded from non-test builds — confirm usage.
mod graph_store_test;
mod storage;
pub mod vertex;
// This file is part of rdma-core. It is subject to the license terms in the COPYRIGHT file found in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/rdma-core/master/COPYRIGHT. No part of rdma-core, including this file, may be copied, modified, propagated, or distributed except according to the terms contained in the COPYRIGHT file. // Copyright © 2016 The developers of rdma-core. See the COPYRIGHT file in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/rdma-core/master/COPYRIGHT. include!("bindgen/types/gaspi_alloc_t.rs"); include!("bindgen/types/gaspi_atomic_value_t.rs"); include!("bindgen/types/gaspi_const_pointer_t.rs"); include!("bindgen/types/gaspi_group_t.rs"); include!("bindgen/types/gaspi_memory_description_t.rs"); include!("bindgen/types/gaspi_notification_id_t.rs"); include!("bindgen/types/gaspi_notification_t.rs"); include!("bindgen/types/gaspi_number_t.rs"); include!("bindgen/types/gaspi_offset_t.rs"); include!("bindgen/types/gaspi_pointer_t.rs"); include!("bindgen/types/gaspi_queue_id_t.rs"); include!("bindgen/types/gaspi_rank_t.rs"); include!("bindgen/types/gaspi_reduce_operation_t.rs"); include!("bindgen/types/gaspi_reduce_state_t.rs"); include!("bindgen/types/gaspi_segment_id_t.rs"); include!("bindgen/types/gaspi_size_t.rs"); include!("bindgen/types/gaspi_state_vector_t.rs"); include!("bindgen/types/gaspi_statistic_counter_t.rs"); include!("bindgen/types/gaspi_string_t.rs"); include!("bindgen/types/gaspi_time_t.rs"); include!("bindgen/types/gaspi_timeout_t.rs");
/* chapter 4 syntax and semantics move semantics */ fn main() { let n = vec![1, 2, 3]; fn take(n: Vec<i32>) { // what happens here isn’t important. } take(n); println!("n[0] is: {}", n[0]); } // output should be: /* n[0] is: 1 */
/// #proof of concept fn main() { welcome(); } fn welcome() { println!("welcome select a option \n"); println!("1- encrypt a string"); println!("2- encrypt a file"); println!("3- decrypt"); }
//! Entry point for the Avalon Discord bot: configuration, the `Bot` state
//! struct, the `discorsd::Bot` trait implementation, and guild-command setup.

#![warn(clippy::pedantic, clippy::nursery)]
// @formatter:off
#![allow(
clippy::module_name_repetitions,
clippy::wildcard_imports,
clippy::enum_glob_use,
clippy::empty_enum,
clippy::too_many_lines,
clippy::non_ascii_literal,
clippy::option_if_let_else,
clippy::option_option,
clippy::default_trait_access,
clippy::must_use_candidate,
clippy::similar_names,
clippy::unit_arg,
clippy::single_match_else,
clippy::match_bool,
// nursery
clippy::missing_const_for_fn,
clippy::collection_is_never_read,
)]
// @formatter:on

use std::collections::{HashMap, HashSet};
use std::collections::hash_map::Entry;
use std::fmt::{self, Debug};
use std::io::Write;
use std::path::Path;
use std::prelude::v1::Result::Ok;
use std::sync::Arc;
use std::time::Duration;

use chrono::{DateTime, Local, Utc};
use discorsd::{Bot as _, BotExt, BotState, GuildCommands, shard};
use discorsd::async_trait;
use discorsd::commands::*;
use discorsd::errors::BotError;
use discorsd::http::channel::{create_message, embed, MessageChannelExt};
use discorsd::http::ClientResult;
use discorsd::model::channel::Channel;
use discorsd::model::guild::{Guild, Integration};
use discorsd::model::ids::*;
use discorsd::model::message::Message;
use discorsd::model::interaction::Interaction;
use discorsd::model::permissions::{Permissions, Role};
use discorsd::shard::dispatch::ReactionUpdate;
use discorsd::shard::intents::Intents;
use discorsd::shard::model::{Activity, ActivityType, Identify, UpdateStatus};
use itertools::Itertools;
use log::{error, info};
use log::LevelFilter;
use once_cell::sync::OnceCell;
use serde::Deserialize;
use tokio::sync::{RwLock, RwLockReadGuard, RwLockWriteGuard};

use crate::avalon::Avalon;
use crate::avalon::game::AvalonGame;
use crate::avalon::setup::SetupCommand;
use crate::commands::info::InfoCommand;
use crate::commands::ll::LowLevelCommand;
use crate::commands::ping::PingCommand;
use crate::commands::rules::RulesCommand;
use crate::commands::start_game::StartGameCommand;
use crate::commands::system_info::SysInfoCommand;
use crate::commands::test::TestCommand;
use crate::commands::unpin::UnpinCommand;
use crate::commands::uptime::UptimeCommand;
use crate::coup::Coup;
use crate::hangman::Hangman;

#[macro_use]
mod macros;
mod commands;
mod avalon;
mod avalon2;
mod hangman;
mod coup;
pub mod utils;
pub mod games;

/// Bot configuration, deserialized from `config.json` / `config-dev.json`.
#[derive(Deserialize)]
pub struct Config {
    // Discord bot token — deliberately omitted from the Debug impl below.
    token: String,
    owner: UserId,
    // Channel used for status/log messages.
    channel: ChannelId,
    // The development/testing guild; some commands are only registered there.
    guild: GuildId,
}

impl Debug for Config {
    // Manual impl so the token is never printed; `finish_non_exhaustive`
    // signals the hidden field.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("Config")
            .field("steadfast_id", &self.owner)
            .field("dev_channel", &self.channel)
            .field("guild_id", &self.guild)
            .finish_non_exhaustive()
    }
}

/// Global bot state: per-guild (or per-channel, for hangman) game maps plus
/// login timestamps. All maps are behind async `RwLock`s.
pub struct Bot {
    config: Config,
    avalon_games: RwLock<HashMap<GuildId, Avalon>>,
    // avalon_games2: RwLock<HashMap<GuildId, avalon2::Avalon>>,
    coup_games: RwLock<HashMap<GuildId, Coup>>,
    hangman_games: RwLock<HashMap<ChannelId, Hangman>>,
    // todo this needs to also track which game they're in for it to be robust
    // todo but also, is this needed at all anymore?
    user_games: RwLock<HashMap<UserId, HashSet<GuildId>>>,
    // Set exactly once, on the first `ready`; later logins go in `log_in`.
    first_log_in: OnceCell<DateTime<Utc>>,
    log_in: RwLock<Option<DateTime<Utc>>>,
}

impl Bot {
    /// Creates a bot with empty game maps and no recorded logins.
    fn new(config: Config) -> Self {
        Self {
            config,
            avalon_games: Default::default(),
            // avalon_games2: Default::default(),
            coup_games: Default::default(),
            hangman_games: Default::default(),
            user_games: Default::default(),
            first_log_in: Default::default(),
            log_in: Default::default(),
        }
    }
}

/// Sets up logging, spawns the uptime-file writer, loads the config
/// (`config-dev.json` when run with `--dev`), and runs the bot.
#[tokio::main]
async fn main() -> shard::ShardResult<()> {
    env_logger::builder()
        .format(|f, record| writeln!(
            f,
            "{} [{}] {}",
            Local::now().format("%d %T"),
            record.level(),
            record.args(),
        ))
        .filter(None, LevelFilter::Info)
        .init();
    // Background task: rewrite ../uptime.txt with the current time every 15 min.
    tokio::spawn(async {
        use tokio::{fs::File, io::AsyncWriteExt, time::sleep};
        loop {
            match File::create("../uptime.txt").await {
                Ok(mut file) => match file.write_all(format!("{:?}", Utc::now()).as_bytes()).await {
                    Ok(()) => info!("Updated uptime file"),
                    Err(e) => error!("Error writing uptime file {}", e),
                }
                Err(e) => error!("Error opening uptime file {}", e),
            }
            // write file every 15 mins
            sleep(Duration::from_secs(60 * 15)).await;
        }
    });
    let path = if std::env::args().any(|arg| arg == "--dev") {
        "config-dev.json"
    } else {
        "config.json"
    };
    let config = std::fs::read_to_string(path).expect("Could not find config file");
    let config: Config = serde_json::from_str(&config).expect("Could not read config file");
    Bot::new(config).run().await
}

/// Crate-local Result defaulting the error type to `BotError`.
type Result<T, E = BotError> = std::result::Result<T, E>;

#[async_trait]
impl discorsd::Bot for Bot {
    fn token(&self) -> String { self.config.token.clone() }

    fn identify(&self) -> Identify {
        Identify::new(self.token())
            .add_intents(Intents::MESSAGE_CONTENT)
            .presence(UpdateStatus::with_activity(
                Activity::for_bot("Avalon - try /start", ActivityType::Game)
            ))
    }

    /// Commands registered globally (available in every guild).
    fn global_commands() -> &'static [&'static dyn SlashCommandRaw<Bot=Self>] {
        &[
            &InfoCommand,
            &PingCommand,
            &UptimeCommand,
            &SysInfoCommand,
            &RulesCommand,
            &UnpinCommand,
            &TestCommand,
        ]
    }

    /// Base per-guild commands: general commands plus the Avalon set.
    fn guild_commands() -> Vec<Box<dyn SlashCommandRaw<Bot=Self>>> {
        let mut vec = commands::commands();
        vec.extend(avalon::commands());
        vec
    }

    async fn ready(&self, state: Arc<BotState<Self>>) -> Result<()> {
        // `set` fails if already initialized; `Err` carries the rejected value,
        // which here is the timestamp of this (re-)login.
        if let Err(now) = self.first_log_in.set(Utc::now()) {
            *self.log_in.write().await = Some(now);
        }
        state.bot.config.channel.send(&state, embed(|e| {
            e.title("Avalon Bot is logged on!");
            e.timestamp_now();
            e.url("https://github.com/Andrew-Schwartz/AvalonBot");
        })).await?;
        let message = state.bot.config.channel.send(&state, create_message(|m| {
            m.attachment(Path::new("images/avalon/avalonLogo.png"));
            m.embed(|e| {
                e.title("ASDASUDASDSAD");
                // e.image(Path::new("images/avalon/board/R.jpg"));
            });
            m.content("ASDSAD");
        })).await?;
        // tokio::time::sleep(Duration::from_secs(5)).await;
        // state.client.trigger_typing(state.bot.config.channel).await?;
        // state.client.add_pinned_message(message.channel, message.id).await?;
        message.react(&state.client, '🙂').await?;
        Ok(())
    }

    async fn resumed(&self, state: Arc<BotState<Self>>) -> Result<()> {
        state.bot.config.channel.send(&state, embed(|e| {
            e.title("Avalon Bot has resumed");
            e.timestamp_now();
        })).await?;
        Ok(())
    }

    async fn guild_create(&self, guild: Guild, state: Arc<BotState<Self>>) -> Result<()> {
        info!("Guild Create: {} ({})", guild.name.as_ref().unwrap(), guild.id);
        self.avalon_games.write().await.entry(guild.id).or_default();
        // Clear whatever commands Discord still has saved for this guild,
        // then register the current set from scratch.
        state.client.bulk_overwrite_guild_commands(
            state.application_id(),
            guild.id,
            Vec::new(),
        ).await?;
        state.register_guild_commands(&guild, [Box::new(StartGameCommand(guild.id)) as _]).await?;
        self.initialize_guild_commands(&guild, &state).await?;
        self.config.channel.send(&state, format!(
            "🎉 Joined new guild **{}** (`{}`) 🎉",
            guild.name.as_ref().unwrap(), guild.id,
        )).await?;
        Ok(())
    }

    /// Handles a few bang-prefixed debug commands; everything else is ignored.
    async fn message_create(&self, message: Message, state: Arc<BotState<Self>>) -> Result<()> {
        match message.content.as_str() {
            "!timestamp" => {
                message.channel.send(
                    &state,
                    format!("You created your account at {}", message.author.id.timestamp()),
                ).await?;
            }
            "!lots" => {
                let user = state.user().await;
                message.channel.send(&state, embed(|e| {
                    e.image("english_channel.jpg");
                    e.thumbnail("av_bot_dev.png");
                    e.authored_by(&user);
                    e.footer("look at my foot", "av_bot_dev.png");
                    for i in 0..6 {
                        match i % 3 {
                            0 => e.add_inline_field("left col", i),
                            1 => e.add_blank_inline_field(),
                            2 => e.field(("right col", format!("i = {}", i), true)),
                            _ => unreachable!(),
                        };
                    }
                })).await?;
            }
            "!log" => {
                info!("{:#?}", self.debug().await);
                message.channel.send(&state, "logged!").await?;
            }
            "!cache" => {
                info!("{:#?}", state.cache.debug().await);
                message.channel.send(&state, "logged!").await?;
            }
            // "!commands" => {
            //     let commands = state.slash_commands.read().await;
            //     for (guild, commands) in commands.iter() {
            //         let commands = commands.read().await;
            //         println!("\nGUILD {}\n------------------------------", guild);
            //         for command in commands.iter() {
            //             println!("command = {:?}", command);
            //         }
            //     }
            //     println!("\nEXISTING COMMANDS\n------------------------------");
            //     let commands = state.client.get_guild_commands(
            //         state.application_id(),
            //         message.guild_id.unwrap(),
            //     ).await?;
            //     for command in commands {
            //         println!("command = {:?}", command);
            //     }
            //     message.channel.send(&state, "logged!").await?;
            // }
            _ => {}
        }
        Ok(())
    }

    async fn interaction(&self, interaction: Interaction, state: Arc<BotState<Self>>) -> Result<()> {
        Self::handle_interaction(interaction, state).await
    }

    async fn reaction(&self, reaction: ReactionUpdate, state: Arc<BotState<Self>>) -> Result<()> {
        // println!("reaction = {:?}", reaction);
        // Clone the applicable commands out of the lock, then run them after
        // the read guard is released.
        let mut results = Vec::new();
        let commands = state.reaction_commands.read().await.iter()
            .filter(|rc| rc.applies(&reaction))
            .cloned()
            .collect_vec();
        for command in commands {
            let result = command.run(Arc::clone(&state), reaction.clone()).await;
            results.push(result);
        }
        // Propagate the first error only after every command has run.
        for res in results {
            res?;
        }
        Ok(())
    }

    async fn integration_update(&self, guild_id: GuildId, integration: Integration, state: Arc<BotState<Self>>) -> Result<()> {
        info!("Guild Integration Update: {:?}", integration);
        let guild = state.cache.guild(guild_id).await.unwrap();
        self.initialize_guild_commands(&guild, &state).await?;
        let channels = state.cache.guild_channels(guild_id, Channel::text).await;
        // Prefer #general; fall back to the first text channel.
        // NOTE(review): `unwrap` panics if the guild has no text channels — confirm.
        let channel = channels.iter().find(|c| c.name == "general")
            .unwrap_or_else(|| channels.iter().next().unwrap());
        channel.send(&state, "Slash Commands are now enabled!").await?;
        Ok(())
    }

    // todo should just be one method but have an enum for Create/Update/Delete
    async fn role_create(&self, guild: GuildId, role: Role, state: Arc<BotState<Self>>) -> Result<()> {
        println!("updating unpin perms");
        // state.global_command_id::<UnpinCommand>()
        //     .await
        //     .edit_permissions(state, guild, vec![CommandPermissions {
        //         id: role.id.into(),
        //         permission: unpin_perms(&role),
        //     }])
        //     .await?;
        Ok(())
    }

    async fn role_update(&self, guild: GuildId, role: Role, state: Arc<BotState<Self>>) -> Result<()> {
        println!("updating unpin perms");
        // state.global_command_id::<UnpinCommand>()
        //     .await
        //     .edit_permissions(state, guild, vec![CommandPermissions {
        //         id: role.id.into(),
        //         permission: unpin_perms(&role),
        //     }])
        //     .await?;
        Ok(())
    }

    async fn error(&self, error: BotError, state: Arc<BotState<Self>>) {
        // todo can probably deal with the error for real
        error!("{}", error.display_error(&state).await);
    }
}

impl Bot {
    /// The first time connecting to a guild, run this to delete any commands Discord has saved from
    /// the last time the bot was started
    // todo move to BotExt or smth
    async fn initialize_guild_commands(
        &self,
        guild: &Guild,
        state: &BotState<Self>,
    ) -> ClientResult<()> {
        // this should be only place that writes to first level of `commands`
        let first_time = match state.slash_commands.write().await.entry(guild.id) {
            Entry::Vacant(vacant) => {
                vacant.insert(Default::default());
                true
            }
            Entry::Occupied(_) => false,
        };
        if first_time {
            let commands = state.slash_commands.read().await;
            let mut commands = commands.get(&guild.id).unwrap().write().await;
            let rcs = state.reaction_commands.write().await;
            Self::reset_guild_command_perms(
                state, guild.id, &mut commands, rcs,
            ).await?;
            // set up perms for `/unpin`
            // let unpin_command = state.global_command_id::<UnpinCommand>();
            // let disallow = guild.roles.iter()
            //     .filter(|r| !unpin_perms(r))
            //     .map(Role::id);
            // unpin_command.disallow_roles(&state, guild.id, disallow).await?;
            // unpin_command.allow_users(&state, guild.id, &[guild.owner_id]).await?;
            if guild.id == self.config.guild {
                println!("guild = {:?}", guild);
                // `/ll` only in testing server
                let command = state.client.create_guild_command(
                    state.application_id(),
                    guild.id,
                    LowLevelCommand.command(),
                ).await?;
                commands.insert(command.id(), Box::new(LowLevelCommand));
                println!("ll perms");
                // command.id.allow_users(&state, guild.id, &[self.config.owner]).await?;
                let command = state.client.create_guild_command(
                    state.application_id(),
                    guild.id,
                    TestCommand.command(),
                ).await?;
                commands.insert(command.id(), Box::new(TestCommand));
                let command = state.client.create_guild_command(
                    state.application_id(),
                    guild.id,
                    SetupCommand.command(),
                ).await?;
                commands.insert(command.id(), Box::new(SetupCommand));
            }
        }
        Ok(())
    }

    /// Drops stale reaction commands for this guild and bulk-overwrites its
    /// slash commands with the current `guild_commands()` set.
    async fn reset_guild_command_perms(
        state: &BotState<Self>,
        guild: GuildId,
        commands: &mut RwLockWriteGuard<'_, GuildCommands<Self>>,
        mut reaction_commands: RwLockWriteGuard<'_, Vec<Box<dyn ReactionCommand<Self>>>>,
    ) -> ClientResult<()> {
        reaction_commands.retain(|rc| !AvalonGame::is_reaction_command(rc.as_ref(), guild));
        // Release the write guard before awaiting network calls below.
        drop(reaction_commands);
        let app = state.application_id();
        let guild_commands = Self::guild_commands();
        let guild_commands: GuildCommands<_> = state.client.bulk_overwrite_guild_commands(
            app,
            guild,
            guild_commands.iter().map(|c| c.command()).collect(),
        ).await
            .unwrap()
            .into_iter()
            .map(|c| c.id())
            .zip(guild_commands)
            .collect();
        // let command_names = guild_commands.iter()
        //     .map(|(&id, command)| (command.name(), id))
        //     .collect();
        **commands = guild_commands;
        // *state.command_names.write().await
        //     .entry(guild)
        //     .or_default() = RwLock::new(command_names);
        // todo
        // clear any left over perms
        // guild.batch_edit_permissions(state, vec![]).await?;
        Ok(())
    }

    /// The timestamp of the most recent login: a resumed login if one has
    /// happened, otherwise the first login (None before the first `ready`).
    pub async fn most_recent_login(&self) -> Option<DateTime<Utc>> {
        if let Some(time) = *self.log_in.read().await {
            Some(time)
        } else {
            self.first_log_in.get().copied()
        }
    }

    /// Snapshot of all bot state behind read guards, for `{:#?}` logging.
    pub async fn debug(&self) -> DebugBot<'_> {
        let Self {
            config,
            coup_games,
            hangman_games,
            first_log_in: ready,
            log_in: resume,
            avalon_games: games,
            user_games
        } = self;
        #[allow(clippy::mixed_read_write_in_expression)]
        DebugBot {
            config,
            games: games.read().await,
            coup_games: coup_games.read().await,
            hangman_games: hangman_games.read().await,
            user_games: user_games.read().await,
            ready: ready.get(),
            resume: resume.read().await,
        }
    }
}

/// Borrowed, lock-held view of [`Bot`] used only for Debug formatting.
#[derive(Debug)]
// todo remove when rust remembers that formatting this struct uses it 🙃
#[allow(dead_code)]
pub struct DebugBot<'a> {
    config: &'a Config,
    games: RwLockReadGuard<'a, HashMap<GuildId, Avalon>>,
    coup_games: RwLockReadGuard<'a, HashMap<GuildId, Coup>>,
    hangman_games: RwLockReadGuard<'a, HashMap<ChannelId, Hangman>>,
    user_games: RwLockReadGuard<'a, HashMap<UserId, HashSet<GuildId>>>,
    ready: Option<&'a DateTime<Utc>>,
    resume: RwLockReadGuard<'a, Option<DateTime<Utc>>>,
}

/// Whether a role's permissions should allow use of the `/unpin` command.
fn unpin_perms(role: &Role) -> bool {
    role.permissions.intersects(
        Permissions::ADMINISTRATOR
            | Permissions::MANAGE_CHANNELS
            | Permissions::MANAGE_GUILD
            | Permissions::MANAGE_MESSAGES
    )
}
//! Integration tests for the local storage backend: the shared connection
//! test-suite harness plus targeted tests for view integrity, at-rest
//! encryption, and key expiration across restarts.

use std::time::Duration;

use bonsaidb_core::{
    connection::{AccessPolicy, Connection, ServerConnection},
    document::KeyId,
    permissions::{Permissions, Statement},
    test_util::{
        Basic, BasicByBrokenParentId, BasicByParentId, BasicCollectionWithNoViews,
        BasicCollectionWithOnlyBrokenParentId, BasicSchema, HarnessTest, TestDirectory,
    },
};
use config::Configuration;

use super::*;
use crate::Database;

/// Harness consumed by the `define_*_test_suite!` macros below; holds the
/// temp directory alive for the lifetime of the database.
struct TestHarness {
    _directory: TestDirectory,
    db: Database<BasicSchema>,
}

impl TestHarness {
    async fn new(test: HarnessTest) -> anyhow::Result<Self> {
        let directory = TestDirectory::new(format!("local-{}", test));
        let storage = Storage::open_local(&directory, Configuration::default()).await?;
        storage.register_schema::<BasicSchema>().await?;
        storage.create_database::<BasicSchema>("tests").await?;
        let db = storage.database("tests").await?;
        Ok(Self {
            _directory: directory,
            db,
        })
    }

    const fn server_name() -> &'static str {
        "local"
    }

    fn server(&self) -> &'_ Storage {
        self.db.storage()
    }

    async fn connect_with_permissions(
        &self,
        permissions: Vec<Statement>,
        _label: &str,
    ) -> anyhow::Result<Database<BasicSchema>> {
        Ok(self
            .db
            .with_effective_permissions(Permissions::from(permissions)))
    }

    async fn connect(&self) -> anyhow::Result<Database<BasicSchema>> {
        Ok(self.db.clone())
    }

    pub async fn shutdown(&self) -> anyhow::Result<()> {
        // Local storage has no explicit shutdown; nothing to do.
        Ok(())
    }
}

bonsaidb_core::define_connection_test_suite!(TestHarness);
#[cfg(feature = "pubsub")]
bonsaidb_core::define_pubsub_test_suite!(TestHarness);
#[cfg(feature = "keyvalue")]
bonsaidb_core::define_kv_test_suite!(TestHarness);

/// Verifies view integrity checking: a document written with no views
/// installed is later picked up when views are added / fixed.
#[test]
fn integrity_checks() -> anyhow::Result<()> {
    let path = TestDirectory::new("integrity-checks");
    // To ensure full cleanup between each block, each runs in its own runtime;
    // Add a doc with no views installed
    {
        let rt = tokio::runtime::Builder::new_current_thread()
            .enable_all()
            .build()?;
        rt.block_on(async {
            {
                let db = Database::<BasicCollectionWithNoViews>::open_local(
                    &path,
                    Configuration::default(),
                )
                .await?;
                let collection = db.collection::<BasicCollectionWithNoViews>();
                collection.push(&Basic::default().with_parent_id(1)).await?;
            }
            tokio::time::sleep(Duration::from_millis(100)).await;
            // TODO need to be able to shut down a local database, including background jobs.
            Result::<(), anyhow::Error>::Ok(())
        })
        .unwrap();
    }
    // Connect with a new view and see the automatic update with a query
    {
        let rt = tokio::runtime::Builder::new_current_thread()
            .enable_all()
            .build()?;
        rt.block_on(async {
            let db = Database::<BasicCollectionWithOnlyBrokenParentId>::open_local(
                &path,
                Configuration::default(),
            )
            .await?;
            // Give the integrity scanner time to run if it were to run (it shouldn't in this configuration).
            tokio::time::sleep(Duration::from_millis(100)).await;
            // NoUpdate should return data without the validation checker having run.
            assert_eq!(
                db.view::<BasicByBrokenParentId>()
                    .with_access_policy(AccessPolicy::NoUpdate)
                    .query()
                    .await?
                    .len(),
                0
            );
            // Regular query should show the correct data
            assert_eq!(db.view::<BasicByBrokenParentId>().query().await?.len(), 1);
            tokio::time::sleep(Duration::from_millis(100)).await;
            // TODO need to be able to shut down a local database, including background jobs.
            Result::<(), anyhow::Error>::Ok(())
        })
        .unwrap();
    }
    // Connect with a fixed view, and wait for the integrity scanner to work
    {
        let rt = tokio::runtime::Builder::new_current_thread()
            .enable_all()
            .build()?;
        rt.block_on(async {
            let db = Database::<Basic>::open_local(
                &path,
                Configuration {
                    views: config::Views {
                        check_integrity_on_open: true,
                    },
                    ..Configuration::default()
                },
            )
            .await?;
            // Poll for up to ~10s for the background scanner to index the doc.
            for _ in 0_u8..10 {
                tokio::time::sleep(Duration::from_millis(1000)).await;
                if db
                    .view::<BasicByParentId>()
                    .with_access_policy(AccessPolicy::NoUpdate)
                    .with_key(Some(1))
                    .query()
                    .await?
                    .len()
                    == 1
                {
                    return Result::<(), anyhow::Error>::Ok(());
                }
            }
            panic!("Integrity checker didn't run in the allocated time")
        })
        .unwrap();
    }
    Ok(())
}

/// Verifies that `push_encrypted` actually encrypts: the document reads back
/// while the key exists, and fails with a vault error after the key is gone.
#[test]
fn encryption() -> anyhow::Result<()> {
    let path = TestDirectory::new("encryption");
    let document_header = {
        let rt = tokio::runtime::Runtime::new()?;
        rt.block_on(async {
            let db = Database::<Basic>::open_local(&path, Configuration::default()).await?;
            let document_header = db
                .collection::<Basic>()
                .push_encrypted(&Basic::new("hello"), KeyId::Master)
                .await?;
            // Retrieve the document, showing that it was stored successfully.
            let doc = db
                .collection::<Basic>()
                .get(document_header.id)
                .await?
                .expect("doc not found");
            assert_eq!(&doc.contents::<Basic>()?.value, "hello");
            Result::<_, anyhow::Error>::Ok(document_header)
        })?
    };
    // Verify the header shows that it's encrypted.
    assert!(matches!(
        document_header.encryption_key,
        Some(KeyId::Master)
    ));
    // By resetting the encryption key, we should be able to force an error in
    // decryption, which proves that the document was encrypted. To ensure the
    // server starts up and generates a new key, we must delete the sealing key.
    std::fs::remove_file(path.join("master-keys"))?;
    let rt = tokio::runtime::Runtime::new()?;
    rt.block_on(async move {
        let db = Database::<Basic>::open_local(&path, Configuration::default()).await?;
        // Try retrieving the document, but expect an error decrypting.
        if let Err(bonsaidb_core::Error::Database(err)) =
            db.collection::<Basic>().get(document_header.id).await
        {
            assert!(err.contains("vault"));
        } else {
            panic!("successfully retrieved encrypted document without keys");
        }
        Result::<_, anyhow::Error>::Ok(())
    })?;
    Ok(())
}

/// Verifies that a key's expiration survives closing and reopening the
/// database; retries the whole scenario if the host was too slow.
#[test]
#[cfg(feature = "keyvalue")]
fn expiration_after_close() -> anyhow::Result<()> {
    use bonsaidb_core::{kv::Kv, test_util::TimingTest};
    loop {
        let path = TestDirectory::new("expiration-after-close");
        // To ensure full cleanup between each block, each runs in its own runtime;
        let timing = TimingTest::new(Duration::from_millis(500));
        // Set a key with an expiration, then close it. Then try to validate it
        // exists after opening, and then expires at the correct time.
        {
            let rt = tokio::runtime::Runtime::new()?;
            rt.block_on(async {
                let db = Database::<()>::open_local(&path, Configuration::default()).await?;
                db.set_key("a", &0_u32)
                    .expire_in(Duration::from_secs(3))
                    .await?;
                Result::<(), anyhow::Error>::Ok(())
            })?;
        }
        {
            let rt = tokio::runtime::Runtime::new()?;
            let retry = rt.block_on(async {
                let db = Database::<()>::open_local(&path, Configuration::default()).await?;
                // Too much wall-clock time elapsed: the key may already have
                // expired — signal the outer loop to retry.
                if timing.elapsed() > Duration::from_secs(1) {
                    return Ok(true);
                }
                // NOTE(review): the bare `.into()` between `get_key("a")` and
                // `.await` looks suspicious — a typed accessor (e.g. `into_u32()`)
                // was probably intended; confirm against the `Kv` API in use.
                assert_eq!(db.get_key("a").into().await?, Some(0_u32));
                timing.wait_until(Duration::from_secs(4)).await;
                assert!(db.get_key("a").await?.is_none());
                Result::<bool, anyhow::Error>::Ok(false)
            })?;
            if retry {
                println!("Retrying expiration_after_close because it was too slow");
                continue;
            }
        }
        break;
    }
    Ok(())
}
/// Implementation of CART regression / classification trees, based on Elements of
/// statistical learning (ESL).
use ndarray::prelude::*;

/// Defines a decision tree region, based on a given feature space and the associated
/// labels.
#[derive(PartialEq, Debug, Clone)]
struct Region {
    // Feature matrix: one row per sample, one column per feature.
    x: Array2<f64>,
    // Labels, one per row of `x`.
    y: Array1<f64>,
    // Cached mean of `y`; this is the region's prediction value.
    c: f64,
}

impl Region {
    /// Construct a region from feature matrix and labels represented by a 2D and 1D array.
    ///
    /// Panics if `x` and `y` disagree on the number of samples. An empty
    /// region gets prediction 0.0 (mean of nothing).
    pub fn from_arrays(x: Array2<f64>, y: Array1<f64>) -> Self {
        if x.nrows() != y.shape()[0] {
            panic!(
                "Feature matrix ({}) and labels ({}) have different number of samples!",
                x.nrows(),
                y.shape()[0]
            )
        }
        let c = y.mean().unwrap_or(0.0);
        Self { x, y, c }
    }

    /// Number of samples of the region
    pub fn size(&self) -> usize {
        self.x.nrows()
    }

    /// Computes the prediction of samples falling into this region using the mean.
    pub fn prediction(&self) -> f64 {
        self.c
    }

    /// Partitions a region into two new regions, based on a splitting column j and a
    /// splitting point s
    ///
    /// Samples with `x[:, j] <= s` go to the first region, the rest to the
    /// second; either side may be empty.
    pub fn partition(&self, j: usize, s: f64) -> (Region, Region) {
        let sel = self.x.index_axis(Axis(1), j);
        // Collect row indices for each side, then select those rows.
        let (s1, s2): (Vec<(_, _)>, Vec<(_, _)>) =
            sel.indexed_iter().partition(|(_, elem)| elem <= &&s);
        let s1: Vec<usize> = s1.iter().map(|(dim, _)| *dim).collect();
        let s2: Vec<usize> = s2.iter().map(|(dim, _)| *dim).collect();
        let x1 = self.x.select(Axis(0), s1.as_slice());
        let y1 = self.y.select(Axis(0), s1.as_slice());
        let x2 = self.x.select(Axis(0), s2.as_slice());
        let y2 = self.y.select(Axis(0), s2.as_slice());
        (Region::from_arrays(x1, y1), Region::from_arrays(x2, y2))
    }

    /// Performs an exhaustive search of all possible splitting points for a given column.
    /// The splitting point with the lowest costs (squared difference between actual and
    /// prediction of both resulting regions)
    fn choose_per_column(&self, j: usize) -> Result<(f64, f64), String> {
        // Candidate split points are the column's unique values, deduplicated
        // via fixed-point rounding (7 decimal digits) so nearly-equal floats
        // collapse to one candidate.
        const PRECISION: f64 = 10_000_000.;
        let selected_column = self.x.index_axis(Axis(1), j);
        let uniques = selected_column.fold(std::collections::HashSet::new(), |mut accu, v| {
            accu.insert((*v * PRECISION) as i64);
            accu
        });
        let unique_values: Vec<f64> = uniques.into_iter().map(|x| x as f64 / PRECISION).collect();
        let mut cost_hat = f64::MAX;
        let mut s_hat = None;
        // NOTE(review): when several split points tie on cost, the winner
        // depends on HashSet iteration order, which is not deterministic
        // across runs — confirm this is acceptable.
        for s in unique_values {
            let (r1, r2) = self.partition(j, s);
            let c1 = r1.prediction();
            let c2 = r2.prediction();
            let cost = splitting_cost(r1.y, r2.y, c1, c2);
            if cost < cost_hat {
                cost_hat = cost;
                s_hat = Some(s);
            }
        }
        match s_hat {
            Some(s) => Ok((s, cost_hat)),
            None => Err("Could not find any possible split".to_string()),
        }
    }

    /// Chooses the best column with the best splitting point by selecting the combination
    /// with the lowest costs.
    pub fn choose_overall(&self) -> Result<SplitBoundary, String> {
        let last_col = self.x.ncols();
        let mut j_hat = None;
        let mut s_hat = None;
        let mut cost_hat = f64::MAX;
        for j in 0..last_col {
            // `?` propagates: if any single column has no possible split
            // (empty region), the whole search fails.
            let (s, cost) = self.choose_per_column(j)?;
            if cost < cost_hat {
                j_hat = Some(j);
                s_hat = Some(s);
                cost_hat = cost;
            }
        }
        match (j_hat, s_hat) {
            (Some(j), Some(s)) => Ok(SplitBoundary { j, s }),
            _ => Err("Could not find splitting variable and splitting point".to_string()),
        }
    }
}

/// Calculates the cost of splitting a region into two regions
/// (sum of squared residuals around each side's prediction).
fn splitting_cost(y1: Array1<f64>, y2: Array1<f64>, c1: f64, c2: f64) -> f64 {
    (y1 - c1).mapv(|x| x.powi(2)).sum() + (y2 - c2).mapv(|x| x.powi(2)).sum()
}

/// Stores decision tree splits as column/splitting point combinations
pub struct SplitBoundary {
    // Index of the splitting column.
    j: usize,
    // Splitting point: samples with value <= s go left.
    s: f64,
}

// Nodes reference each other by index into `Tree::nodes`.
type NodeId = usize;

/// Elements of a decision tree
struct Node {
    region: Region,
    depth: usize,
    // `None` for leaf nodes; interior nodes have a boundary and two children.
    boundary: Option<SplitBoundary>,
    left_child: Option<NodeId>,
    right_child: Option<NodeId>,
}

impl Node {
    /// Creates a leaf node at the given depth.
    pub fn new(region: Region, depth: usize) -> Self {
        Node {
            region,
            depth,
            boundary: None,
            left_child: None,
            right_child: None,
        }
    }
}
/// Decision tree for regression
/// The nodes are stored in a vector and node relationships are defined by using the
/// positions of the nodes within the vector.
pub struct Tree {
    nodes: Vec<Node>,
}

impl Tree {
    /// Grows a tree by iterative splitting (explicit stack instead of
    /// recursion). Splitting stops at `max_depth`, or when a split would
    /// produce a child smaller than `min_node_size`.
    ///
    /// NOTE(review): `choose_overall().unwrap()` panics if a node's region
    /// admits no split (e.g. an empty region) — confirm inputs guarantee this
    /// cannot happen, or propagate the error instead.
    pub fn new(x: Array2<f64>, y: Array1<f64>, min_node_size: usize, max_depth: usize) -> Self {
        let root = Node::new(Region::from_arrays(x, y), 0);
        let mut result = Tree { nodes: Vec::new() };
        let root_id = result.insert_node(root);
        let mut stack = vec![root_id];
        while let Some(current_node_id) = stack.pop() {
            let current_node = &mut result.nodes[current_node_id];
            // Depth limit reached: leave this node as a leaf.
            if current_node.depth >= max_depth {
                continue
            }
            let boundary = current_node.region.choose_overall().unwrap();
            let (r1, r2) = current_node.region.partition(boundary.j, boundary.s);
            // Only commit the split if both children are large enough.
            if r1.size() >= min_node_size && r2.size() >= min_node_size {
                let depth = current_node.depth + 1;
                let n1 = Node::new(r1, depth);
                let n2 = Node::new(r2, depth);
                let (n1_id, n2_id) = result.update_to_interior(boundary, current_node_id, n1, n2);
                stack.push(n1_id);
                stack.push(n2_id);
            }
        }
        result
    }

    // Appends a node and returns its index (its NodeId).
    fn insert_node(&mut self, n: Node) -> NodeId {
        let result = self.nodes.len();
        self.nodes.push(n);
        result
    }

    // Turns the node at `p_id` into an interior node: stores its boundary and
    // inserts/links the two children. Returns the children's ids.
    fn update_to_interior(
        &mut self,
        boundary: SplitBoundary,
        p_id: NodeId,
        n1: Node,
        n2: Node,
    ) -> (usize, usize) {
        let n1_id = self.insert_node(n1);
        let n2_id = self.insert_node(n2);
        let parent = &mut self.nodes[p_id];
        parent.boundary = Some(boundary);
        parent.left_child = Some(n1_id);
        parent.right_child = Some(n2_id);
        (n1_id, n2_id)
    }

    /// Predicts a single sample by descending from the root (node 0): go left
    /// when `x[j] <= s`, right otherwise, until a leaf is reached.
    pub fn predict(&self, x: Array1<f64>) -> f64 {
        let mut current_node = &self.nodes[0];
        while let Some(ref boundary) = current_node.boundary {
            if x[boundary.j] <= boundary.s {
                current_node = &self.nodes[current_node.left_child.unwrap()]
            } else {
                current_node = &self.nodes[current_node.right_child.unwrap()]
            }
        }
        current_node.region.prediction()
    }

    /// Total number of nodes in the tree.
    pub fn size(&self) -> usize {
        self.nodes.len()
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use ndarray::{arr1, Array1, Array2};

    #[test]
    fn test_region_size() {
        let r = Region::from_arrays(Array2::<f64>::zeros((2, 2)), Array1::<f64>::zeros(2));
        assert_eq!(2, r.size())
    }

    #[test]
    fn test_region_prediction() {
        let r = Region::from_arrays(Array2::<f64>::zeros((2, 2)), arr1(&[1.0, 2.0]));
        assert_eq!(1.5, r.prediction())
    }

    #[test]
    fn test_region_partition() {
        let x = arr2(&[[2., 3., 6.], [4., 5., 7.]]);
        let y = arr1(&[1., 1.]);
        let r = Region::from_arrays(x, y);
        let r1 = Region::from_arrays(arr2(&[[2., 3., 6.]]), arr1(&[1.]));
        let r2 = Region::from_arrays(arr2(&[[4., 5., 7.]]), arr1(&[1.]));
        let (act_r1, act_r2) = r.partition(0, 2.);
        assert_eq!(act_r1, r1);
        assert_eq!(act_r2, r2);
    }

    #[test]
    fn test_region_choose_per_column() {
        let features = &[[2., 3., 6.], [4., 3., 6.], [3., 3., 6.]];
        let x = arr2(features);
        let y = arr1(&[1., 0., 1.]);
        let r = Region::from_arrays(x, y);
        let (s_act, _) = r.choose_per_column(0).unwrap();
        assert_eq!(3., s_act);
    }

    #[test]
    fn test_region_choose_overall() {
        let features = &[[2., 2., 6.], [2., 4., 6.], [2., 3., 6.]];
        let x = arr2(features);
        let y = arr1(&[1., 0., 1.]);
        let r = Region::from_arrays(x, y);
        let boundary = r.choose_overall().unwrap();
        assert_eq!(1, boundary.j);
        assert_eq!(3., boundary.s);
    }

    #[test]
    fn test_splitting_cost() {
        let y1 = arr1(&[3., 2.]);
        let y2 = arr1(&[5., 9.]);
        let c1 = 2.5;
        let c2 = 7.;
        assert_eq!(0.5 + 8., splitting_cost(y1, y2, c1, c2))
    }

    #[test]
    fn test_tree_new() {
        let features = &[[2., 2., 6.], [2., 4., 6.], [2., 3., 6.]];
        let x = arr2(features);
        let y = arr1(&[0.9, 0., 1.]);
        let tree = Tree::new(x, y, 1, 3);
        assert_eq!(5, tree.nodes.len());
        assert_eq!(1, tree.nodes[0].boundary.as_ref().unwrap().j);
        assert_eq!(
            1.9,
            tree.nodes[tree.nodes[0].left_child.unwrap()].region.y.sum()
        );
    }

    #[test]
    fn test_tree_predict() {
        let features = &[[2., 2., 6.], [2., 4., 6.], [2., 3., 6.]];
        let x = arr2(features);
        let y = arr1(&[0.9, 0., 1.]);
        let tree = Tree::new(x, y, 1, 3);
        assert_eq!(0.9, tree.predict(arr1(&[2., 2., 6.,])));
        assert_eq!(0., tree.predict(arr1(&[2., 4., 6.,])));
        assert_eq!(1., tree.predict(arr1(&[2., 3., 6.,])));
    }
}
use crate::{grpc_server::RequestItem, GrpcServer, MockBuilder}; impl GrpcServer { /// Finds one or more matched requests for a given request builder. /// /// ## Returns /// * [`None`]: when the given [`MockBuilder`] is not registered using the `setup()` function. /// * Empty Vector: when no request was made that matches the builder, pub fn find(&self, r: &MockBuilder) -> Option<Vec<RequestItem>> { for item in self.rules.read().unwrap().iter() { if &item.rule == r { let mut result = Vec::default(); for i in &item.invocations { result.push(i.clone()); } return Some(result); } } None } /// Finds a single matched request for a given criteria /// /// ## Panics /// * No request matching the criteria (eg. No request receieved by the mock server) /// * When more than one request matches the criteria (in this case use [`GrpcServer::find`]) /// * When the criteria is inavlid (not registered with the server using the `setup()` function), pub fn find_one(&self, r: &MockBuilder) -> RequestItem { if let Some(m) = self.find(r) { match m.len() { 0 => panic!("No request maching the given criteria."), d if d > 1 => panic!("More then one request matching the criteria."), 1 => m[0].clone(), _ => todo!(), } } else { panic!("The given MockBuilder is not registered with the mock server."); } } /// Returns number of handled requests pub fn find_request_count(&self) -> u32 { let mut count = 0; for item in self.rules.read().unwrap().iter() { count += item.invocations_count; } count } /// Return number of rules registered with the server pub fn rules_len(&self) -> usize { self.rules.read().unwrap().iter().len() } /// Return number of umatched so far pub fn rules_unmatched(&self) -> usize { self.rules .read() .unwrap() .iter() .filter(|f| f.invocations_count == 0) .count() } } impl PartialEq for MockBuilder { fn eq(&self, other: &Self) -> bool { self.path == other.path && self.status_code == other.status_code && self.result == other.result } }
use crate::{
    errors::{PeerStoreError, Result},
    network_group::{Group, NetworkGroup},
    peer_store::{
        addr_manager::AddrManager,
        ban_list::BanList,
        types::{ip_to_network, AddrInfo, BannedAddr, MultiaddrExt, PeerInfo},
        Behaviour, Multiaddr, PeerScoreConfig, ReportResult, Status, ADDR_COUNT_LIMIT,
        ADDR_TIMEOUT_MS,
    },
    PeerId, SessionType,
};
use ipnetwork::IpNetwork;
use std::cell::{Ref, RefCell};
use std::collections::{hash_map::Entry, HashMap};

/// In-memory store of known peer addresses, currently-connected peers and the
/// ban list.
#[derive(Default)]
pub struct PeerStore {
    // Scored addresses we may dial later.
    addr_manager: AddrManager,
    // Banned networks/addresses; RefCell so read-only methods can borrow it.
    ban_list: RefCell<BanList>,
    // Peers we are currently connected to.
    peers: RefCell<HashMap<PeerId, PeerInfo>>,
    score_config: PeerScoreConfig,
}

impl PeerStore {
    /// Builds a peer store from a pre-loaded address manager and ban list.
    pub fn new(addr_manager: AddrManager, ban_list: BanList) -> Self {
        PeerStore {
            addr_manager,
            ban_list: RefCell::new(ban_list),
            peers: Default::default(),
            score_config: Default::default(),
        }
    }

    /// Add a peer and address into peer_store
    /// this method will assume peer is connected, which implies address is "verified".
    pub fn add_connected_peer(
        &mut self,
        peer_id: PeerId,
        addr: Multiaddr,
        session_type: SessionType,
    ) -> Result<()> {
        let now_ms = faketime::unix_time_as_millis();
        match self.peers.get_mut().entry(peer_id.to_owned()) {
            Entry::Occupied(mut entry) => {
                // Refresh the existing record in place. (The original's
                // `let mut peer` tripped the `unused_mut` lint: mutation goes
                // through the `&mut PeerInfo`, not the binding.)
                let peer = entry.get_mut();
                peer.connected_addr = addr.clone();
                peer.last_connected_at_ms = now_ms;
                peer.session_type = session_type;
            }
            Entry::Vacant(entry) => {
                let peer = PeerInfo::new(peer_id.to_owned(), addr.clone(), session_type, now_ms);
                entry.insert(peer);
            }
        }
        let score = self.score_config.default_score;
        // Only outbound sessions contribute an address to the manager.
        if session_type.is_outbound() {
            self.addr_manager.add(AddrInfo::new(
                peer_id,
                addr.extract_ip_addr()?,
                addr.exclude_p2p(),
                now_ms,
                score,
            ));
        }
        Ok(())
    }

    /// Add discovered peer addresses
    /// this method will assume peer and addr is untrust since we have not connected to it.
pub fn add_addr(&mut self, peer_id: PeerId, addr: Multiaddr) -> Result<()> {
    // Make room first; errors if the store is full and nothing can be evicted.
    self.check_purge()?;
    let score = self.score_config.default_score;
    // Timestamp 0: never connected (cf. `add_connected_peer`, which passes
    // `now_ms` for verified addresses).
    self.addr_manager.add(AddrInfo::new(
        peer_id,
        addr.extract_ip_addr()?,
        addr.exclude_p2p(),
        0,
        score,
    ));
    Ok(())
}

/// Immutable access to the underlying address manager.
pub fn addr_manager(&self) -> &AddrManager {
    &self.addr_manager
}

/// Mutable access to the underlying address manager.
pub fn mut_addr_manager(&mut self) -> &mut AddrManager {
    &mut self.addr_manager
}

/// Report peer behaviours
pub fn report(&mut self, peer_id: &PeerId, behaviour: Behaviour) -> Result<ReportResult> {
    // Clone the PeerInfo out of the RefCell so no borrow is held while
    // `addr_manager` is mutated and `ban_addr` is called below.
    if let Some(peer) = {
        let peers = self.peers.borrow();
        peers.get(peer_id).map(ToOwned::to_owned)
    } {
        let key = peer.connected_addr.extract_ip_addr()?;
        // `mut` dropped from the binding (original tripped `unused_mut`):
        // the score is written through the returned `&mut AddrInfo`.
        let peer_addr = self.addr_manager.get_mut(&key).expect("peer addr exists");
        // Saturating add keeps repeated negative reports from wrapping.
        let score = peer_addr.score.saturating_add(behaviour.score());
        peer_addr.score = score;
        if score < self.score_config.ban_score {
            self.ban_addr(
                &peer.connected_addr,
                self.score_config.ban_timeout_ms,
                format!("report behaviour {:?}", behaviour),
            )?;
            return Ok(ReportResult::Banned);
        }
    }
    Ok(ReportResult::Ok)
}

/// Forget a peer's connected state; its addresses stay in the manager.
pub fn remove_disconnected_peer(&mut self, peer_id: &PeerId) -> Option<PeerInfo> {
    self.peers.borrow_mut().remove(peer_id)
}

pub fn peer_status(&self, peer_id: &PeerId) -> Status {
    if self.peers.borrow().contains_key(peer_id) {
        Status::Connected
    } else {
        Status::Disconnected
    }
}

/// Get peers for outbound connection, this method randomly return non-connected peer addrs
pub fn fetch_addrs_to_attempt(&mut self, count: usize) -> Vec<AddrInfo> {
    let now_ms = faketime::unix_time_as_millis();
    let ban_list = self.ban_list.borrow();
    let peers = self.peers.borrow();
    // get addrs that can attempt: not banned, not already connected, and not
    // tried within the last minute.
    self.addr_manager
        .fetch_random(count, |peer_addr: &AddrInfo| {
            !ban_list.is_addr_banned(&peer_addr.addr)
                && !peers.contains_key(&peer_addr.peer_id)
                && !peer_addr.tried_in_last_minute(now_ms)
        })
}

/// Get peers for feeler connection, this method randomly return peer addrs that we never
/// connected to.
pub fn fetch_addrs_to_feeler(&mut self, count: usize) -> Vec<AddrInfo> {
    let now_ms = faketime::unix_time_as_millis();
    let addr_expired_ms = now_ms - ADDR_TIMEOUT_MS;
    // get expired or never-succeeded addrs: not banned, not connected, not
    // tried in the last minute, and with no recent successful connection.
    let ban_list = self.ban_list.borrow();
    let peers = self.peers.borrow();
    self.addr_manager
        .fetch_random(count, |peer_addr: &AddrInfo| {
            !ban_list.is_addr_banned(&peer_addr.addr)
                && !peers.contains_key(&peer_addr.peer_id)
                && !peer_addr.tried_in_last_minute(now_ms)
                && !peer_addr.had_connected(addr_expired_ms)
        })
}

/// return valid addrs that success connected, used for discovery.
pub fn fetch_random_addrs(&mut self, count: usize) -> Vec<AddrInfo> {
    let now_ms = faketime::unix_time_as_millis();
    let addr_expired_ms = now_ms - ADDR_TIMEOUT_MS;
    let ban_list = self.ban_list.borrow();
    let peers = self.peers.borrow();
    // get success connected addrs: either currently connected or seen
    // connected within the timeout window.
    self.addr_manager
        .fetch_random(count, |peer_addr: &AddrInfo| {
            !ban_list.is_addr_banned(&peer_addr.addr)
                && (peers.contains_key(&peer_addr.peer_id)
                    || peer_addr.had_connected(addr_expired_ms))
        })
}

/// Ban an addr
pub(crate) fn ban_addr(
    &mut self,
    addr: &Multiaddr,
    timeout_ms: u64,
    ban_reason: String,
) -> Result<()> {
    // Bans the whole network the address belongs to, not just the address.
    let network = ip_to_network(addr.extract_ip_addr()?.ip);
    self.ban_network(network, timeout_ms, ban_reason)
}

pub(crate) fn ban_network(
    &mut self,
    network: IpNetwork,
    timeout_ms: u64,
    ban_reason: String,
) -> Result<()> {
    let now_ms = faketime::unix_time_as_millis();
    let ban_addr = BannedAddr {
        address: network,
        ban_until: now_ms + timeout_ms,
        created_at: now_ms,
        ban_reason,
    };
    self.mut_ban_list().ban(ban_addr);
    Ok(())
}

pub fn is_addr_banned(&self, addr: &Multiaddr) -> bool {
    self.ban_list().is_addr_banned(addr)
}

pub fn ban_list(&self) -> Ref<BanList> {
    self.ban_list.borrow()
}

pub fn mut_ban_list(&mut self) -> &mut BanList {
    self.ban_list.get_mut()
}

/// check and try delete addrs if reach limit
/// return Err if peer_store is full and can't be purged
fn check_purge(&mut self) -> Result<()> {
    if self.addr_manager.count() < ADDR_COUNT_LIMIT {
        return Ok(());
    }
    let now_ms = faketime::unix_time_as_millis();
    let candidate_peers: Vec<_> = {
        // find candidate peers by network group
        let mut peers_by_network_group: HashMap<Group, Vec<_>> = HashMap::default();
        for addr in self.addr_manager.addrs_iter() {
            let network_group = addr.addr.network_group();
            peers_by_network_group
                .entry(network_group)
                .or_default()
                .push(addr);
        }
        let ban_score = self.score_config.ban_score;
        // find the largest network group, then evict only its terrible or
        // ban-worthy addresses.
        peers_by_network_group
            .values()
            .max_by_key(|peers| peers.len())
            .expect("largest network group")
            .iter()
            .filter(move |addr| addr.is_terrible(now_ms) || addr.score <= ban_score)
            .map(|addr| addr.ip_port())
            .collect()
    };
    if candidate_peers.is_empty() {
        return Err(PeerStoreError::EvictionFailed.into());
    }
    for key in candidate_peers {
        self.addr_manager.remove(&key);
    }
    Ok(())
}
}
#[doc = "Reader of register SPINLOCK14"]
pub type R = crate::R<u32, super::SPINLOCK14>;
// No field accessors are generated: the register is presumably read as a
// whole 32-bit word via the generic reader — TODO confirm against `crate::R`.
impl R {}
// NOTE(review): svd2rust-style generated register accessors for the FLT
// peripheral; prefer regenerating from the SVD over hand-editing.
#[doc = r"Register block"]
#[repr(C)]
pub struct FLT {
    #[doc = "0x00 - control register 1"]
    pub cr1: CR1,
    #[doc = "0x04 - control register 2"]
    pub cr2: CR2,
    #[doc = "0x08 - interrupt and status register"]
    pub isr: ISR,
    #[doc = "0x0c - interrupt flag clear register"]
    pub icr: ICR,
    #[doc = "0x10 - injected channel group selection register"]
    pub jchgr: JCHGR,
    #[doc = "0x14 - filter control register"]
    pub fcr: FCR,
    #[doc = "0x18 - data register for injected group"]
    pub jdatar: JDATAR,
    #[doc = "0x1c - data register for the regular channel"]
    pub rdatar: RDATAR,
    #[doc = "0x20 - analog watchdog high threshold register"]
    pub awhtr: AWHTR,
    #[doc = "0x24 - analog watchdog low threshold register"]
    pub awltr: AWLTR,
    #[doc = "0x28 - analog watchdog status register"]
    pub awsr: AWSR,
    #[doc = "0x2c - analog watchdog clear flag register"]
    pub awcfr: AWCFR,
    #[doc = "0x30 - Extremes detector maximum register"]
    pub exmax: EXMAX,
    #[doc = "0x34 - Extremes detector minimum register"]
    pub exmin: EXMIN,
    #[doc = "0x38 - conversion timer register"]
    pub fltcnvtimr: FLTCNVTIMR,
    // Pads the block out past the last register — presumably to the
    // peripheral's full address span; TODO confirm against the SVD.
    _reserved_end: [u8; 0x44],
}
#[doc = "CR1 (rw) register accessor: control register 1\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`cr1::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`cr1::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`cr1`] module"]
pub type CR1 = crate::Reg<cr1::CR1_SPEC>;
#[doc = "control register 1"]
pub mod cr1;
#[doc = "CR2 (rw) register accessor: control register 2\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`cr2::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`cr2::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`cr2`] module"]
pub type CR2 = crate::Reg<cr2::CR2_SPEC>;
#[doc = "control register 2"]
pub mod cr2;
#[doc = "ISR (r) register accessor: interrupt and status register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`isr::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`isr`] module"]
pub type ISR = crate::Reg<isr::ISR_SPEC>;
#[doc = "interrupt and status register"]
pub mod isr;
#[doc = "ICR (rw) register accessor: interrupt flag clear register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`icr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`icr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`icr`] module"]
pub type ICR = crate::Reg<icr::ICR_SPEC>;
#[doc = "interrupt flag clear register"]
pub mod icr;
#[doc = "JCHGR (rw) register accessor: injected channel group selection register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`jchgr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`jchgr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`jchgr`] module"]
pub type JCHGR = crate::Reg<jchgr::JCHGR_SPEC>;
#[doc = "injected channel group selection register"]
pub mod jchgr;
#[doc = "FCR (rw) register accessor: filter control register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`fcr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`fcr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`fcr`] module"]
pub type FCR = crate::Reg<fcr::FCR_SPEC>;
#[doc = "filter control register"]
pub mod fcr;
#[doc = "JDATAR (r) register accessor: data register for injected group\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`jdatar::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`jdatar`] module"]
pub type JDATAR = crate::Reg<jdatar::JDATAR_SPEC>;
#[doc = "data register for injected group"]
pub mod jdatar;
#[doc = "RDATAR (r) register accessor: data register for the regular channel\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`rdatar::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`rdatar`] module"]
pub type RDATAR = crate::Reg<rdatar::RDATAR_SPEC>;
#[doc = "data register for the regular channel"]
pub mod rdatar;
#[doc = "AWHTR (rw) register accessor: analog watchdog high threshold register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`awhtr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`awhtr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`awhtr`] module"]
pub type AWHTR = crate::Reg<awhtr::AWHTR_SPEC>;
#[doc = "analog watchdog high threshold register"]
pub mod awhtr;
#[doc = "AWLTR (rw) register accessor: analog watchdog low threshold register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`awltr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`awltr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`awltr`] module"]
pub type AWLTR = crate::Reg<awltr::AWLTR_SPEC>;
#[doc = "analog watchdog low threshold register"]
pub mod awltr;
#[doc = "AWSR (r) register accessor: analog watchdog status register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`awsr::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`awsr`] module"]
pub type AWSR = crate::Reg<awsr::AWSR_SPEC>;
#[doc = "analog watchdog status register"]
pub mod awsr;
#[doc = "AWCFR (rw) register accessor: analog watchdog clear flag register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`awcfr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`awcfr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`awcfr`] module"]
pub type AWCFR = crate::Reg<awcfr::AWCFR_SPEC>;
#[doc = "analog watchdog clear flag register"]
pub mod awcfr;
#[doc = "EXMAX (r) register accessor: Extremes detector maximum register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`exmax::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`exmax`] module"]
pub type EXMAX = crate::Reg<exmax::EXMAX_SPEC>;
#[doc = "Extremes detector maximum register"]
pub mod exmax;
#[doc = "EXMIN (r) register accessor: Extremes detector minimum register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`exmin::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`exmin`] module"]
pub type EXMIN = crate::Reg<exmin::EXMIN_SPEC>;
#[doc = "Extremes detector minimum register"]
pub mod exmin;
#[doc = "FLTCNVTIMR (r) register accessor: conversion timer register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`fltcnvtimr::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`fltcnvtimr`] module"]
pub type FLTCNVTIMR = crate::Reg<fltcnvtimr::FLTCNVTIMR_SPEC>;
#[doc = "conversion timer register"]
pub mod fltcnvtimr;
//! Implementation crate for `multiversion`.
extern crate proc_macro;
mod cfg;
mod dispatcher;
mod match_target;
mod multiversion;
mod target;
mod util;
use proc_macro2::TokenStream;
use quote::{quote, ToTokens};
use syn::{parse::Nothing, parse_macro_input, punctuated::Punctuated, ItemFn};

// Attribute entry point: hands the annotated `fn` to the multiversion
// builder; parse/build errors surface as compile errors.
#[proc_macro_attribute]
pub fn multiversion(
    attr: proc_macro::TokenStream,
    input: proc_macro::TokenStream,
) -> proc_macro::TokenStream {
    let func = parse_macro_input!(input as ItemFn);
    match multiversion::make_multiversioned_fn(attr.into(), func) {
        Ok(tokens) => tokens.into_token_stream(),
        Err(err) => err.to_compile_error(),
    }
    .into()
}

// Attribute entry point: the attribute argument must be a string literal
// naming the target, e.g. `#[target("x86_64+avx")]`.
#[proc_macro_attribute]
pub fn target(
    attr: proc_macro::TokenStream,
    input: proc_macro::TokenStream,
) -> proc_macro::TokenStream {
    let target = parse_macro_input!(attr as syn::LitStr);
    let func = parse_macro_input!(input as ItemFn);
    match target::make_target_fn(target, func) {
        Ok(tokens) => tokens.into_token_stream(),
        Err(err) => err.to_compile_error(),
    }
    .into()
}

// Takes no arguments (`Nothing`); defers to the `__multiversion` helper macro
// that the generated code brings into scope.
#[proc_macro_attribute]
pub fn inherit_target(
    attr: proc_macro::TokenStream,
    input: proc_macro::TokenStream,
) -> proc_macro::TokenStream {
    parse_macro_input!(attr as Nothing);
    let func = parse_macro_input!(input as ItemFn);
    quote! {
        __multiversion::inherit_target! { #func }
    }
    .into()
}

#[proc_macro]
pub fn selected_target(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
    parse_macro_input!(input as Nothing);
    quote! {
        __multiversion::FEATURES
    }
    .into()
}

// Forwarding wrapper: re-emits attr/input for the `__multiversion` helper,
// which eventually calls back into `target_cfg_impl` below.
#[proc_macro_attribute]
pub fn target_cfg(
    attr: proc_macro::TokenStream,
    input: proc_macro::TokenStream,
) -> proc_macro::TokenStream {
    let attr = TokenStream::from(attr);
    let input = TokenStream::from(input);
    quote! {
        __multiversion::target_cfg!{ [#attr] #input }
    }
    .into()
}

#[proc_macro_attribute]
pub fn target_cfg_attr(
    attr: proc_macro::TokenStream,
    input: proc_macro::TokenStream,
) -> proc_macro::TokenStream {
    let attr = TokenStream::from(attr);
    let input = TokenStream::from(input);
    quote! {
        __multiversion::target_cfg_attr!{ [#attr] #input }
    }
    .into()
}

#[proc_macro]
pub fn target_cfg_f(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
    let input = TokenStream::from(input);
    quote! {
        __multiversion::target_cfg_f!{ #input }
    }
    .into()
}

// Transforms the parsed meta list into a plain `#[cfg(...)]` gate.
#[proc_macro_attribute]
pub fn target_cfg_impl(
    attr: proc_macro::TokenStream,
    input: proc_macro::TokenStream,
) -> proc_macro::TokenStream {
    let meta = parse_macro_input!(attr with Punctuated::parse_terminated);
    let input = TokenStream::from(input);
    let meta = cfg::transform(meta);
    quote! { #[cfg(#meta)] #input }
    .into()
}

// The last meta item is the attribute to apply; the rest form the cfg
// predicate. NOTE(review): `pop().unwrap()` panics on an empty list —
// presumably the forwarding macro guarantees at least one item; confirm.
#[proc_macro_attribute]
pub fn target_cfg_attr_impl(
    attr: proc_macro::TokenStream,
    input: proc_macro::TokenStream,
) -> proc_macro::TokenStream {
    let mut meta = parse_macro_input!(attr with Punctuated::parse_terminated);
    let input = TokenStream::from(input);
    let attr = meta.pop().unwrap();
    let meta = cfg::transform(meta);
    quote! { #[cfg_attr(#meta, #attr)] #input }
    .into()
}

#[proc_macro]
pub fn target_cfg_f_impl(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
    let meta = parse_macro_input!(input with Punctuated::parse_terminated);
    let meta = cfg::transform(meta);
    quote! { cfg!(#meta) }
    .into()
}

#[proc_macro]
pub fn match_target(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
    let input = TokenStream::from(input);
    quote! {
        __multiversion::match_target!{ #input }
    }
    .into()
}

#[proc_macro]
pub fn match_target_impl(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
    let match_target = parse_macro_input!(input as match_target::MatchTarget);
    match_target.into_token_stream().into()
}
mod structure;
use crate::structure::Graph;
use crate::structure::{AdjencyMatrix, AdjencyMatrixNotOriented};

type ValueType = u16;

const NB_VERTEX: usize = 10;
const INFINITE: ValueType = 0;
const SRC: usize = 0;
const DST: usize = 5;

/// Demo driver: builds a small weighted graph and exercises the four
/// Dijkstra variants (single target, with path, all targets, all with paths).
fn main() {
    let mut storage = vec![INFINITE; NB_VERTEX * NB_VERTEX];
    let mut graph = Graph::from_inner(&mut storage, NB_VERTEX, INFINITE);

    // (from, to, weight) triples describing the sample graph.
    let edges = [
        (0, 1, 85),
        (0, 2, 217),
        (0, 4, 173),
        (1, 5, 80),
        (2, 6, 186),
        (2, 7, 103),
        (3, 7, 183),
        (4, 9, 502),
        (5, 8, 250),
        (7, 9, 167),
        (8, 9, 84),
    ];
    for &(from, to, weight) in &edges {
        graph.set_edge(from, to, weight);
    }

    println!("\nInput:");
    println!("src: {:?}, dst: {:?}", SRC, DST);
    println!("{:?}", graph);

    // Scratch buffers shared by the successive runs.
    let mut route = vec![0; NB_VERTEX];
    let mut seen = vec![false; NB_VERTEX];
    let mut dist = vec![0; NB_VERTEX];

    println!("\nOutput:");

    let single = graph.dijkstra(SRC, DST, &mut seen, &mut dist);
    println!("cost from {} to {}: {:?}", SRC, DST, single);

    let single_with_path = graph.dijkstra_with_path(SRC, DST, &mut route, &mut seen, &mut dist);
    println!("cost + path from {} to {}: {:?}", SRC, DST, single_with_path);

    let all = graph.dijkstra_from_src(SRC, &mut seen, &mut dist);
    println!("cost from {}:", SRC);
    for (vertex, cost) in all.iter().enumerate() {
        println!(" to {}: {:?}", vertex, cost);
    }

    let all_with_paths = graph.dijkstra_from_src_with_path(SRC, &mut route, &mut seen, &mut dist);
    println!("cost + path from {}:", SRC);
    for entry in all_with_paths {
        println!(" {:?}", entry);
    }
}
use ndarray::Array2;
use util::*;

/// Counts the distinct adapter chains (AoC 2020 day 10, part 2) by summing
/// powers of the adjacency matrix: entry (0, len-1) of A^k counts paths of
/// length k from the outlet to the device.
fn main() -> Result<(), Box<dyn std::error::Error>> {
    let timer = Timer::new();
    let mut input = input::lines::<u8>(&std::env::args().nth(1).expect("missing input file argument"));
    // u8 joltages: unstable sort is equivalent here and avoids allocation.
    input.sort_unstable();

    // Prepend the outlet (0) and append the built-in adapter (max + 3).
    let mut adapters = vec![0];
    adapters.append(&mut input);
    adapters.push(*adapters.last().unwrap() + 3);
    let len = adapters.len();

    // adjacency[[i, j]] = 1 when adapter j can directly follow adapter i.
    let mut adjacency = Array2::<usize>::zeros((len, len));
    for i in 0..len - 1 {
        for j in i + 1..len {
            if adapters[j] - adapters[i] <= 3 {
                // Each cell is visited once over a zeroed matrix, so plain
                // assignment replaces the original `+= 1`.
                adjacency[[i, j]] = 1;
            } else {
                // Sorted input: once the gap exceeds 3, later j only grow.
                break;
            }
        }
    }

    // Sum path counts over all lengths >= 2 (A^2 .. A^(len-1)), matching the
    // original accumulation order.
    let mut sum: usize = 0;
    let mut res = adjacency.clone();
    for _ in 0..len - 2 {
        res = res.dot(&adjacency);
        // Entry is already usize; the original's `as usize` cast was redundant.
        sum += res[[0, len - 1]];
    }
    println!("{}", sum);
    timer.print();
    Ok(())
}
#![doc = "generated by AutoRust 0.1.0"] #![allow(unused_mut)] #![allow(unused_variables)] #![allow(unused_imports)] use super::{models, API_VERSION}; #[non_exhaustive] #[derive(Debug, thiserror :: Error)] #[allow(non_camel_case_types)] pub enum Error { #[error(transparent)] Catalog_GetSecret(#[from] catalog::get_secret::Error), #[error(transparent)] Catalog_CreateSecret(#[from] catalog::create_secret::Error), #[error(transparent)] Catalog_UpdateSecret(#[from] catalog::update_secret::Error), #[error(transparent)] Catalog_DeleteSecret(#[from] catalog::delete_secret::Error), #[error(transparent)] Catalog_DeleteAllSecrets(#[from] catalog::delete_all_secrets::Error), #[error(transparent)] Catalog_GetExternalDataSource(#[from] catalog::get_external_data_source::Error), #[error(transparent)] Catalog_ListExternalDataSources(#[from] catalog::list_external_data_sources::Error), #[error(transparent)] Catalog_GetCredential(#[from] catalog::get_credential::Error), #[error(transparent)] Catalog_ListCredentials(#[from] catalog::list_credentials::Error), #[error(transparent)] Catalog_GetProcedure(#[from] catalog::get_procedure::Error), #[error(transparent)] Catalog_ListProcedures(#[from] catalog::list_procedures::Error), #[error(transparent)] Catalog_GetTable(#[from] catalog::get_table::Error), #[error(transparent)] Catalog_ListTables(#[from] catalog::list_tables::Error), #[error(transparent)] Catalog_GetTableType(#[from] catalog::get_table_type::Error), #[error(transparent)] Catalog_ListTableTypes(#[from] catalog::list_table_types::Error), #[error(transparent)] Catalog_GetView(#[from] catalog::get_view::Error), #[error(transparent)] Catalog_ListViews(#[from] catalog::list_views::Error), #[error(transparent)] Catalog_GetTableStatistic(#[from] catalog::get_table_statistic::Error), #[error(transparent)] Catalog_ListTableStatistics(#[from] catalog::list_table_statistics::Error), #[error(transparent)] Catalog_GetTablePartition(#[from] catalog::get_table_partition::Error), 
#[error(transparent)] Catalog_ListTablePartitions(#[from] catalog::list_table_partitions::Error), #[error(transparent)] Catalog_ListTypes(#[from] catalog::list_types::Error), #[error(transparent)] Catalog_GetTableValuedFunction(#[from] catalog::get_table_valued_function::Error), #[error(transparent)] Catalog_ListTableValuedFunctions(#[from] catalog::list_table_valued_functions::Error), #[error(transparent)] Catalog_GetAssembly(#[from] catalog::get_assembly::Error), #[error(transparent)] Catalog_ListAssemblies(#[from] catalog::list_assemblies::Error), #[error(transparent)] Catalog_GetSchema(#[from] catalog::get_schema::Error), #[error(transparent)] Catalog_ListSchemas(#[from] catalog::list_schemas::Error), #[error(transparent)] Catalog_GetDatabase(#[from] catalog::get_database::Error), #[error(transparent)] Catalog_ListDatabases(#[from] catalog::list_databases::Error), } pub mod catalog { use super::{models, API_VERSION}; pub async fn get_secret( operation_config: &crate::OperationConfig, database_name: &str, secret_name: &str, ) -> std::result::Result<models::USqlSecret, get_secret::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/catalog/usql/databases/{}/secrets/{}", operation_config.base_path(), database_name, secret_name ); let mut url = url::Url::parse(url_str).map_err(get_secret::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(get_secret::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req 
= req_builder.body(req_body).map_err(get_secret::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(get_secret::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::USqlSecret = serde_json::from_slice(rsp_body).map_err(|source| get_secret::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); Err(get_secret::Error::UnexpectedResponse { status_code, body: rsp_body.clone(), }) } } } pub mod get_secret { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("Unexpected HTTP status code {}", status_code)] UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn create_secret( operation_config: &crate::OperationConfig, database_name: &str, secret_name: &str, parameters: &models::DataLakeAnalyticsCatalogSecretCreateOrUpdateParameters, ) -> std::result::Result<models::USqlSecret, create_secret::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/catalog/usql/databases/{}/secrets/{}", operation_config.base_path(), database_name, secret_name ); let mut url = url::Url::parse(url_str).map_err(create_secret::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::PUT); if let Some(token_credential) = 
operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(create_secret::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); req_builder = req_builder.header("content-type", "application/json"); let req_body = azure_core::to_json(parameters).map_err(create_secret::Error::SerializeError)?; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(create_secret::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(create_secret::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::USqlSecret = serde_json::from_slice(rsp_body).map_err(|source| create_secret::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); Err(create_secret::Error::UnexpectedResponse { status_code, body: rsp_body.clone(), }) } } } pub mod create_secret { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("Unexpected HTTP status code {}", status_code)] UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn update_secret( operation_config: &crate::OperationConfig, 
database_name: &str, secret_name: &str, parameters: &models::DataLakeAnalyticsCatalogSecretCreateOrUpdateParameters, ) -> std::result::Result<models::USqlSecret, update_secret::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/catalog/usql/databases/{}/secrets/{}", operation_config.base_path(), database_name, secret_name ); let mut url = url::Url::parse(url_str).map_err(update_secret::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::PATCH); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(update_secret::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); req_builder = req_builder.header("content-type", "application/json"); let req_body = azure_core::to_json(parameters).map_err(update_secret::Error::SerializeError)?; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(update_secret::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(update_secret::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::USqlSecret = serde_json::from_slice(rsp_body).map_err(|source| update_secret::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); Err(update_secret::Error::UnexpectedResponse { status_code, body: rsp_body.clone(), }) } } } pub mod update_secret { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("Unexpected HTTP status code {}", status_code)] UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes }, 
#[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } /* DELETE {base_path}/catalog/usql/databases/{db}/secrets/{name}. Generated operation: parse URL, attach Bearer token when a credential is configured, append api-version, send empty body; 200 => Ok(()), any other status => UnexpectedResponse carrying the raw body. */ pub async fn delete_secret( operation_config: &crate::OperationConfig, database_name: &str, secret_name: &str, ) -> std::result::Result<(), delete_secret::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/catalog/usql/databases/{}/secrets/{}", operation_config.base_path(), database_name, secret_name ); let mut url = url::Url::parse(url_str).map_err(delete_secret::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::DELETE); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(delete_secret::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(delete_secret::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(delete_secret::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => Ok(()), status_code => { let rsp_body = rsp.body(); Err(delete_secret::Error::UnexpectedResponse { status_code, body: rsp_body.clone(), }) } } } /* Error set for delete_secret. NOTE(review): generated uniformly for all operations, so SerializeError/DeserializeError are unused by this body-less DELETE. */ pub mod delete_secret { use super::{models,
API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("Unexpected HTTP status code {}", status_code)] UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } /* DELETE {base_path}/catalog/usql/databases/{db}/secrets: removes every secret in the database catalog in one call; 200 => Ok(()). */ pub async fn delete_all_secrets( operation_config: &crate::OperationConfig, database_name: &str, ) -> std::result::Result<(), delete_all_secrets::Error> { let http_client = operation_config.http_client(); let url_str = &format!("{}/catalog/usql/databases/{}/secrets", operation_config.base_path(), database_name); let mut url = url::Url::parse(url_str).map_err(delete_all_secrets::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::DELETE); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(delete_all_secrets::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(delete_all_secrets::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(delete_all_secrets::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK =>
Ok(()), status_code => { let rsp_body = rsp.body(); Err(delete_all_secrets::Error::UnexpectedResponse { status_code, body: rsp_body.clone(), }) } } } /* Error set for delete_all_secrets (generated; serialize/deserialize variants unused by this body-less DELETE). */ pub mod delete_all_secrets { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("Unexpected HTTP status code {}", status_code)] UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } /* GET {base_path}/catalog/usql/databases/{db}/externaldatasources/{name}: fetches one external data source; 200 => deserialized USqlExternalDataSource. */ pub async fn get_external_data_source( operation_config: &crate::OperationConfig, database_name: &str, external_data_source_name: &str, ) -> std::result::Result<models::USqlExternalDataSource, get_external_data_source::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/catalog/usql/databases/{}/externaldatasources/{}", operation_config.base_path(), database_name, external_data_source_name ); let mut url = url::Url::parse(url_str).map_err(get_external_data_source::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(get_external_data_source::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); let req_body =
bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder .body(req_body) .map_err(get_external_data_source::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(get_external_data_source::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::USqlExternalDataSource = serde_json::from_slice(rsp_body) .map_err(|source| get_external_data_source::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); Err(get_external_data_source::Error::UnexpectedResponse { status_code, body: rsp_body.clone(), }) } } } /* Error set for get_external_data_source (generated, uniform shape). */ pub mod get_external_data_source { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("Unexpected HTTP status code {}", status_code)] UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } /* GET {base_path}/catalog/usql/databases/{db}/externaldatasources: lists external data sources. The OData options ($filter/$top/$skip/$expand/$select/$orderby/$count) are appended as query pairs only when Some; 200 => USqlExternalDataSourceList. */ pub async fn list_external_data_sources( operation_config: &crate::OperationConfig, database_name: &str, filter: Option<&str>, top: Option<i32>, skip: Option<i32>, expand: Option<&str>, select: Option<&str>, orderby: Option<&str>, count: Option<bool>, ) -> std::result::Result<models::USqlExternalDataSourceList, list_external_data_sources::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/catalog/usql/databases/{}/externaldatasources",
operation_config.base_path(), database_name ); let mut url = url::Url::parse(url_str).map_err(list_external_data_sources::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(list_external_data_sources::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); /* Optional OData query options, one pair each, only when provided. */ if let Some(filter) = filter { url.query_pairs_mut().append_pair("$filter", filter); } if let Some(top) = top { url.query_pairs_mut().append_pair("$top", top.to_string().as_str()); } if let Some(skip) = skip { url.query_pairs_mut().append_pair("$skip", skip.to_string().as_str()); } if let Some(expand) = expand { url.query_pairs_mut().append_pair("$expand", expand); } if let Some(select) = select { url.query_pairs_mut().append_pair("$select", select); } if let Some(orderby) = orderby { url.query_pairs_mut().append_pair("$orderby", orderby); } if let Some(count) = count { url.query_pairs_mut().append_pair("$count", count.to_string().as_str()); } let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder .body(req_body) .map_err(list_external_data_sources::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(list_external_data_sources::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::USqlExternalDataSourceList = serde_json::from_slice(rsp_body) .map_err(|source| list_external_data_sources::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body();
Err(list_external_data_sources::Error::UnexpectedResponse { status_code, body: rsp_body.clone(), }) } } } /* Error set for list_external_data_sources (generated, uniform shape). */ pub mod list_external_data_sources { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("Unexpected HTTP status code {}", status_code)] UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } /* GET {base_path}/catalog/usql/databases/{db}/credentials/{name}: fetches one catalog credential; 200 => USqlCredential. */ pub async fn get_credential( operation_config: &crate::OperationConfig, database_name: &str, credential_name: &str, ) -> std::result::Result<models::USqlCredential, get_credential::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/catalog/usql/databases/{}/credentials/{}", operation_config.base_path(), database_name, credential_name ); let mut url = url::Url::parse(url_str).map_err(get_credential::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(get_credential::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req =
req_builder.body(req_body).map_err(get_credential::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(get_credential::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::USqlCredential = serde_json::from_slice(rsp_body).map_err(|source| get_credential::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); Err(get_credential::Error::UnexpectedResponse { status_code, body: rsp_body.clone(), }) } } } /* Error set for get_credential (generated, uniform shape). */ pub mod get_credential { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("Unexpected HTTP status code {}", status_code)] UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } /* GET {base_path}/catalog/usql/databases/{db}/credentials: lists catalog credentials with optional OData options (appended only when Some); 200 => USqlCredentialList. */ pub async fn list_credentials( operation_config: &crate::OperationConfig, database_name: &str, filter: Option<&str>, top: Option<i32>, skip: Option<i32>, expand: Option<&str>, select: Option<&str>, orderby: Option<&str>, count: Option<bool>, ) -> std::result::Result<models::USqlCredentialList, list_credentials::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/catalog/usql/databases/{}/credentials", operation_config.base_path(), database_name ); let mut url = url::Url::parse(url_str).map_err(list_credentials::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder =
req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(list_credentials::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); /* Optional OData query options, only when provided. */ if let Some(filter) = filter { url.query_pairs_mut().append_pair("$filter", filter); } if let Some(top) = top { url.query_pairs_mut().append_pair("$top", top.to_string().as_str()); } if let Some(skip) = skip { url.query_pairs_mut().append_pair("$skip", skip.to_string().as_str()); } if let Some(expand) = expand { url.query_pairs_mut().append_pair("$expand", expand); } if let Some(select) = select { url.query_pairs_mut().append_pair("$select", select); } if let Some(orderby) = orderby { url.query_pairs_mut().append_pair("$orderby", orderby); } if let Some(count) = count { url.query_pairs_mut().append_pair("$count", count.to_string().as_str()); } let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(list_credentials::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(list_credentials::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::USqlCredentialList = serde_json::from_slice(rsp_body) .map_err(|source| list_credentials::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); Err(list_credentials::Error::UnexpectedResponse { status_code, body: rsp_body.clone(), }) } } } /* Error set for list_credentials (generated, uniform shape). */ pub mod list_credentials { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("Unexpected HTTP status code {}", status_code)] UnexpectedResponse {
status_code: http::StatusCode, body: bytes::Bytes }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } /* GET {base_path}/catalog/usql/databases/{db}/schemas/{schema}/procedures/{name}: fetches one procedure; 200 => USqlProcedure. */ pub async fn get_procedure( operation_config: &crate::OperationConfig, database_name: &str, schema_name: &str, procedure_name: &str, ) -> std::result::Result<models::USqlProcedure, get_procedure::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/catalog/usql/databases/{}/schemas/{}/procedures/{}", operation_config.base_path(), database_name, schema_name, procedure_name ); let mut url = url::Url::parse(url_str).map_err(get_procedure::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(get_procedure::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(get_procedure::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(get_procedure::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::USqlProcedure =
serde_json::from_slice(rsp_body).map_err(|source| get_procedure::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); Err(get_procedure::Error::UnexpectedResponse { status_code, body: rsp_body.clone(), }) } } } /* Error set for get_procedure (generated, uniform shape). */ pub mod get_procedure { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("Unexpected HTTP status code {}", status_code)] UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } /* GET {base_path}/catalog/usql/databases/{db}/schemas/{schema}/procedures: lists procedures with optional OData options (appended only when Some); 200 => USqlProcedureList. */ pub async fn list_procedures( operation_config: &crate::OperationConfig, database_name: &str, schema_name: &str, filter: Option<&str>, top: Option<i32>, skip: Option<i32>, expand: Option<&str>, select: Option<&str>, orderby: Option<&str>, count: Option<bool>, ) -> std::result::Result<models::USqlProcedureList, list_procedures::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/catalog/usql/databases/{}/schemas/{}/procedures", operation_config.base_path(), database_name, schema_name ); let mut url = url::Url::parse(url_str).map_err(list_procedures::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(list_procedures::Error::GetTokenError)?; req_builder =
req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); /* Optional OData query options, only when provided. */ if let Some(filter) = filter { url.query_pairs_mut().append_pair("$filter", filter); } if let Some(top) = top { url.query_pairs_mut().append_pair("$top", top.to_string().as_str()); } if let Some(skip) = skip { url.query_pairs_mut().append_pair("$skip", skip.to_string().as_str()); } if let Some(expand) = expand { url.query_pairs_mut().append_pair("$expand", expand); } if let Some(select) = select { url.query_pairs_mut().append_pair("$select", select); } if let Some(orderby) = orderby { url.query_pairs_mut().append_pair("$orderby", orderby); } if let Some(count) = count { url.query_pairs_mut().append_pair("$count", count.to_string().as_str()); } let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(list_procedures::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(list_procedures::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::USqlProcedureList = serde_json::from_slice(rsp_body) .map_err(|source| list_procedures::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); Err(list_procedures::Error::UnexpectedResponse { status_code, body: rsp_body.clone(), }) } } } /* Error set for list_procedures (generated, uniform shape). */ pub mod list_procedures { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("Unexpected HTTP status code {}", status_code)] UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } /* GET {base_path}/catalog/usql/databases/{db}/schemas/{schema}/tables/{name}: fetches one table; 200 => USqlTable. */ pub async fn get_table( operation_config: &crate::OperationConfig, database_name: &str, schema_name: &str, table_name: &str, ) -> std::result::Result<models::USqlTable, get_table::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/catalog/usql/databases/{}/schemas/{}/tables/{}", operation_config.base_path(), database_name, schema_name, table_name ); let mut url = url::Url::parse(url_str).map_err(get_table::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(get_table::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(get_table::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(get_table::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::USqlTable = serde_json::from_slice(rsp_body).map_err(|source| get_table::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); Err(get_table::Error::UnexpectedResponse { status_code, body: rsp_body.clone(), }) } } } /* Error set for get_table (generated, uniform shape). */ pub mod get_table { use
super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("Unexpected HTTP status code {}", status_code)] UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } /* GET {base_path}/catalog/usql/databases/{db}/schemas/{schema}/tables: lists tables with optional OData options (appended only when Some); 200 => USqlTableList. */ pub async fn list_tables( operation_config: &crate::OperationConfig, database_name: &str, schema_name: &str, filter: Option<&str>, top: Option<i32>, skip: Option<i32>, expand: Option<&str>, select: Option<&str>, orderby: Option<&str>, count: Option<bool>, ) -> std::result::Result<models::USqlTableList, list_tables::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/catalog/usql/databases/{}/schemas/{}/tables", operation_config.base_path(), database_name, schema_name ); let mut url = url::Url::parse(url_str).map_err(list_tables::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(list_tables::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); /* Optional OData query options, only when provided. */ if let Some(filter) = filter { url.query_pairs_mut().append_pair("$filter", filter); } if let Some(top) = top { url.query_pairs_mut().append_pair("$top",
top.to_string().as_str()); } if let Some(skip) = skip { url.query_pairs_mut().append_pair("$skip", skip.to_string().as_str()); } if let Some(expand) = expand { url.query_pairs_mut().append_pair("$expand", expand); } if let Some(select) = select { url.query_pairs_mut().append_pair("$select", select); } if let Some(orderby) = orderby { url.query_pairs_mut().append_pair("$orderby", orderby); } if let Some(count) = count { url.query_pairs_mut().append_pair("$count", count.to_string().as_str()); } let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(list_tables::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(list_tables::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::USqlTableList = serde_json::from_slice(rsp_body).map_err(|source| list_tables::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); Err(list_tables::Error::UnexpectedResponse { status_code, body: rsp_body.clone(), }) } } } /* Error set for list_tables (generated, uniform shape). */ pub mod list_tables { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("Unexpected HTTP status code {}", status_code)] UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } /* GET {base_path}/catalog/usql/databases/{db}/schemas/{schema}/tabletypes/{name}: fetches one table type; 200 => USqlTableType. */ pub async fn get_table_type( operation_config:
&crate::OperationConfig, database_name: &str, schema_name: &str, table_type_name: &str, ) -> std::result::Result<models::USqlTableType, get_table_type::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/catalog/usql/databases/{}/schemas/{}/tabletypes/{}", operation_config.base_path(), database_name, schema_name, table_type_name ); let mut url = url::Url::parse(url_str).map_err(get_table_type::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(get_table_type::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(get_table_type::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(get_table_type::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::USqlTableType = serde_json::from_slice(rsp_body).map_err(|source| get_table_type::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); Err(get_table_type::Error::UnexpectedResponse { status_code, body: rsp_body.clone(), }) } } } /* Error set for get_table_type (generated, uniform shape). */ pub mod get_table_type { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("Unexpected HTTP status code {}", status_code)] UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } /* GET {base_path}/catalog/usql/databases/{db}/schemas/{schema}/tabletypes: lists table types with optional OData options (appended only when Some); 200 => USqlTableTypeList. */ pub async fn list_table_types( operation_config: &crate::OperationConfig, database_name: &str, schema_name: &str, filter: Option<&str>, top: Option<i32>, skip: Option<i32>, expand: Option<&str>, select: Option<&str>, orderby: Option<&str>, count: Option<bool>, ) -> std::result::Result<models::USqlTableTypeList, list_table_types::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/catalog/usql/databases/{}/schemas/{}/tabletypes", operation_config.base_path(), database_name, schema_name ); let mut url = url::Url::parse(url_str).map_err(list_table_types::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(list_table_types::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); /* Optional OData query options, only when provided. */ if let Some(filter) = filter { url.query_pairs_mut().append_pair("$filter", filter); } if let Some(top) = top { url.query_pairs_mut().append_pair("$top", top.to_string().as_str()); } if let Some(skip) = skip { url.query_pairs_mut().append_pair("$skip", skip.to_string().as_str()); } if let Some(expand) = expand { url.query_pairs_mut().append_pair("$expand", expand); } if let Some(select) = select {
url.query_pairs_mut().append_pair("$select", select); } if let Some(orderby) = orderby { url.query_pairs_mut().append_pair("$orderby", orderby); } if let Some(count) = count { url.query_pairs_mut().append_pair("$count", count.to_string().as_str()); } let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(list_table_types::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(list_table_types::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::USqlTableTypeList = serde_json::from_slice(rsp_body) .map_err(|source| list_table_types::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); Err(list_table_types::Error::UnexpectedResponse { status_code, body: rsp_body.clone(), }) } } } /* Error set for list_table_types (generated, uniform shape). */ pub mod list_table_types { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("Unexpected HTTP status code {}", status_code)] UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } /* GET {base_path}/catalog/usql/databases/{db}/schemas/{schema}/views/{name}: fetches one view; 200 => USqlView. */ pub async fn get_view( operation_config: &crate::OperationConfig, database_name: &str, schema_name: &str, view_name: &str, ) -> std::result::Result<models::USqlView, get_view::Error> { let http_client = operation_config.http_client(); let url_str = &format!(
"{}/catalog/usql/databases/{}/schemas/{}/views/{}", operation_config.base_path(), database_name, schema_name, view_name ); let mut url = url::Url::parse(url_str).map_err(get_view::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(get_view::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(get_view::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(get_view::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::USqlView = serde_json::from_slice(rsp_body).map_err(|source| get_view::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); Err(get_view::Error::UnexpectedResponse { status_code, body: rsp_body.clone(), }) } } } /* Error set for get_view (generated, uniform shape). */ pub mod get_view { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("Unexpected HTTP status code {}", status_code)] UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn list_views( operation_config: &crate::OperationConfig, database_name: &str, schema_name: &str, filter: Option<&str>, top: Option<i32>, skip: Option<i32>, expand: Option<&str>, select: Option<&str>, orderby: Option<&str>, count: Option<bool>, ) -> std::result::Result<models::USqlViewList, list_views::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/catalog/usql/databases/{}/schemas/{}/views", operation_config.base_path(), database_name, schema_name ); let mut url = url::Url::parse(url_str).map_err(list_views::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(list_views::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); if let Some(filter) = filter { url.query_pairs_mut().append_pair("$filter", filter); } if let Some(top) = top { url.query_pairs_mut().append_pair("$top", top.to_string().as_str()); } if let Some(skip) = skip { url.query_pairs_mut().append_pair("$skip", skip.to_string().as_str()); } if let Some(expand) = expand { url.query_pairs_mut().append_pair("$expand", expand); } if let Some(select) = select { url.query_pairs_mut().append_pair("$select", select); } if let Some(orderby) = orderby { url.query_pairs_mut().append_pair("$orderby", orderby); } if let Some(count) = count { url.query_pairs_mut().append_pair("$count", count.to_string().as_str()); } let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let 
req = req_builder.body(req_body).map_err(list_views::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(list_views::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::USqlViewList = serde_json::from_slice(rsp_body).map_err(|source| list_views::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); Err(list_views::Error::UnexpectedResponse { status_code, body: rsp_body.clone(), }) } } }
// Error type for `list_views` (generated). NOTE(review): `SerializeError` is never constructed
// by this GET-only operation; `use super::{models, API_VERSION}` is unreferenced in the module.
pub mod list_views { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("Unexpected HTTP status code {}", status_code)] UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } }
// GET {base_path}/catalog/usql/databases/{db}/schemas/{schema}/tables/{table}/statistics/{name}.
// Fetches one table statistics entry: optional Bearer auth, `api-version` query parameter,
// empty-body GET; 200 OK is deserialized into `models::USqlTableStatistics`, anything else
// becomes `Error::UnexpectedResponse`.
pub async fn get_table_statistic( operation_config: &crate::OperationConfig, database_name: &str, schema_name: &str, table_name: &str, statistics_name: &str, ) -> std::result::Result<models::USqlTableStatistics, get_table_statistic::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/catalog/usql/databases/{}/schemas/{}/tables/{}/statistics/{}", operation_config.base_path(), database_name, schema_name, table_name, statistics_name ); let mut url = url::Url::parse(url_str).map_err(get_table_statistic::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let
Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(get_table_statistic::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(get_table_statistic::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(get_table_statistic::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::USqlTableStatistics = serde_json::from_slice(rsp_body) .map_err(|source| get_table_statistic::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); Err(get_table_statistic::Error::UnexpectedResponse { status_code, body: rsp_body.clone(), }) } } }
// Error type for `get_table_statistic`; same generated shape as the sibling error modules
// (`SerializeError` and the module-level `use` are likewise unused here).
pub mod get_table_statistic { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("Unexpected HTTP status code {}", status_code)] UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } }
// GET {base_path}/catalog/usql/databases/{db}/schemas/{schema}/tables/{table}/statistics.
// Lists statistics entries for a table, with the same optional OData query parameters as the
// other generated list operations; 200 OK yields `models::USqlTableStatisticsList`.
pub async fn list_table_statistics( operation_config: &crate::OperationConfig, database_name: &str,
schema_name: &str, table_name: &str, filter: Option<&str>, top: Option<i32>, skip: Option<i32>, expand: Option<&str>, select: Option<&str>, orderby: Option<&str>, count: Option<bool>, ) -> std::result::Result<models::USqlTableStatisticsList, list_table_statistics::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/catalog/usql/databases/{}/schemas/{}/tables/{}/statistics", operation_config.base_path(), database_name, schema_name, table_name ); let mut url = url::Url::parse(url_str).map_err(list_table_statistics::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(list_table_statistics::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); if let Some(filter) = filter { url.query_pairs_mut().append_pair("$filter", filter); } if let Some(top) = top { url.query_pairs_mut().append_pair("$top", top.to_string().as_str()); } if let Some(skip) = skip { url.query_pairs_mut().append_pair("$skip", skip.to_string().as_str()); } if let Some(expand) = expand { url.query_pairs_mut().append_pair("$expand", expand); } if let Some(select) = select { url.query_pairs_mut().append_pair("$select", select); } if let Some(orderby) = orderby { url.query_pairs_mut().append_pair("$orderby", orderby); } if let Some(count) = count { url.query_pairs_mut().append_pair("$count", count.to_string().as_str()); } let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder .body(req_body) .map_err(list_table_statistics::Error::BuildRequestError)?; let rsp = http_client
.execute_request(req) .await .map_err(list_table_statistics::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::USqlTableStatisticsList = serde_json::from_slice(rsp_body) .map_err(|source| list_table_statistics::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); Err(list_table_statistics::Error::UnexpectedResponse { status_code, body: rsp_body.clone(), }) } } }
// Error type for `list_table_statistics` (generated). NOTE(review): `SerializeError` is never
// constructed by this GET-only operation; the module-level `use` is unreferenced.
pub mod list_table_statistics { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("Unexpected HTTP status code {}", status_code)] UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } }
// GET {base_path}/catalog/usql/databases/{db}/schemas/{schema}/tables/{table}/partitions/{name}.
// Fetches one table partition: optional Bearer auth, `api-version` parameter, empty-body GET;
// 200 OK is deserialized into `models::USqlTablePartition`.
pub async fn get_table_partition( operation_config: &crate::OperationConfig, database_name: &str, schema_name: &str, table_name: &str, partition_name: &str, ) -> std::result::Result<models::USqlTablePartition, get_table_partition::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/catalog/usql/databases/{}/schemas/{}/tables/{}/partitions/{}", operation_config.base_path(), database_name, schema_name, table_name, partition_name ); let mut url = url::Url::parse(url_str).map_err(get_table_partition::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) =
operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(get_table_partition::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(get_table_partition::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(get_table_partition::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::USqlTablePartition = serde_json::from_slice(rsp_body) .map_err(|source| get_table_partition::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); Err(get_table_partition::Error::UnexpectedResponse { status_code, body: rsp_body.clone(), }) } } }
// Error type for `get_table_partition`; same generated shape as the sibling error modules.
pub mod get_table_partition { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("Unexpected HTTP status code {}", status_code)] UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } }
// GET {base_path}/catalog/usql/databases/{db}/schemas/{schema}/tables/{table}/partitions.
// Lists partitions of a table with the standard optional OData query parameters; 200 OK
// yields `models::USqlTablePartitionList`.
pub async fn list_table_partitions( operation_config: &crate::OperationConfig, database_name: &str, schema_name: &str, table_name:
&str, filter: Option<&str>, top: Option<i32>, skip: Option<i32>, expand: Option<&str>, select: Option<&str>, orderby: Option<&str>, count: Option<bool>, ) -> std::result::Result<models::USqlTablePartitionList, list_table_partitions::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/catalog/usql/databases/{}/schemas/{}/tables/{}/partitions", operation_config.base_path(), database_name, schema_name, table_name ); let mut url = url::Url::parse(url_str).map_err(list_table_partitions::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(list_table_partitions::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); if let Some(filter) = filter { url.query_pairs_mut().append_pair("$filter", filter); } if let Some(top) = top { url.query_pairs_mut().append_pair("$top", top.to_string().as_str()); } if let Some(skip) = skip { url.query_pairs_mut().append_pair("$skip", skip.to_string().as_str()); } if let Some(expand) = expand { url.query_pairs_mut().append_pair("$expand", expand); } if let Some(select) = select { url.query_pairs_mut().append_pair("$select", select); } if let Some(orderby) = orderby { url.query_pairs_mut().append_pair("$orderby", orderby); } if let Some(count) = count { url.query_pairs_mut().append_pair("$count", count.to_string().as_str()); } let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder .body(req_body) .map_err(list_table_partitions::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await
.map_err(list_table_partitions::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::USqlTablePartitionList = serde_json::from_slice(rsp_body) .map_err(|source| list_table_partitions::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); Err(list_table_partitions::Error::UnexpectedResponse { status_code, body: rsp_body.clone(), }) } } }
// Error type for `list_table_partitions` (generated). NOTE(review): `SerializeError` is never
// constructed by this GET-only operation; the module-level `use` is unreferenced.
pub mod list_table_partitions { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("Unexpected HTTP status code {}", status_code)] UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } }
// GET {base_path}/catalog/usql/databases/{database_name}/schemas/{schema_name}/types.
// Lists U-SQL types in a schema with the standard optional OData query parameters;
// 200 OK yields `models::USqlTypeList`.
pub async fn list_types( operation_config: &crate::OperationConfig, database_name: &str, schema_name: &str, filter: Option<&str>, top: Option<i32>, skip: Option<i32>, expand: Option<&str>, select: Option<&str>, orderby: Option<&str>, count: Option<bool>, ) -> std::result::Result<models::USqlTypeList, list_types::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/catalog/usql/databases/{}/schemas/{}/types", operation_config.base_path(), database_name, schema_name ); let mut url = url::Url::parse(url_str).map_err(list_types::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) =
operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(list_types::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); if let Some(filter) = filter { url.query_pairs_mut().append_pair("$filter", filter); } if let Some(top) = top { url.query_pairs_mut().append_pair("$top", top.to_string().as_str()); } if let Some(skip) = skip { url.query_pairs_mut().append_pair("$skip", skip.to_string().as_str()); } if let Some(expand) = expand { url.query_pairs_mut().append_pair("$expand", expand); } if let Some(select) = select { url.query_pairs_mut().append_pair("$select", select); } if let Some(orderby) = orderby { url.query_pairs_mut().append_pair("$orderby", orderby); } if let Some(count) = count { url.query_pairs_mut().append_pair("$count", count.to_string().as_str()); } let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(list_types::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(list_types::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::USqlTypeList = serde_json::from_slice(rsp_body).map_err(|source| list_types::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); Err(list_types::Error::UnexpectedResponse { status_code, body: rsp_body.clone(), }) } } }
// Error type for `list_types`; same generated shape as the sibling error modules.
pub mod list_types { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("Unexpected HTTP status code {}", status_code)] UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes }, #[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } }
// GET {base_path}/catalog/usql/databases/{db}/schemas/{schema}/tablevaluedfunctions/{name}.
// Fetches one table-valued function: optional Bearer auth, `api-version` parameter,
// empty-body GET; 200 OK is deserialized into `models::USqlTableValuedFunction`.
pub async fn get_table_valued_function( operation_config: &crate::OperationConfig, database_name: &str, schema_name: &str, table_valued_function_name: &str, ) -> std::result::Result<models::USqlTableValuedFunction, get_table_valued_function::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/catalog/usql/databases/{}/schemas/{}/tablevaluedfunctions/{}", operation_config.base_path(), database_name, schema_name, table_valued_function_name ); let mut url = url::Url::parse(url_str).map_err(get_table_valued_function::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(get_table_valued_function::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder .body(req_body) .map_err(get_table_valued_function::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(get_table_valued_function::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value:
models::USqlTableValuedFunction = serde_json::from_slice(rsp_body) .map_err(|source| get_table_valued_function::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); Err(get_table_valued_function::Error::UnexpectedResponse { status_code, body: rsp_body.clone(), }) } } }
// Error type for `get_table_valued_function` (generated). NOTE(review): `SerializeError` is
// never constructed by this GET-only operation; the module-level `use` is unreferenced.
pub mod get_table_valued_function { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("Unexpected HTTP status code {}", status_code)] UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } }
// GET {base_path}/catalog/usql/databases/{db}/schemas/{schema}/tablevaluedfunctions.
// Lists table-valued functions in a schema with the standard optional OData query parameters;
// 200 OK yields `models::USqlTableValuedFunctionList`.
pub async fn list_table_valued_functions( operation_config: &crate::OperationConfig, database_name: &str, schema_name: &str, filter: Option<&str>, top: Option<i32>, skip: Option<i32>, expand: Option<&str>, select: Option<&str>, orderby: Option<&str>, count: Option<bool>, ) -> std::result::Result<models::USqlTableValuedFunctionList, list_table_valued_functions::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/catalog/usql/databases/{}/schemas/{}/tablevaluedfunctions", operation_config.base_path(), database_name, schema_name ); let mut url = url::Url::parse(url_str).map_err(list_table_valued_functions::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential
.get_token(operation_config.token_credential_resource()) .await .map_err(list_table_valued_functions::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); if let Some(filter) = filter { url.query_pairs_mut().append_pair("$filter", filter); } if let Some(top) = top { url.query_pairs_mut().append_pair("$top", top.to_string().as_str()); } if let Some(skip) = skip { url.query_pairs_mut().append_pair("$skip", skip.to_string().as_str()); } if let Some(expand) = expand { url.query_pairs_mut().append_pair("$expand", expand); } if let Some(select) = select { url.query_pairs_mut().append_pair("$select", select); } if let Some(orderby) = orderby { url.query_pairs_mut().append_pair("$orderby", orderby); } if let Some(count) = count { url.query_pairs_mut().append_pair("$count", count.to_string().as_str()); } let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder .body(req_body) .map_err(list_table_valued_functions::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(list_table_valued_functions::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::USqlTableValuedFunctionList = serde_json::from_slice(rsp_body) .map_err(|source| list_table_valued_functions::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); Err(list_table_valued_functions::Error::UnexpectedResponse { status_code, body: rsp_body.clone(), }) } } }
// Error type for `list_table_valued_functions`; same generated shape as the sibling error
// modules. (The `ParseUrlError` message literal below spans a source-line break.)
pub mod list_table_valued_functions { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("Unexpected HTTP status code {}", status_code)] UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes }, #[error("Failed 
to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } }
// GET {base_path}/catalog/usql/databases/{database_name}/assemblies/{assembly_name}.
// Fetches one assembly: optional Bearer auth, `api-version` parameter, empty-body GET;
// 200 OK is deserialized into `models::USqlAssembly`.
pub async fn get_assembly( operation_config: &crate::OperationConfig, database_name: &str, assembly_name: &str, ) -> std::result::Result<models::USqlAssembly, get_assembly::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/catalog/usql/databases/{}/assemblies/{}", operation_config.base_path(), database_name, assembly_name ); let mut url = url::Url::parse(url_str).map_err(get_assembly::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(get_assembly::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(get_assembly::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(get_assembly::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::USqlAssembly = serde_json::from_slice(rsp_body).map_err(|source| get_assembly::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); Err(get_assembly::Error::UnexpectedResponse { status_code, body: rsp_body.clone(), }) } } }
// Error type for `get_assembly` (generated). NOTE(review): `SerializeError` is never
// constructed by this GET-only operation; the module-level `use` is unreferenced.
pub mod get_assembly { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("Unexpected HTTP status code {}", status_code)] UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } }
// GET {base_path}/catalog/usql/databases/{database_name}/assemblies.
// Lists assemblies in a database with the standard optional OData query parameters;
// 200 OK yields `models::USqlAssemblyList`.
pub async fn list_assemblies( operation_config: &crate::OperationConfig, database_name: &str, filter: Option<&str>, top: Option<i32>, skip: Option<i32>, expand: Option<&str>, select: Option<&str>, orderby: Option<&str>, count: Option<bool>, ) -> std::result::Result<models::USqlAssemblyList, list_assemblies::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/catalog/usql/databases/{}/assemblies", operation_config.base_path(), database_name ); let mut url = url::Url::parse(url_str).map_err(list_assemblies::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(list_assemblies::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); if
let Some(filter) = filter { url.query_pairs_mut().append_pair("$filter", filter); } if let Some(top) = top { url.query_pairs_mut().append_pair("$top", top.to_string().as_str()); } if let Some(skip) = skip { url.query_pairs_mut().append_pair("$skip", skip.to_string().as_str()); } if let Some(expand) = expand { url.query_pairs_mut().append_pair("$expand", expand); } if let Some(select) = select { url.query_pairs_mut().append_pair("$select", select); } if let Some(orderby) = orderby { url.query_pairs_mut().append_pair("$orderby", orderby); } if let Some(count) = count { url.query_pairs_mut().append_pair("$count", count.to_string().as_str()); } let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(list_assemblies::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(list_assemblies::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::USqlAssemblyList = serde_json::from_slice(rsp_body) .map_err(|source| list_assemblies::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); Err(list_assemblies::Error::UnexpectedResponse { status_code, body: rsp_body.clone(), }) } } }
// Error type for `list_assemblies`; same generated shape as the sibling error modules.
pub mod list_assemblies { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("Unexpected HTTP status code {}", status_code)] UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } }
// GET {base_path}/catalog/usql/databases/{database_name}/schemas/{schema_name}.
// Fetches one schema: optional Bearer auth, `api-version` parameter, empty-body GET;
// 200 OK is deserialized into `models::USqlSchema`.
pub async fn get_schema( operation_config: &crate::OperationConfig, database_name: &str, schema_name: &str, ) -> std::result::Result<models::USqlSchema, get_schema::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/catalog/usql/databases/{}/schemas/{}", operation_config.base_path(), database_name, schema_name ); let mut url = url::Url::parse(url_str).map_err(get_schema::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(get_schema::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(get_schema::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(get_schema::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::USqlSchema = serde_json::from_slice(rsp_body).map_err(|source| get_schema::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); Err(get_schema::Error::UnexpectedResponse { status_code, body: rsp_body.clone(), }) } } }
// Error type for `get_schema`; same generated shape as the sibling error modules.
pub mod get_schema { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("Unexpected HTTP status code {}", status_code)] UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn list_schemas( operation_config: &crate::OperationConfig, database_name: &str, filter: Option<&str>, top: Option<i32>, skip: Option<i32>, expand: Option<&str>, select: Option<&str>, orderby: Option<&str>, count: Option<bool>, ) -> std::result::Result<models::USqlSchemaList, list_schemas::Error> { let http_client = operation_config.http_client(); let url_str = &format!("{}/catalog/usql/databases/{}/schemas", operation_config.base_path(), database_name); let mut url = url::Url::parse(url_str).map_err(list_schemas::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(list_schemas::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); if let Some(filter) = filter { url.query_pairs_mut().append_pair("$filter", filter); } if let Some(top) = top { url.query_pairs_mut().append_pair("$top", top.to_string().as_str()); } if let Some(skip) = skip { url.query_pairs_mut().append_pair("$skip", skip.to_string().as_str()); } if let Some(expand) = expand { url.query_pairs_mut().append_pair("$expand", expand); } if let Some(select) = select { 
url.query_pairs_mut().append_pair("$select", select); } if let Some(orderby) = orderby { url.query_pairs_mut().append_pair("$orderby", orderby); } if let Some(count) = count { url.query_pairs_mut().append_pair("$count", count.to_string().as_str()); } let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(list_schemas::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(list_schemas::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::USqlSchemaList = serde_json::from_slice(rsp_body).map_err(|source| list_schemas::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); Err(list_schemas::Error::UnexpectedResponse { status_code, body: rsp_body.clone(), }) } } } pub mod list_schemas { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("Unexpected HTTP status code {}", status_code)] UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn get_database( operation_config: &crate::OperationConfig, database_name: &str, ) -> std::result::Result<models::USqlDatabase, get_database::Error> { let http_client = operation_config.http_client(); let url_str = &format!("{}/catalog/usql/databases/{}", operation_config.base_path(), 
database_name); let mut url = url::Url::parse(url_str).map_err(get_database::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(get_database::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(get_database::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(get_database::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::USqlDatabase = serde_json::from_slice(rsp_body).map_err(|source| get_database::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); Err(get_database::Error::UnexpectedResponse { status_code, body: rsp_body.clone(), }) } } } pub mod get_database { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("Unexpected HTTP status code {}", status_code)] UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get 
access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn list_databases( operation_config: &crate::OperationConfig, filter: Option<&str>, top: Option<i32>, skip: Option<i32>, expand: Option<&str>, select: Option<&str>, orderby: Option<&str>, count: Option<bool>, ) -> std::result::Result<models::USqlDatabaseList, list_databases::Error> { let http_client = operation_config.http_client(); let url_str = &format!("{}/catalog/usql/databases", operation_config.base_path(),); let mut url = url::Url::parse(url_str).map_err(list_databases::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(list_databases::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", super::API_VERSION); if let Some(filter) = filter { url.query_pairs_mut().append_pair("$filter", filter); } if let Some(top) = top { url.query_pairs_mut().append_pair("$top", top.to_string().as_str()); } if let Some(skip) = skip { url.query_pairs_mut().append_pair("$skip", skip.to_string().as_str()); } if let Some(expand) = expand { url.query_pairs_mut().append_pair("$expand", expand); } if let Some(select) = select { url.query_pairs_mut().append_pair("$select", select); } if let Some(orderby) = orderby { url.query_pairs_mut().append_pair("$orderby", orderby); } if let Some(count) = count { url.query_pairs_mut().append_pair("$count", count.to_string().as_str()); } let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(list_databases::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await 
.map_err(list_databases::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: models::USqlDatabaseList = serde_json::from_slice(rsp_body).map_err(|source| list_databases::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); Err(list_databases::Error::UnexpectedResponse { status_code, body: rsp_body.clone(), }) } } } pub mod list_databases { use super::{models, API_VERSION}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("Unexpected HTTP status code {}", status_code)] UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } }
/// Demonstrates debug-formatting a `Chars` iterator on both standard streams.
fn main() {
    // Debug representation of the `Chars` iterator goes to stdout…
    let out_chars = "STDOUT".chars();
    println!("{:?}", out_chars);
    // …and the same shape of output goes to stderr.
    let err_chars = "STDERR".chars();
    eprintln!("{:?}", err_chars);
}
extern crate libc;
use std::env;
use std::process;
use std::str::FromStr;
use std::ffi::CStr;

// adapted from https://github.com/fengcen/hostname/blob/master/src/lib.rs
extern "C" {
    fn gethostname(name: *mut libc::c_char, size: libc::size_t) -> libc::c_int;
}

/// Returns the machine's hostname, or `None` if the `gethostname(2)` call fails.
pub fn get_hostname() -> Option<String> {
    // BUG FIX: the buffer must be zero-initialised, not merely reserved.
    // POSIX allows gethostname() to leave the name unterminated on
    // truncation, and CStr::from_ptr must never scan uninitialised memory
    // looking for a NUL terminator.
    let mut buffer = vec![0u8; 256];
    let pointer = buffer.as_mut_ptr() as *mut libc::c_char;
    // SAFETY: `pointer` is valid for `buffer.len()` bytes, and the buffer is
    // zero-filled, so a terminating NUL is always present within bounds.
    unsafe {
        if gethostname(pointer, buffer.len() as libc::size_t) == 0 {
            Some(CStr::from_ptr(pointer).to_string_lossy().into_owned())
        } else {
            None
        }
    }
}

/// How much of the hostname to print.
enum DisplayLength {
    /// The full hostname as reported by the OS.
    Full,
    /// Only the portion before the first `.`.
    Short,
}

impl FromStr for DisplayLength {
    type Err = ();

    fn from_str(s: &str) -> Result<Self, ()> {
        match s {
            "-s" | "--short" => Ok(DisplayLength::Short),
            "-f" | "--full" => Ok(DisplayLength::Full),
            _ => Err(()),
        }
    }
}

/// Parsed command-line arguments.
struct Arguments {
    _program_name: String,
    display_length: DisplayLength,
}

impl Arguments {
    /// Splits `args` into the program name (element 0) and flags; the first
    /// flag, if present and recognised, selects the display length. Unknown
    /// flags and the no-flag case both fall back to `Full`.
    pub fn create(args: Vec<String>) -> Arguments {
        let (program_name, flags) = args.split_at(1);
        // `program_name` is exactly one element; concat() avoids the
        // clone-per-element map of the original.
        let _program_name = program_name.concat();
        let display_length = flags
            .first()
            .and_then(|arg| arg.parse().ok())
            .unwrap_or(DisplayLength::Full);
        Arguments {
            _program_name,
            display_length,
        }
    }
}

fn main() {
    let args = Arguments::create(env::args().collect());
    match (get_hostname(), args.display_length) {
        (Some(h), DisplayLength::Full) => println!("{}", h),
        (Some(h), DisplayLength::Short) => {
            // The short form is everything before the first '.'; split()
            // always yields at least one element, but the fallback exit is
            // kept for parity with the original.
            match h.split('.').next() {
                Some(s) => println!("{}", s),
                None => process::exit(1),
            }
        }
        _ => println!("None"),
    }
}
//! # Organix, organic application //! //! `Organix` provides an opinionated way to build application with //! multiple services independent from each other but still require //! communication channels. //! //! With `Organix` it is possible to design the different components //! of your application in isolation from each others. It allows to //! build runtimes for your applications. //! //! # Minimal configuration //! //! The core component of `Organix` is the [`Watchdog`]. In order to //! build the [`Watchdog`] you need to define an [`Organix`] object //! which defines your app and its services. //! //! ``` //! use organix::{Organix, WatchdogBuilder}; //! //! #[derive(Organix)] //! struct App; //! //! let watchdog = WatchdogBuilder::<App>::new().build(); //! ``` //! //! # defining a service //! //! Now defining a new service: //! //! ``` //! use organix::{Organix, IntercomMsg, ServiceState, Service, ServiceIdentifier, service}; //! use async_trait::async_trait; //! //! struct HeartBeat(ServiceState<Self>); //! //! #[async_trait] //! impl Service for HeartBeat { //! const SERVICE_IDENTIFIER: ServiceIdentifier = "heart-beat"; //! type IntercomMsg = service::NoIntercom; //! //! fn prepare(state: ServiceState<Self>) -> Self { //! // initialize the state of the service //! Self(state) //! } //! async fn start(mut self) { //! // where you do the work //! } //! } //! ``` //! //! Now from there you can start the service by adding it in the `App`: //! //! ``` //! use organix::{Organix, ServiceManager}; //! # use organix::{IntercomMsg, ServiceState, Service, ServiceIdentifier, service}; //! # use async_trait::async_trait; //! # //! # struct HeartBeat(ServiceState<Self>); //! # //! # #[async_trait] //! # impl Service for HeartBeat { //! # const SERVICE_IDENTIFIER: ServiceIdentifier = "heart-beat"; //! # type IntercomMsg = service::NoIntercom; //! # //! # fn prepare(state: ServiceState<Self>) -> Self { //! # // initialize the state of the service //! # Self(state) //! # } //! 
# async fn start(mut self) {
//! #         // where you do the work
//! #     }
//! # }
//!
//! #[derive(Organix)]
//! struct App {
//!     heart_beat: service::ServiceManager<HeartBeat>,
//! }
//! ```
//!
//! See the [examples] for more complete details on how to build services
//! with the provided interface.
//!
//! # Configuring the runtime
//!
//! It is possible to configure the runtime of the different services.
//!
//! ## On the `Organix` app type
//!
//! * `#[runtime(shared)]`: will make all the services use a _shared_ runtime
//! by default. Otherwise the default is for every service to run an individual
//! runtime.
//!
//! ## On the field of the `Organix` app type
//!
//! * `#[runtime(shared)]`: will make the associated service use a shared runtime
//! with the other _shared_ labeled services. This shared runtime has `io` and
//! `time` drivers already enabled.
//! * `#[runtime(io)]`: enable the `io` driver;
//! * `#[runtime(time)]`: enable the `time` driver;
//! * `#[runtime(skip)]`: ignore the field.
//!
//! [examples]: https://github.com/primetype/organix/tree/master/examples
//! [`Watchdog`]: ./struct.WatchdogMonitor.html

pub mod runtime;
pub mod service;
mod watchdog;

pub use organix_derive::{IntercomMsg, Organix};
pub use service::{Service, ServiceIdentifier, ServiceManager, ServiceState};
use sp_core::{Pair, sr25519};
use sc_service;
use sp_runtime::traits::{Verify, IdentifyAccount};
use runtime::{AccountId, GenesisConfig, Signature, genesis::testnet_genesis};

// Note this is the URL for the telemetry server
//const STAGING_TELEMETRY_URL: &str = "wss://telemetry.polkadot.io/submit/";

/// Specialized `ChainSpec`. This is a specialization of the general Substrate `ChainSpec` type.
pub type ChainSpec = sc_service::GenericChainSpec<GenesisConfig>;

/// Helper function to generate a crypto pair from seed
pub fn get_from_seed<TPair: Pair>(seed: &str) -> TPair::Public {
    // "//<seed>" is the standard Substrate hard-derivation path used for the
    // well-known development accounts (Alice, Bob, …).
    TPair::from_string(&format!("//{}", seed), None)
        .expect("static values are valid; qed")
        .public()
}

// The public-key type from which on-chain account IDs are derived.
type AccountPublic = <Signature as Verify>::Signer;

/// Helper function to generate an account ID from seed
pub fn get_account_id_from_seed<TPair: Pair>(seed: &str) -> AccountId
where AccountPublic: From<TPair::Public> {
    AccountPublic::from(get_from_seed::<TPair>(seed)).into_account()
}

/// Chain spec for a single-node development chain (id: "dev").
pub fn dev_config() -> ChainSpec {
    ChainSpec::from_genesis(
        // chain name and id
        "Development",
        "dev",
        sc_service::ChainType::Development,
        // genesis constructor: Alice is the root account; Alice and Bob
        // (plus their stash accounts) are endowed.
        || testnet_genesis(
            get_account_id_from_seed::<sr25519::Pair>("Alice"),
            vec![
                get_account_id_from_seed::<sr25519::Pair>("Alice"),
                get_account_id_from_seed::<sr25519::Pair>("Bob"),
                get_account_id_from_seed::<sr25519::Pair>("Alice//stash"),
                get_account_id_from_seed::<sr25519::Pair>("Bob//stash"),
            ],
            true,
        ),
        // trailing args per `GenericChainSpec::from_genesis`: boot nodes,
        // telemetry endpoints, protocol id, properties, extensions — all
        // intentionally empty for a local dev chain.
        vec![],
        None,
        None,
        None,
        None
    )
}

/// Chain spec for a multi-node local test network (id: "local_testnet").
pub fn local_testnet_config() -> ChainSpec {
    ChainSpec::from_genesis(
        "Local Testnet",
        "local_testnet",
        sc_service::ChainType::Local,
        // genesis constructor: Alice is the root account; the six well-known
        // development accounts (plus their stash accounts) are endowed.
        || testnet_genesis(
            get_account_id_from_seed::<sr25519::Pair>("Alice"),
            vec![
                get_account_id_from_seed::<sr25519::Pair>("Alice"),
                get_account_id_from_seed::<sr25519::Pair>("Bob"),
                get_account_id_from_seed::<sr25519::Pair>("Charlie"),
                get_account_id_from_seed::<sr25519::Pair>("Dave"),
                get_account_id_from_seed::<sr25519::Pair>("Eve"),
                get_account_id_from_seed::<sr25519::Pair>("Ferdie"),
                get_account_id_from_seed::<sr25519::Pair>("Alice//stash"),
                get_account_id_from_seed::<sr25519::Pair>("Bob//stash"),
                get_account_id_from_seed::<sr25519::Pair>("Charlie//stash"),
                get_account_id_from_seed::<sr25519::Pair>("Dave//stash"),
                get_account_id_from_seed::<sr25519::Pair>("Eve//stash"),
                get_account_id_from_seed::<sr25519::Pair>("Ferdie//stash"),
            ],
            true,
        ),
        // boot nodes, telemetry, protocol id, properties, extensions.
        vec![],
        None,
        None,
        None,
        None
    )
}
mod session; pub use session::WsChatSession;
// Copyright 2014-2018 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // Tests for the various helper functions used by the needless_continue // lint that don't belong in utils. use clippy_lints::needless_continue::{erode_block, erode_from_back, erode_from_front}; #[test] #[rustfmt::skip] fn test_erode_from_back() { let input = "\ { let x = 5; let y = format!(\"{}\", 42); }"; let expected = "\ { let x = 5; let y = format!(\"{}\", 42);"; let got = erode_from_back(input); assert_eq!(expected, got); } #[test] #[rustfmt::skip] fn test_erode_from_back_no_brace() { let input = "\ let x = 5; let y = something(); "; let expected = ""; let got = erode_from_back(input); assert_eq!(expected, got); } #[test] #[rustfmt::skip] fn test_erode_from_front() { let input = " { something(); inside_a_block(); } "; let expected = " something(); inside_a_block(); } "; let got = erode_from_front(input); println!("input: {}\nexpected:\n{}\ngot:\n{}", input, expected, got); assert_eq!(expected, got); } #[test] #[rustfmt::skip] fn test_erode_from_front_no_brace() { let input = " something(); inside_a_block(); "; let expected = "something(); inside_a_block(); "; let got = erode_from_front(input); println!("input: {}\nexpected:\n{}\ngot:\n{}", input, expected, got); assert_eq!(expected, got); } #[test] #[rustfmt::skip] fn test_erode_block() { let input = " { something(); inside_a_block(); } "; let expected = " something(); inside_a_block();"; let got = erode_block(input); println!("input: {}\nexpected:\n{}\ngot:\n{}", input, expected, got); assert_eq!(expected, got); }
use std::vec;

/// Returns the positions (0–63, least-significant bit first) of all bits
/// that are set in `value`.
pub fn extract_active_bits(value: u64) -> vec::Vec<u8> {
    // Iterator form replaces the manual accumulator loop and the
    // non-idiomatic explicit `return` of the original; behavior is identical.
    (0u8..64).filter(|i| value & (1u64 << i) != 0).collect()
}
pub mod app;
pub mod network;
pub mod webgl;

use app::App;
use js_sys::Object;
use std::cell::RefCell;
use std::rc::Rc;
use wasm_bindgen::prelude::*;
use wasm_bindgen::JsCast;
use web_sys::console;
use web_sys::Document;
use web_sys::HtmlCanvasElement;
use web_sys::KeyboardEvent;
use web_sys::MouseEvent;
use web_sys::WebGl2RenderingContext;
use web_sys::Window;

/// Logs a static string to the browser console.
fn print(s: &'static str) {
    console::log_1(&s.into());
}

/// Returns the window's inner (width, height) truncated to whole pixels.
fn dimensions(window: &Window) -> (u32, u32) {
    let width = window.inner_width().unwrap().as_f64().unwrap() as u32;
    let height = window.inner_height().unwrap().as_f64().unwrap() as u32;
    (width, height)
}

/// Creates a `<canvas>` sized to `width` x `height`, styles it to cover the
/// whole viewport, appends it to `<body>`, and returns it.
fn canvas(document: &Document, width: u32, height: u32) -> Result<HtmlCanvasElement, JsValue> {
    let canvas = document.create_element("canvas")?.dyn_into::<HtmlCanvasElement>()?;
    canvas.set_width(width);
    canvas.set_height(height);
    let style = canvas.style();
    style.set_property("display", "block")?;
    style.set_property("position", "absolute")?;
    style.set_property("left", "0")?;
    style.set_property("right", "0")?;
    style.set_property("top", "0")?;
    style.set_property("bottom", "0")?;
    style.set_property("margin", "0")?;
    document.body().unwrap().append_child(&canvas)?;
    Ok(canvas)
}

/// Obtains the WebGL2 rendering context for `canvas`.
/// Panics (via `expect`) if the browser returns no context object.
fn webgl_context(canvas: &HtmlCanvasElement) -> Result<WebGl2RenderingContext, Object> {
    canvas
        .get_context("webgl2")?
        .expect("Unable to get WebGL2 context")
        .dyn_into::<WebGl2RenderingContext>()
}

/// Schedules `function` to run on the browser's next animation frame.
fn request_animation_frame(function: &Closure<dyn FnMut()>) {
    web_sys::window()
        .unwrap()
        .request_animation_frame(function.as_ref().unchecked_ref())
        .unwrap();
}

/// Forwards a key event (key `code` plus pressed/released flag) to the app.
fn keyboard(app: &mut App, code: String, down: bool) {
    app.keyboard(code, down);
}

/// One frame: advance app state, then draw.
fn tick(app: &mut App) {
    app.update();
    app.render();
}

/// Installs `console_error_panic_hook` so panic messages appear in the
/// browser console (only when the feature is enabled).
#[cfg(feature = "console_error_panic_hook")]
fn console_panic_hook() {
    print("using console panic hook feature");
    use std::panic;
    extern crate console_error_panic_hook;
    panic::set_hook(Box::new(console_error_panic_hook::hook));
}

/// No-op stand-in when the `console_error_panic_hook` feature is disabled.
#[cfg(not(feature = "console_error_panic_hook"))]
pub fn console_panic_hook() {}

/// wasm entry point: builds the full-window canvas and WebGL2 context,
/// wires up mouse/keyboard/resize listeners, then drives the app with a
/// self-rescheduling requestAnimationFrame loop.
#[wasm_bindgen(start)]
pub async fn main() -> Result<(), JsValue> {
    print("scroll and sigil");
    console_panic_hook();
    let window = web_sys::window().unwrap();
    let document = window.document().unwrap();
    let (width, height) = dimensions(&window);
    let canvas = canvas(&document, width, height)?;
    let context = webgl_context(&canvas)?;
    let document = Rc::new(document);
    let context = Rc::new(context);
    let app = App::new(context.clone());
    let app = Rc::new(RefCell::new(app));
    {
        // Async initialization (e.g. resource loading) before the first frame.
        let mut app = app.borrow_mut();
        let init = app.initialize();
        init.await?;
        app.resize(width, height);
    }
    {
        // Mouse input: currently only logs the event.
        let closure = Closure::wrap(Box::new(move |_event: MouseEvent| {
            print("mouse down!");
        }) as Box<dyn FnMut(_)>);
        canvas.add_event_listener_with_callback("mousedown", closure.as_ref().unchecked_ref())?;
        // forget() deliberately leaks the closure so it outlives this scope —
        // required for callbacks JS may invoke for the page's lifetime.
        closure.forget();
    }
    {
        // Key press events.
        let app = app.clone();
        let closure = Closure::wrap(Box::new(move |event: KeyboardEvent| {
            let code = event.code();
            let mut app = app.borrow_mut();
            keyboard(&mut app, code, true);
        }) as Box<dyn FnMut(_)>);
        document.add_event_listener_with_callback("keydown", closure.as_ref().unchecked_ref())?;
        closure.forget();
    }
    {
        // Key release events.
        let app = app.clone();
        let closure = Closure::wrap(Box::new(move |event: KeyboardEvent| {
            let code = event.code();
            let mut app = app.borrow_mut();
            keyboard(&mut app, code, false);
        }) as Box<dyn FnMut(_)>);
        document.add_event_listener_with_callback("keyup", closure.as_ref().unchecked_ref())?;
        closure.forget();
    }
    {
        // Window resize keeps the app's viewport dimensions in sync.
        let app = app.clone();
        let closure = Closure::wrap(Box::new(move || {
            let window = web_sys::window().unwrap();
            let (width, height) = dimensions(&window);
            app.borrow_mut().resize(width, height);
        }) as Box<dyn FnMut()>);
        window.add_event_listener_with_callback("resize", closure.as_ref().unchecked_ref())?;
        closure.forget();
    }
    {
        // Self-rescheduling animation loop: the closure holds an Rc to itself
        // (the standard Rc<RefCell<Option<Closure>>> trick) so each frame can
        // request the next one.
        let f = Rc::new(RefCell::new(None));
        let g = f.clone();
        *g.borrow_mut() = Some(Closure::wrap(Box::new(move || {
            tick(&mut app.borrow_mut());
            request_animation_frame(f.borrow().as_ref().unwrap());
        }) as Box<dyn FnMut()>));
        request_animation_frame(g.borrow().as_ref().unwrap());
    }
    Ok(())
}
use dirs;
use std::env;
use std::path::PathBuf;

/// Well-known directories used by the rider editor.
#[derive(Debug, Clone)]
pub struct Directories {
    pub log_dir: PathBuf,
    pub themes_dir: PathBuf,
    pub fonts_dir: PathBuf,
    pub config_dir: PathBuf,
    pub project_dir: PathBuf,
}

impl Directories {
    /// Builds the directory set.
    ///
    /// * `config_dir` — overrides the platform config directory
    ///   (defaults to `dirs::config_dir()`); `rider` is appended.
    /// * `project_dir` — overrides the runtime directory
    ///   (defaults to `dirs::runtime_dir()`); `.rider` is appended.
    ///
    /// Panics if a needed platform directory cannot be resolved.
    pub fn new(config_dir: Option<String>, project_dir: Option<String>) -> Self {
        let path = match config_dir {
            Some(s) => s,
            None => dirs::config_dir().unwrap().to_str().unwrap().to_owned(),
        };
        let mut config_dir = PathBuf::new();
        config_dir.push(path);
        config_dir.push("rider");

        let path = project_dir
            .unwrap_or_else(|| dirs::runtime_dir().unwrap().to_str().unwrap().to_owned());
        let mut project_dir = PathBuf::new();
        project_dir.push(path);
        project_dir.push(".rider");

        Self {
            log_dir: log_dir(&config_dir),
            themes_dir: themes_dir(&config_dir),
            fonts_dir: fonts_dir(&config_dir),
            config_dir,
            project_dir,
        }
    }
}

/// `<config_dir>/log` — uses `Path::join` instead of the original
/// stringify-and-rebuild dance (also avoids panicking on non-UTF-8 paths).
pub fn log_dir(config_dir: &PathBuf) -> PathBuf {
    config_dir.join("log")
}

/// `<config_dir>/themes`
pub fn themes_dir(config_dir: &PathBuf) -> PathBuf {
    config_dir.join("themes")
}

/// `<config_dir>/fonts`
pub fn fonts_dir(config_dir: &PathBuf) -> PathBuf {
    config_dir.join("fonts")
}

/// `<runtime_dir>/.rider` — panics if the platform has no runtime directory.
#[cfg_attr(tarpaulin, skip)]
pub fn project_dir() -> PathBuf {
    PathBuf::from(dirs::runtime_dir().unwrap().to_str().unwrap().to_owned()).join(".rider")
}

/// Locates the directory containing the `rider-editor` binary, searching in
/// order: the platform executable dir, the runtime dir, the local
/// `target/debug` build output, then the executable dir again (search order
/// kept identical to the original).
#[cfg_attr(tarpaulin, skip)]
pub fn binaries_directory() -> Result<PathBuf, String> {
    // 1. platform executable directory
    let mut exec_dir = PathBuf::new();
    exec_dir.push(dirs::executable_dir().unwrap());
    let mut rider_editor = exec_dir.clone();
    rider_editor.push("rider-editor");
    if rider_editor.exists() {
        return Ok(exec_dir);
    }
    // 2. runtime directory
    let path = dirs::runtime_dir().unwrap().to_str().unwrap().to_owned();
    let mut path_buf = PathBuf::new();
    path_buf.push(path.clone());
    path_buf.push("rider-editor");
    if path_buf.exists() {
        let mut path_buf = PathBuf::new();
        path_buf.push(path);
        return Ok(path_buf);
    }
    // 3. cargo debug build output
    let mut current_dir = env::current_dir().unwrap();
    current_dir.push("target");
    current_dir.push("debug");
    let mut rider_editor = current_dir.clone();
    rider_editor.push("rider-editor");
    if rider_editor.exists() {
        return Ok(current_dir);
    }
    // 4. executable directory again (parity with the original search order)
    let executable = dirs::executable_dir().unwrap();
    let mut rider_editor = executable.clone();
    rider_editor.push("rider-editor");
    if rider_editor.exists() {
        return Ok(executable);
    }
    Err("Cannot find binaries!".to_string())
}

/// Resolves the full path of the binary `name`.
///
/// Under `cfg!(test)` it scans `target/debug` for the hashed test binary
/// (`<name>-<hash>`); otherwise it joins `name` onto `binaries_directory()`.
#[cfg_attr(tarpaulin, skip)]
pub fn get_binary_path(name: &str) -> Result<String, String> {
    if cfg!(test) {
        use std::fs;
        let mut current_dir = env::current_dir().unwrap();
        current_dir.push("target");
        current_dir.push("debug");
        let name = name.to_lowercase().replace("-", "_");
        println!(" name {:?}", name);
        // BUG FIX: the old code pushed a literal `<name>-*` component and
        // called `read_dir` on it — `read_dir` does not expand globs, so the
        // unwrap always panicked. Scan `target/debug` and match the hashed
        // binary's `<name>-` prefix by hand instead.
        let prefix = format!("{}-", name);
        for entry in fs::read_dir(current_dir.to_str().unwrap()).unwrap() {
            if let Ok(entry) = entry {
                let file_name = entry.file_name().to_string_lossy().into_owned();
                if !file_name.starts_with(&prefix) {
                    continue;
                }
                if let Ok(meta) = entry.metadata() {
                    // BUG FIX: `Path::ends_with(".d")` compares whole path
                    // components, so the old check never excluded `*.d`
                    // dep-info files; compare the extension instead.
                    let is_dep_info = entry.path().extension().map_or(false, |ext| ext == "d");
                    if meta.is_file() && !is_dep_info {
                        return Ok(entry.path().to_str().unwrap().to_string());
                    }
                }
            }
        }
        Err(format!("Cannot find {:?}", name))
    } else {
        let r = binaries_directory();
        // BUG FIX: `panic!(e)` with a non-literal argument is rejected since
        // Rust 2021; format the message explicitly.
        let mut binaries: PathBuf = r.unwrap_or_else(|e| panic!("{}", e));
        binaries.push(name.to_string());
        println!(" name {}", name);
        match binaries.to_str() {
            Some(s) => Ok(s.to_owned()),
            _ => Err(format!("Cannot find {:?}", name)),
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use std::path::{Path, PathBuf};

    #[test]
    fn assert_log_dir() {
        let directories = Directories::new(Some("/tmp".to_owned()), None);
        let path = directories.log_dir.clone();
        let expected: PathBuf = Path::new("/tmp/rider/log").into();
        assert_eq!(path, expected);
    }

    #[test]
    fn assert_themes_dir() {
        let directories = Directories::new(Some("/tmp".to_owned()), None);
        let path = directories.themes_dir.clone();
        let expected: PathBuf = Path::new("/tmp/rider/themes").into();
        assert_eq!(path, expected);
    }

    #[test]
    fn assert_fonts_dir() {
        let directories = Directories::new(Some("/tmp".to_owned()), None);
        let path = directories.fonts_dir.clone();
        let expected: PathBuf = Path::new("/tmp/rider/fonts").into();
        assert_eq!(path, expected);
    }

    #[test]
    fn assert_config_dir() {
        let directories = Directories::new(Some("/tmp".to_owned()), None);
        let path = directories.config_dir.clone();
        let expected: PathBuf = Path::new("/tmp/rider").into();
        assert_eq!(path, expected);
    }
}
use crate::HandlerResult; use wapc_guest::host_call; use wascc_codec::blobstore::Blob; use wascc_codec::blobstore::Container; use wascc_codec::blobstore::{BlobList, FileChunk, StreamRequest, Transfer}; use wascc_codec::blobstore::{ OP_CREATE_CONTAINER, OP_GET_OBJECT_INFO, OP_LIST_OBJECTS, OP_REMOVE_CONTAINER, OP_REMOVE_OBJECT, OP_START_DOWNLOAD, OP_START_UPLOAD, OP_UPLOAD_CHUNK, }; use wascc_codec::{deserialize, serialize}; const CAPID_BLOBSTORE: &str = "wascc:blobstore"; /// An abstraction around a host runtime capability for a key-value store pub struct ObjectStoreHostBinding { binding: String, } impl Default for ObjectStoreHostBinding { fn default() -> Self { ObjectStoreHostBinding { binding: "default".to_string(), } } } /// Creates a named host binding for the `wascc:objectstore` capability pub fn host(binding: &str) -> ObjectStoreHostBinding { ObjectStoreHostBinding { binding: binding.to_string(), } } /// Creates the default host binding for the `wascc:objectstore` capability pub fn default() -> ObjectStoreHostBinding { ObjectStoreHostBinding::default() } impl ObjectStoreHostBinding { /// Creates a new container within the store pub fn create_container(&self, name: &str) -> HandlerResult<Container> { let cmd = Container { id: name.to_string(), }; host_call( &self.binding, CAPID_BLOBSTORE, OP_CREATE_CONTAINER, &serialize(cmd)?, ) .map(|v| deserialize::<Container>(v.as_ref()).unwrap()) .map_err(|e| e.into()) } /// Removes a container from the store. Whether or not this will fail if the container /// has items may be specific to a given provider implementation. 
pub fn remove_container(&self, name: &str) -> HandlerResult<()> { let cmd = Container { id: name.to_string(), }; host_call( &self.binding, CAPID_BLOBSTORE, OP_REMOVE_CONTAINER, &serialize(cmd)?, ) .map(|_v| ()) .map_err(|e| e.into()) } /// Removes an object from a container pub fn remove_object(&self, name: &str, container: &str) -> crate::HandlerResult<()> { let cmd = Blob { id: name.to_string(), container: container.to_string(), byte_size: 0, }; host_call( &self.binding, CAPID_BLOBSTORE, OP_REMOVE_OBJECT, &serialize(cmd)?, ) .map(|_v| ()) .map_err(|e| e.into()) } /// Lists all objects within a container pub fn list_objects(&self, container: &str) -> HandlerResult<BlobList> { let cmd = Container { id: container.to_string(), }; host_call( &self.binding, CAPID_BLOBSTORE, OP_LIST_OBJECTS, &serialize(cmd)?, ) .map(|v| deserialize::<BlobList>(v.as_ref()).unwrap()) .map_err(|e| e.into()) } /// Obtains binary object metadata, does not include the object bytes pub fn get_blob_info(&self, container: &str, id: &str) -> HandlerResult<Option<Blob>> { let cmd = Blob { id: id.to_string(), container: container.to_string(), byte_size: 0, }; host_call( &self.binding, CAPID_BLOBSTORE, OP_GET_OBJECT_INFO, &serialize(cmd)?, ) .map(|v| { let b = deserialize::<Blob>(v.as_ref()).unwrap(); if b.id.is_empty() { None } else { Some(b) } }) .map_err(|e| e.into()) } /// Indicates that an upload is about to begin for an item. You should follow this /// call up with a for loop/iteration that sends successive chunks to the store. The chunk /// size specified in this call is a request or suggestion. 
It is up to the provider to determine /// the actual chunk size, which is returned in the resulting `Transfer` instance pub fn start_upload( &self, blob: &Blob, chunk_size: u64, total_bytes: u64, ) -> HandlerResult<Transfer> { let transfer = Transfer { blob_id: blob.id.to_string(), container: blob.container.to_string(), chunk_size, total_size: total_bytes, total_chunks: total_bytes / chunk_size, context: None, }; let cmd = FileChunk { sequence_no: 0, container: blob.container.to_string(), id: blob.id.to_string(), chunk_size, total_bytes, chunk_bytes: vec![], context: None, }; host_call( &self.binding, CAPID_BLOBSTORE, OP_START_UPLOAD, &serialize(cmd)?, ) .map(|_v| transfer) .map_err(|e| e.into()) } /// Uploads an individual chunk of a file to the blob store. This call must only ever /// come after signaling the start of a new upload with the `start_upload` function. pub fn upload_chunk( &self, transfer: &Transfer, offset: u64, bytes: &[u8], ) -> crate::HandlerResult<()> { let cmd = FileChunk { id: transfer.blob_id.to_string(), container: transfer.container.to_string(), sequence_no: offset, chunk_size: transfer.chunk_size, total_bytes: transfer.total_size, chunk_bytes: bytes.to_vec(), context: None, }; host_call( &self.binding, CAPID_BLOBSTORE, OP_UPLOAD_CHUNK, &serialize(cmd)?, ) .map(|_v| ()) .map_err(|e| e.into()) } /// Sends a request to the provider to begin a chunked download of a file. If this /// succeeds, your actor will begin receiving `OP_RECEIVE_CHUNK` messages from the /// provider. 
pub fn start_download( &self, blob: &Blob, chunk_size: u64, context: Option<String>, ) -> crate::HandlerResult<Transfer> { let transfer = Transfer { blob_id: blob.id.to_string(), container: blob.container.to_string(), chunk_size, total_size: blob.byte_size, total_chunks: blob.byte_size / chunk_size, context: context.clone(), }; let cmd = StreamRequest { container: blob.container.to_string(), id: blob.id.to_string(), chunk_size, context, }; host_call( &self.binding, CAPID_BLOBSTORE, OP_START_DOWNLOAD, &serialize(cmd)?, ) .map(|_v| transfer) .map_err(|e| e.into()) } }
// Auto-generated register API (svd2rust style) for GPIOD_AFRL: eight 4-bit
// alternate-function fields AFR0..AFR7 packed into one 32-bit register.
// Do not hand-edit logic here; regenerate from the SVD instead.
#[doc = "Register `GPIOD_AFRL` reader"]
pub type R = crate::R<GPIOD_AFRL_SPEC>;
#[doc = "Register `GPIOD_AFRL` writer"]
pub type W = crate::W<GPIOD_AFRL_SPEC>;
#[doc = "Field `AFR0` reader - AFR0"]
pub type AFR0_R = crate::FieldReader;
#[doc = "Field `AFR0` writer - AFR0"]
pub type AFR0_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 4, O>;
#[doc = "Field `AFR1` reader - AFR1"]
pub type AFR1_R = crate::FieldReader;
#[doc = "Field `AFR1` writer - AFR1"]
pub type AFR1_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 4, O>;
#[doc = "Field `AFR2` reader - AFR2"]
pub type AFR2_R = crate::FieldReader;
#[doc = "Field `AFR2` writer - AFR2"]
pub type AFR2_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 4, O>;
#[doc = "Field `AFR3` reader - AFR3"]
pub type AFR3_R = crate::FieldReader;
#[doc = "Field `AFR3` writer - AFR3"]
pub type AFR3_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 4, O>;
#[doc = "Field `AFR4` reader - AFR4"]
pub type AFR4_R = crate::FieldReader;
#[doc = "Field `AFR4` writer - AFR4"]
pub type AFR4_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 4, O>;
#[doc = "Field `AFR5` reader - AFR5"]
pub type AFR5_R = crate::FieldReader;
#[doc = "Field `AFR5` writer - AFR5"]
pub type AFR5_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 4, O>;
#[doc = "Field `AFR6` reader - AFR6"]
pub type AFR6_R = crate::FieldReader;
#[doc = "Field `AFR6` writer - AFR6"]
pub type AFR6_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 4, O>;
#[doc = "Field `AFR7` reader - AFR7"]
pub type AFR7_R = crate::FieldReader;
#[doc = "Field `AFR7` writer - AFR7"]
pub type AFR7_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 4, O>;
// Field readers: each extracts its 4-bit slice of the cached register value.
impl R {
    #[doc = "Bits 0:3 - AFR0"]
    #[inline(always)]
    pub fn afr0(&self) -> AFR0_R {
        AFR0_R::new((self.bits & 0x0f) as u8)
    }
    #[doc = "Bits 4:7 - AFR1"]
    #[inline(always)]
    pub fn afr1(&self) -> AFR1_R {
        AFR1_R::new(((self.bits >> 4) & 0x0f) as u8)
    }
    #[doc = "Bits 8:11 - AFR2"]
    #[inline(always)]
    pub fn afr2(&self) -> AFR2_R {
        AFR2_R::new(((self.bits >> 8) & 0x0f) as u8)
    }
    #[doc = "Bits 12:15 - AFR3"]
    #[inline(always)]
    pub fn afr3(&self) -> AFR3_R {
        AFR3_R::new(((self.bits >> 12) & 0x0f) as u8)
    }
    #[doc = "Bits 16:19 - AFR4"]
    #[inline(always)]
    pub fn afr4(&self) -> AFR4_R {
        AFR4_R::new(((self.bits >> 16) & 0x0f) as u8)
    }
    #[doc = "Bits 20:23 - AFR5"]
    #[inline(always)]
    pub fn afr5(&self) -> AFR5_R {
        AFR5_R::new(((self.bits >> 20) & 0x0f) as u8)
    }
    #[doc = "Bits 24:27 - AFR6"]
    #[inline(always)]
    pub fn afr6(&self) -> AFR6_R {
        AFR6_R::new(((self.bits >> 24) & 0x0f) as u8)
    }
    #[doc = "Bits 28:31 - AFR7"]
    #[inline(always)]
    pub fn afr7(&self) -> AFR7_R {
        AFR7_R::new(((self.bits >> 28) & 0x0f) as u8)
    }
}
// Field writers: each returns a proxy positioned at the field's bit offset.
impl W {
    #[doc = "Bits 0:3 - AFR0"]
    #[inline(always)]
    #[must_use]
    pub fn afr0(&mut self) -> AFR0_W<GPIOD_AFRL_SPEC, 0> {
        AFR0_W::new(self)
    }
    #[doc = "Bits 4:7 - AFR1"]
    #[inline(always)]
    #[must_use]
    pub fn afr1(&mut self) -> AFR1_W<GPIOD_AFRL_SPEC, 4> {
        AFR1_W::new(self)
    }
    #[doc = "Bits 8:11 - AFR2"]
    #[inline(always)]
    #[must_use]
    pub fn afr2(&mut self) -> AFR2_W<GPIOD_AFRL_SPEC, 8> {
        AFR2_W::new(self)
    }
    #[doc = "Bits 12:15 - AFR3"]
    #[inline(always)]
    #[must_use]
    pub fn afr3(&mut self) -> AFR3_W<GPIOD_AFRL_SPEC, 12> {
        AFR3_W::new(self)
    }
    #[doc = "Bits 16:19 - AFR4"]
    #[inline(always)]
    #[must_use]
    pub fn afr4(&mut self) -> AFR4_W<GPIOD_AFRL_SPEC, 16> {
        AFR4_W::new(self)
    }
    #[doc = "Bits 20:23 - AFR5"]
    #[inline(always)]
    #[must_use]
    pub fn afr5(&mut self) -> AFR5_W<GPIOD_AFRL_SPEC, 20> {
        AFR5_W::new(self)
    }
    #[doc = "Bits 24:27 - AFR6"]
    #[inline(always)]
    #[must_use]
    pub fn afr6(&mut self) -> AFR6_W<GPIOD_AFRL_SPEC, 24> {
        AFR6_W::new(self)
    }
    #[doc = "Bits 28:31 - AFR7"]
    #[inline(always)]
    #[must_use]
    pub fn afr7(&mut self) -> AFR7_W<GPIOD_AFRL_SPEC, 28> {
        AFR7_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "GPIO alternate function low register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`gpiod_afrl::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`gpiod_afrl::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct GPIOD_AFRL_SPEC;
impl crate::RegisterSpec for GPIOD_AFRL_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`gpiod_afrl::R`](R) reader structure"]
impl crate::Readable for GPIOD_AFRL_SPEC {}
#[doc = "`write(|w| ..)` method takes [`gpiod_afrl::W`](W) writer structure"]
impl crate::Writable for GPIOD_AFRL_SPEC {
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets GPIOD_AFRL to value 0"]
impl crate::Resettable for GPIOD_AFRL_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
#[macro_use] extern crate graphql_client; #[macro_use] extern crate serde_derive; extern crate serde; extern crate serde_json; #[derive(GraphQLQuery)] #[graphql( query_path = "tests/operation_selection/queries.graphql", schema_path = "tests/operation_selection/schema.graphql", response_derives = "Debug", )] #[allow(dead_code)] struct Heights; #[derive(GraphQLQuery)] #[graphql( query_path = "tests/operation_selection/queries.graphql", schema_path = "tests/operation_selection/schema.graphql", response_derives = "Debug", )] #[allow(dead_code)] struct Echo; const HEIGHTS_RESPONSE: &'static str = r##"{"mountainHeight": 224, "buildingHeight": 12}"##; const ECHO_RESPONSE: &'static str = r##"{"echo": "tiramisù"}"##; #[test] fn operation_selection_works() { let heights_response_data: heights::ResponseData = serde_json::from_str(HEIGHTS_RESPONSE).unwrap(); let echo_response_data: echo::ResponseData = serde_json::from_str(ECHO_RESPONSE).unwrap(); let _echo_variables = echo::Variables { msg: Some("hi".to_string()), }; let _height_variables = heights::Variables { building_id: "12".to_string(), mountain_name: Some("canigou".to_string()), }; let expected_echo = r##"ResponseData { echo: Some("tiramisù") }"##; let expected_heights = r##"ResponseData { mountain_height: Some(224), building_height: Some(12) }"##; assert_eq!(expected_echo, format!("{:?}", echo_response_data)); assert_eq!(expected_heights, format!("{:?}", heights_response_data)); }
extern crate hid; extern crate cp211x_uart; use std::time::Duration; use cp211x_uart::{HidUart, UartConfig, DataBits, StopBits, Parity, FlowControl}; fn run() -> Result<(), cp211x_uart::Error> { let manager = hid::init()?; for device in manager.find(Some(0x10C4), Some(0xEA80)) { let handle = device.open()?; let mut uart = HidUart::new(handle)?; let config = UartConfig { baud_rate: 9600, data_bits: DataBits::Bits8, stop_bits: StopBits::Short, parity: Parity::None, flow_control: FlowControl::None, }; uart.set_config(&config)?; uart.set_read_timeout(Duration::from_millis(50)); uart.set_write_timeout(Duration::from_millis(500)); uart.flush_fifos(true, true)?; uart.write(&[0x01, 0x02, 0x03][..])?; let mut buf: [u8; 256] = [0; 256]; uart.read(&mut buf)?; } Ok(()) } fn main() { match run() { Err(err) => { eprintln!("ERROR: {}", err); } _ => {} } }
#[macro_use] extern crate criterion; extern crate red_mod; use criterion::Criterion; fn criterion_benchmark(_: &mut Criterion) { // c.bench_function("hash", move |b| { // // This will avoid timing the to_vec call. // b.iter_with_setup(|| std::collections::HashMap::<u64, &str>::new(), |mut data| hash_insert(data)) // }); // // c.bench_function("btree", move |b| { // // This will avoid timing the to_vec call. // b.iter_with_setup(|| std::collections::BTreeMap::<u64, &str>::new(), |mut data| btree_insert(data)) // }); } criterion_group!(benches, criterion_benchmark); criterion_main!(benches);
use crate::null_collider::NullCollider; use crate::sphere_collider::SphereCollider; use crate::plane_collider::PlaneCollider; use crate::mesh_collider::MeshCollider; use crate::aligned_box_collider::AlignedBoxCollider; /// How [crate::Collider] generics are passed into [crate::PhysicsSystem]. /// /// As it turns out, an enum is easier to work with than a `Box<dyn ...>`. pub enum ColliderWrapper { Null(NullCollider), Sphere(SphereCollider), Plane(PlaneCollider), Mesh(MeshCollider), AlignedBox(AlignedBoxCollider), }
use std::str::FromStr; use num::BigInt; pub struct MersenneNumber { index: BigInt, } impl MersenneNumber { pub fn new<T>(i: T) -> MersenneNumber where BigInt: From<T>, { let int = BigInt::from(i); MersenneNumber { index: int } } //pub fn nth(n: isize) -> BigInt {} } pub fn get_mersenne_primes() -> Vec<BigInt> { // notice this is index const TEXT: &str = include_str!("mersenne_primes.txt"); let mut result: Vec<BigInt> = vec![]; for s in TEXT.lines() { let i = BigInt::from_str(s).unwrap(); result.push(i) } result }
use super::{Response,ReqErr};
use std::env;
use std::io::Read;
use std::fs::File;
use std::fs::read_dir;
use std::path::Path;
use std::net::TcpStream;
use regex::Regex;

// Server identifier reported in every generated Response.
const WEB_SERVER_NAME: &'static str = "jrp338-kqj094-web-server/0.1";

/* read_stream */
// takes in a TcpStream and reads contents into buffer
// returns contents in String format
// NOTE(review): the full 128-byte buffer is converted on every pass, so when
// a read returns fewer bytes than the previous one, stale bytes from the
// earlier read leak into `contents`; a multi-byte UTF-8 sequence split across
// two reads makes `from_utf8(...).unwrap()` panic; and a 0-byte read (EOF)
// still appends the stale buffer once before breaking. Consider slicing
// `&buf[..bytes_read]` and using `from_utf8_lossy` — confirm with callers.
pub fn read_stream(stream: &mut TcpStream) -> String {
    let mut buf = [0; 128];
    let mut contents = String::new();
    while let Ok(bytes_read) = stream.read(&mut buf) {
        let c = String::from_utf8(buf.to_vec()).unwrap();
        contents.push_str(&c);
        //in case response does not take up all of buffer
        if bytes_read < 128 {
            break;
        }
    }
    contents
}

// A previous attempt at a loopback test for read_stream, kept disabled:
// it bound a local TcpListener, wrote a 128-byte NUL-padded payload through a
// connected TcpStream, and asserted on the value read back.
// #[cfg(test)]
// mod read_stream_tests {
//     use super::read_stream;
//     use std::io::Write;
//     use std::net::{TcpStream, TcpListener};
//     #[test]
//     fn test() {
//         stream_assert();
//     }
//     fn stream_assert() {
//         let listener = TcpListener::bind("127.0.0.1:8080").unwrap();
//         if let Ok(mut stream) = TcpStream::connect("127.0.0.1:8080") {
//             let _ = stream.write("foobar...".repeat-style 128-byte payload padded with \u{0});
//             let listen_stream = listener.incoming().next();
//             let mut result = listen_stream.unwrap().unwrap();
//             let output = read_stream(&mut result);
//             assert_eq!("", output.as_str());
//         } else {
//             assert!(false);
//         }
//     }
// }

/* validate_request */
// checks whether request is valid
// will return Response if request is valid
// will return ReqErr (400, 403, 404) otherwise
pub fn validate_request(req_info: &Vec<&str>) -> Result<Response, ReqErr> {
    //regex to match file name
    // NOTE(review): inside the class, `-` sits between `\d` and `_`; some
    // regex engines parse that as a range. `[\w_-]` (trailing `-`) would be
    // unambiguous — confirm against the regex crate before relying on this.
    let re = Regex::new(r"^(/[\w\d-_]+)*/[\w\d-_]+").unwrap();
    //Step 1: Check if valid request
    //Check if it is a GET request,
    //whether file path is really a file path,
    //and whether the protocol is HTTP
    if req_info.len() >= 3
        && req_info[0] == "GET"
        && re.is_match(req_info[1])
        && req_info[2].starts_with("HTTP")
    {
        //Step 2: Check if file exists
        //generate path with environment's current directory
        let mut path_string = String::new();
        let env_path = env::current_dir().unwrap();
        path_string.push_str(&env_path.display().to_string());
        path_string.push_str(req_info[1]);
        // this is a compressed file, cannot open
        // NOTE(review): substring match, so any path merely *containing*
        // ".zip"/".7z" (e.g. "my.zipper") is also rejected — confirm intent.
        if path_string.contains(".zip") || path_string.contains(".7z") {
            //(403 Forbidden)
            return Err(ReqErr::Err403);
        }
        let path = Path::new(&path_string);
        println!("{}", path_string);
        if path.exists() {
            // Step 3: Check if it's a file or directory
            if path.is_file() {
                //Step 4: Check whether file is not off limits
                let file = File::open(&path_string);
                match file {
                    Ok(mut f) => {
                        //200 Ok! Create response
                        return Ok(generate_response(&mut f, &req_info));
                    },
                    Err(_) => {
                        //(403 Forbidden)
                        return Err(ReqErr::Err403);
                    }
                }
            } else if path.is_dir() {
                if let Ok(dir_entries) = read_dir(path) {
                    // check for index files
                    for dir_entry in dir_entries {
                        if let Ok(entry) = dir_entry {
                            let file_type_result = entry.file_type();
                            if let Ok(file_type) = file_type_result {
                                if file_type.is_file() {
                                    let entry_name = entry.file_name();
                                    let entry_str = entry_name.to_str().unwrap();
                                    if entry_str == "index.html"
                                        || entry_str == "index.shtml"
                                        || entry_str == "index.txt"
                                    {
                                        let file = File::open(&entry.path().as_path());
                                        match file {
                                            Ok(mut f) => {
                                                //200 Ok! Create response
                                                return Ok(generate_response(&mut f, &req_info));
                                            },
                                            Err(_) => {
                                                //(403 Forbidden)
                                                return Err(ReqErr::Err403);
                                            }
                                        }
                                    }
                                }
                            }
                        }
                    }
                    // return 404 if not found
                    return Err(ReqErr::Err404);
                } else {
                    // since we already checked if the path exists
                    // and it's a directory, that means that any error
                    // comes from forbidden access to directory
                    return Err(ReqErr::Err403);
                }
            }
        }
        return Err(ReqErr::Err404)
    } else {
        //(400 Bad Request)
        return Err(ReqErr::Err400);
    }
}

#[cfg(test)]
mod validate_request_tests {
    use super::{Response, ReqErr, validate_request,WEB_SERVER_NAME};

    #[test]
    fn empty_input() {
        validate_assert(&vec![], Err(ReqErr::Err400));
    }
    #[test]
    fn one_input() {
        validate_assert(&vec!["lol"], Err(ReqErr::Err400));
    }
    #[test]
    fn two_input() {
        validate_assert(&vec!["lol", "meow"], Err(ReqErr::Err400));
    }
    #[test]
    fn bad_get() {
        validate_assert(&vec!["get", "/src/", "HTTP"], Err(ReqErr::Err400));
    }
    #[test]
    fn bad_path() {
        validate_assert(&vec!["GET", "src/", "HTTP"], Err(ReqErr::Err400));
    }
    #[test]
    fn bad_proto() {
        validate_assert(&vec!["GET", "/src/", "http"], Err(ReqErr::Err400));
    }
    #[test]
    fn path_does_not_exist_file() {
        validate_assert(&vec!["GET", "/srcLOL/", "HTTP"], Err(ReqErr::Err404));
    }
    #[test]
    fn successful_file() {
        let response = Response {
            protocol: "HTTP".to_owned(),
            status_message: "200 OK".to_owned(),
            web_server_name: WEB_SERVER_NAME.to_owned(),
            content_type: "text/plain".to_owned(),
            content_length: 4,
            file_content: "meow".to_owned(),
        };
        validate_assert(&vec!["GET", "/test/meow.txt", "HTTP"], Ok(response));
    }
    #[test]
    fn no_index_files() {
        validate_assert(&vec!["GET", "/test/", "HTTP"], Err(ReqErr::Err404));
    }
    #[test]
    fn successful_html_index() {
        let response = Response {
            protocol: "HTTP".to_owned(),
            status_message: "200 OK".to_owned(),
            web_server_name: WEB_SERVER_NAME.to_owned(),
            content_type: "text/html".to_owned(),
            content_length: 0,
            file_content: "".to_owned(),
        };
        validate_assert(&vec!["GET", "/test/html/", "HTTP"], Ok(response));
    }
    #[test]
    fn successful_shtml_index() {
        let response = Response {
            protocol: "HTTP".to_owned(),
            status_message: "200 OK".to_owned(),
            web_server_name: WEB_SERVER_NAME.to_owned(),
            content_type: "text/html".to_owned(),
            content_length: 0,
            file_content: "".to_owned(),
        };
        validate_assert(&vec!["GET", "/test/shtml/", "HTTP"], Ok(response));
    }
    #[test]
    fn successful_txt_index() {
        let response = Response {
            protocol: "HTTP".to_owned(),
            status_message: "200 OK".to_owned(),
            web_server_name: WEB_SERVER_NAME.to_owned(),
            content_type: "text/plain".to_owned(),
            content_length: 0,
            file_content: "".to_owned(),
        };
        validate_assert(&vec!["GET", "/test/txt/", "HTTP"], Ok(response));
    }
    // Need to create forbidden files for forbiddden tests to work
    // #[test]
    // fn forbidden_access_file() {
    //     validate_assert(&vec!["GET", "/test/locked_test.txt", "HTTP"], Err(ReqErr::Err403));
    // }
    // #[test]
    // fn forbidden_access_dir() {
    //     validate_assert(&vec!["GET", "/test/forbidden/", "HTTP"], Err(ReqErr::Err403));
    // }
    // #[test]
    // fn forbidden_html_index() {
    //     validate_assert(&vec!["GET", "/test/forbidden_html/", "HTTP"], Err(ReqErr::Err403));
    // }
    // #[test]
    // fn forbidden_shtml_index() {
    //     validate_assert(&vec!["GET", "/test/forbidden_shtml/", "HTTP"], Err(ReqErr::Err403));
    // }
    // #[test]
    // fn forbidden_txt_index() {
    //     validate_assert(&vec!["GET", "/test/forbidden_txt/", "HTTP"], Err(ReqErr::Err403));
    // }
    fn validate_assert(req: &Vec<&str>, expected: Result<Response, ReqErr>) {
        let output = validate_request(req);
        assert_eq!(expected, output);
    }
}

/* generate_response */
//takes in the file to be read, request info
//returns a Response
//generates response to be written onto stream
//does not validate req_info because this function is only called
// by validate_request(), which handles validation
fn generate_response(file: &mut File, req_info: &Vec<&str>) -> Response {
    let mut file_contents = String::new();
    let bytes_read = file.read_to_string(&mut file_contents).unwrap();
    //checks whether content is html or plain
    // NOTE(review): keyed off the *request path* containing "html", so a name
    // like "/htmlnotes.txt" would be served as text/html — confirm intent.
    let mut content_type = String::new();
    content_type.push_str("text/");
    if req_info[1].contains("html") {
        content_type.push_str("html");
    } else {
        content_type.push_str("plain");
    }
    //should be some variant of HTTP
    let protocol = req_info[2];
    Response {
        protocol: protocol.to_owned(),
        status_message: "200 OK".to_owned(),
        web_server_name: WEB_SERVER_NAME.to_owned(),
        content_type: content_type,
        content_length: bytes_read,
        file_content: file_contents,
    }
}

#[cfg(test)]
mod generate_response_tests {
    use std::env;
    use std::fs::File;
    use super::{Response, generate_response, WEB_SERVER_NAME};

    #[test]
    fn txt_response() {
        let response = Response {
            protocol: "HTTP".to_owned(),
            status_message: "200 OK".to_owned(),
            web_server_name: WEB_SERVER_NAME.to_owned(),
            content_type: "text/plain".to_owned(),
            content_length: 4,
            file_content: "meow".to_owned(),
        };
        response_assert(&vec!["GET", "/test/meow.txt", "HTTP"], response);
    }
    #[test]
    fn html_response() {
        let response = Response {
            protocol: "HTTP".to_owned(),
            status_message: "200 OK".to_owned(),
            web_server_name: WEB_SERVER_NAME.to_owned(),
            content_type: "text/html".to_owned(),
            content_length: 0,
            file_content: "".to_owned(),
        };
        response_assert(&vec!["GET", "/test/html/index.html", "HTTP"], response);
    }
    #[test]
    fn shtml_response() {
        let response = Response {
            protocol: "HTTP".to_owned(),
            status_message: "200 OK".to_owned(),
            web_server_name: WEB_SERVER_NAME.to_owned(),
            content_type: "text/html".to_owned(),
            content_length: 0,
            file_content: "".to_owned(),
        };
        response_assert(&vec!["GET", "/test/shtml/index.shtml", "HTTP"], response);
    }
    #[test]
    fn http11_response() {
        let response = Response {
            protocol: "HTTP/1.1".to_owned(),
            status_message: "200 OK".to_owned(),
            web_server_name: WEB_SERVER_NAME.to_owned(),
            content_type: "text/html".to_owned(),
            content_length: 0,
            file_content: "".to_owned(),
        };
        response_assert(&vec!["GET", "/test/shtml/index.shtml", "HTTP/1.1"], response);
    }
    fn response_assert(req_info: &Vec<&str>, expected: Response) {
        let mut path_string = String::new();
        let env_path = env::current_dir().unwrap();
        path_string.push_str(&env_path.display().to_string());
        path_string.push_str(req_info[1]);
        let file = File::open(&path_string);
        let output = generate_response(&mut file.unwrap(), req_info);
        assert_eq!(expected, output);
    }
}
use crate::listnode::*; pub fn merge_two_lists(l1: Option<Box<ListNode>>, l2: Option<Box<ListNode>>) -> Option<Box<ListNode>> { if l1.is_none() { return l2 } if l2.is_none() { return l1 } let mut dummy = Box::new(ListNode::new(0)); let mut p1 = l1; let mut p2 = l2; let mut p = dummy.as_mut(); loop { match &p1 { None => { p.next = p2.clone(); break; }, Some(q1) => { match &p2 { None => { p.next = p1.clone(); break; }, Some(q2) => { if q1.val < q2.val { p.next = p1.clone(); p = p.next.as_mut().unwrap(); p1 = p1.unwrap().next; } else { p.next = p2.clone(); p = p.next.as_mut().unwrap(); p2 = p2.unwrap().next; } }, } }, } } dummy.next } #[test] fn test_merge_two_lists() { assert_eq!(list_to_vec(merge_two_lists(arr_to_list(&[1, 2, 4]), arr_to_list(&[1, 3, 4]))), vec![1, 1, 2, 3, 4, 4]); }
use azure_core::errors::{AzureError, UnexpectedHTTPResult}; use hyper::body; use hyper::client::ResponseFuture; use url::Url; #[derive(Debug)] pub struct PerformRequestResponse { pub(crate) url: Url, pub(crate) response_future: ResponseFuture, } impl PerformRequestResponse { pub fn url(&self) -> &Url { &self.url } pub async fn check_status_extract_headers_and_body( self, expected_status_code: hyper::StatusCode, ) -> Result<(hyper::HeaderMap, body::Bytes), AzureError> { let (status, headers, body) = { let (head, body) = self.response_future.await?.into_parts(); (head.status, head.headers, body::to_bytes(body).await?) }; if status == expected_status_code { Ok((headers, body)) } else { Err(AzureError::UnexpectedHTTPResult(UnexpectedHTTPResult::new( expected_status_code, status, std::str::from_utf8(&body)?, ))) } } } impl std::convert::From<(Url, ResponseFuture)> for PerformRequestResponse { fn from(values: (Url, ResponseFuture)) -> Self { PerformRequestResponse { url: values.0, response_future: values.1, } } }
/* * Copyright (C) 2019-2022 TON Labs. All Rights Reserved. * * Licensed under the SOFTWARE EVALUATION License (the "License"); you may not use * this file except in compliance with the License. * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific TON DEV software governing permissions and * limitations under the License. */ mod tokenize_tests { use crate::{Int, Param, ParamType, Token, TokenValue, Uint}; // use serde::Serialize; use std::collections::{BTreeMap, HashMap}; use token::{Detokenizer, Tokenizer}; use ton_block::{Grams, MsgAddress}; use ton_types::{AccountId, BuilderData, Cell, SliceData}; #[test] fn test_tokenize_ints() { let max_gram = 0x007F_FFFF_FFFF_FFFF_FFFF_FFFF_FFFF_FFFFu128; // 2^120 - 1 let input = r#"{ "a" : 123, "b" : -456, "c" : "-0xabcdef", "e" : "789", "f" : "-12345678900987654321", "g" : "0x7FFFFFFFFFFFFFFFFFFFFFFFFFFFFF", "h" : "-1000", "i" : "1000" }"#; let params = vec![ Param { name: "a".to_owned(), kind: ParamType::Uint(8), }, Param { name: "b".to_owned(), kind: ParamType::Int(16), }, Param { name: "c".to_owned(), kind: ParamType::Int(32), }, Param { name: "e".to_owned(), kind: ParamType::Uint(13), }, Param { name: "f".to_owned(), kind: ParamType::Int(128), }, Param { name: "g".to_owned(), kind: ParamType::Token, }, Param { name: "h".to_owned(), kind: ParamType::VarInt(16), }, Param { name: "i".to_owned(), kind: ParamType::VarUint(32), }, ]; let expected_tokens = vec![ Token { name: "a".to_owned(), value: TokenValue::Uint(Uint::new(123, 8)), }, Token { name: "b".to_owned(), value: TokenValue::Int(Int::new(-456, 16)), }, Token { name: "c".to_owned(), value: TokenValue::Int(Int::new(-0xabcdef, 32)), }, Token { name: "e".to_owned(), value: TokenValue::Uint(Uint::new(789, 13)), }, Token { name: "f".to_owned(), value: 
TokenValue::Int(Int::new(-12345678900987654321i128, 128)), }, Token::new("g", TokenValue::Token(Grams::new(max_gram).unwrap())), Token { name: "h".to_owned(), value: TokenValue::VarInt(16, (-1000i32).into()), }, Token { name: "i".to_owned(), value: TokenValue::VarUint(32, 1000u32.into()), }, ]; assert_eq!( Tokenizer::tokenize_all_params(&params, &serde_json::from_str(input).unwrap()).unwrap(), expected_tokens ); // check that detokenizer gives the same result let input = Detokenizer::detokenize(&expected_tokens).unwrap(); println!("{}", input); assert_eq!( Tokenizer::tokenize_all_params(&params, &serde_json::from_str(&input).unwrap()).unwrap(), expected_tokens ); } #[test] fn test_int_checks() { // number doesn't fit into parameter size let input = r#"{ "a" : 128 }"#; let params = vec![Param { name: "a".to_owned(), kind: ParamType::Uint(7), }]; assert!(Tokenizer::tokenize_all_params(&params, &serde_json::from_str(input).unwrap()).is_err()); // number doesn't fit into i64 range used in serde_json let input = r#"{ "a" : 12345678900987654321 }"#; let params = vec![Param { name: "a".to_owned(), kind: ParamType::Int(64), }]; assert!(Tokenizer::tokenize_all_params(&params, &serde_json::from_str(input).unwrap()).is_err()); // test BigInt::bits() case for -2^n values let input_fit = r#"{ "a" : -128 }"#; let input_not_fit = r#"{ "a" : -129 }"#; let params = vec![Param { name: "a".to_owned(), kind: ParamType::Int(8), }]; assert!( Tokenizer::tokenize_all_params(&params, &serde_json::from_str(input_fit).unwrap()).is_ok() ); assert!( Tokenizer::tokenize_all_params(&params, &serde_json::from_str(input_not_fit).unwrap()) .is_err() ); // negative values for uint let input_num = r#"{ "a" : -1 }"#; let input_str = r#"{ "a" : "-5" }"#; let params = vec![Param { name: "a".to_owned(), kind: ParamType::Uint(8), }]; assert!(Tokenizer::tokenize_all_params(&params, &serde_json::from_str(input_num).unwrap()).is_err()); assert!(Tokenizer::tokenize_all_params(&params, 
&serde_json::from_str(input_str).unwrap()).is_err()); // varint max check let input = r#"{ "a" : "0xffffffffffffffffffffffffffffffff" }"#; let params = vec![Param { name: "a".to_owned(), kind: ParamType::VarInt(16), }]; assert!(Tokenizer::tokenize_all_params(&params, &serde_json::from_str(input).unwrap()).is_err()); // negative values for varuint let input_num = r#"{ "a" : -1 }"#; let input_str = r#"{ "a" : "-5" }"#; let params = vec![Param { name: "a".to_owned(), kind: ParamType::VarUint(8), }]; assert!(Tokenizer::tokenize_all_params(&params, &serde_json::from_str(input_num).unwrap()).is_err()); assert!(Tokenizer::tokenize_all_params(&params, &serde_json::from_str(input_str).unwrap()).is_err()); } #[test] fn test_tokenize_bool() { let input = r#"{ "a" : true, "b" : "false" }"#; let params = vec![ Param { name: "a".to_owned(), kind: ParamType::Bool, }, Param { name: "b".to_owned(), kind: ParamType::Bool, }, ]; let expected_tokens = vec![ Token { name: "a".to_owned(), value: TokenValue::Bool(true), }, Token { name: "b".to_owned(), value: TokenValue::Bool(false), }, ]; assert_eq!( Tokenizer::tokenize_all_params(&params, &serde_json::from_str(input).unwrap()).unwrap(), expected_tokens ); // check that detokenizer gives the same result let input = Detokenizer::detokenize(&expected_tokens).unwrap(); println!("{}", input); assert_eq!( Tokenizer::tokenize_all_params(&params, &serde_json::from_str(&input).unwrap()).unwrap(), expected_tokens ); } #[test] fn test_tokenize_empty() { let input = r#"{}"#; let params = vec![]; let expected_tokens = vec![]; assert_eq!( Tokenizer::tokenize_all_params(&params, &serde_json::from_str(input).unwrap()).unwrap(), expected_tokens ); // check that detokenizer gives the same result let input = Detokenizer::detokenize(&expected_tokens).unwrap(); println!("{}", input); assert_eq!( Tokenizer::tokenize_all_params(&params, &serde_json::from_str(&input).unwrap()).unwrap(), expected_tokens ); } #[test] fn test_tokenize_arrays() { let input = r#"{ 
"a" : [123, -456, "789", "-0x0ABc"], "b" : [ [false, "true"], [true, true, false] ] }"#; let params = vec![ Param { name: "a".to_owned(), kind: ParamType::Array(Box::new(ParamType::Int(16))), }, Param { name: "b".to_owned(), kind: ParamType::FixedArray( Box::new(ParamType::Array(Box::new(ParamType::Bool))), 2, ), }, ]; let dint_array = vec![ TokenValue::Int(Int::new(123, 16)), TokenValue::Int(Int::new(-456, 16)), TokenValue::Int(Int::new(789, 16)), TokenValue::Int(Int::new(-0x0abc, 16)), ]; let bool_array1 = vec![TokenValue::Bool(false), TokenValue::Bool(true)]; let bool_array2 = vec![ TokenValue::Bool(true), TokenValue::Bool(true), TokenValue::Bool(false), ]; let expected_tokens = vec![ Token { name: "a".to_owned(), value: TokenValue::Array(ParamType::Int(16), dint_array), }, Token { name: "b".to_owned(), value: TokenValue::FixedArray( ParamType::Array(Box::new(ParamType::Bool)), vec![ TokenValue::Array(ParamType::Bool, bool_array1), TokenValue::Array(ParamType::Bool, bool_array2), ]), }, ]; assert_eq!( Tokenizer::tokenize_all_params(&params, &serde_json::from_str(input).unwrap()).unwrap(), expected_tokens ); // check that detokenizer gives the same result let input = Detokenizer::detokenize(&expected_tokens).unwrap(); println!("{}", input); assert_eq!( Tokenizer::tokenize_all_params(&params, &serde_json::from_str(&input).unwrap()).unwrap(), expected_tokens ); } #[test] fn test_tokenize_tuple() { let input = r#"{ "t1" : { "a" : [-123, "456", "0x789"], "b" : "false", "c" : "0x1234" }, "t2" : [ { "a" : true, "b" : "0x12" }, { "a" : false, "b" : "0x34" }, { "a" : true, "b" : "0x56" } ] }"#; let tuple_params1 = vec![ Param { name: "a".to_owned(), kind: ParamType::Array(Box::new(ParamType::Int(16))), }, Param { name: "b".to_owned(), kind: ParamType::Bool, }, Param { name: "c".to_owned(), kind: ParamType::Int(16), }, ]; let tuple_params2 = vec![ Param { name: "a".to_owned(), kind: ParamType::Bool, }, Param { name: "b".to_owned(), kind: ParamType::Int(8), }, ]; let 
params = vec![ Param { name: "t1".to_owned(), kind: ParamType::Tuple(tuple_params1), }, Param { name: "t2".to_owned(), kind: ParamType::Array(Box::new(ParamType::Tuple(tuple_params2))), }, ]; let expected_tokens = vec![ Token { name: "t1".to_owned(), value: TokenValue::Tuple(vec![ Token { name: "a".to_owned(), value: TokenValue::Array( ParamType::Int(16), vec![ TokenValue::Int(Int::new(-123, 16)), TokenValue::Int(Int::new(456, 16)), TokenValue::Int(Int::new(0x789, 16)), ]), }, Token { name: "b".to_owned(), value: TokenValue::Bool(false), }, Token { name: "c".to_owned(), value: TokenValue::Int(Int::new(0x1234, 16)), }, ]), }, Token { name: "t2".to_owned(), value: TokenValue::Array( ParamType::Tuple(vec![ Param { name: "a".to_owned(), kind: ParamType::Bool, }, Param { name: "b".to_owned(), kind: ParamType::Int(8), } ]), vec![ TokenValue::Tuple(vec![ Token { name: "a".to_owned(), value: TokenValue::Bool(true), }, Token { name: "b".to_owned(), value: TokenValue::Int(Int::new(0x12, 8)), }, ]), TokenValue::Tuple(vec![ Token { name: "a".to_owned(), value: TokenValue::Bool(false), }, Token { name: "b".to_owned(), value: TokenValue::Int(Int::new(0x34, 8)), }, ]), TokenValue::Tuple(vec![ Token { name: "a".to_owned(), value: TokenValue::Bool(true), }, Token { name: "b".to_owned(), value: TokenValue::Int(Int::new(0x56, 8)), }, ]), ]), }, ]; assert_eq!( Tokenizer::tokenize_all_params(&params, &serde_json::from_str(input).unwrap()).unwrap(), expected_tokens ); // check that detokenizer gives the same result let input = Detokenizer::detokenize(&expected_tokens).unwrap(); println!("{}", input); assert_eq!( Tokenizer::tokenize_all_params(&params, &serde_json::from_str(&input).unwrap()).unwrap(), expected_tokens ); } #[test] fn test_tokenize_cell() { let input = r#"{ "a": "te6ccgEBAwEAIAACEAECAwQFBgcIAgEAEBUWFxgZGhscABALDA0ODxAREg==", "b": "" }"#; let params = vec![ Param::new("a", ParamType::Cell), Param::new("b", ParamType::Cell), ]; let mut expected_tokens = vec![]; let mut 
builder = BuilderData::with_bitstring(vec![1, 2, 3, 4, 5, 6, 7, 8, 0x80]).unwrap(); builder.checked_append_reference(BuilderData::with_bitstring(vec![11, 12, 13, 14, 15, 16, 17, 18, 0x80]).unwrap().into_cell().unwrap()).unwrap(); builder.checked_append_reference(BuilderData::with_bitstring(vec![21, 22, 23, 24, 25, 26, 27, 28, 0x80]).unwrap().into_cell().unwrap()).unwrap(); expected_tokens.push(Token::new("a", TokenValue::Cell(builder.into_cell().unwrap()))); expected_tokens.push(Token::new("b", TokenValue::Cell(Cell::default()))); assert_eq!( Tokenizer::tokenize_all_params(&params, &serde_json::from_str(input).unwrap()).unwrap(), expected_tokens ); // check that detokenizer gives the same result let input = Detokenizer::detokenize(&expected_tokens).unwrap(); println!("{}", input); assert_eq!( Tokenizer::tokenize_all_params(&params, &serde_json::from_str(&input).unwrap()).unwrap(), expected_tokens ); } #[test] fn test_tokenize_hashmap() { let input = r#"{ "a": { "-12": 42, "127": 37, "-128": 56 }, "b": { "4294967295": 777, "65535": 0 }, "c": { "1": { "q1" : 314, "q2" : 15 }, "2": { "q1" : 92, "q2" : 6 } }, "d": { "0:1111111111111111111111111111111111111111111111111111111111111111": 123 } }"#; let params = vec![ Param::new("a", ParamType::Map(Box::new(ParamType::Int(8)), Box::new(ParamType::Uint(32)))), Param::new("b", ParamType::Map(Box::new(ParamType::Uint(32)), Box::new(ParamType::Uint(32)))), Param::new("c", ParamType::Map(Box::new(ParamType::Int(8)), Box::new(ParamType::Tuple(vec![ Param::new("q1", ParamType::Uint(32)), Param::new("q2", ParamType::Int(8)) ])))), Param::new("d", ParamType::Map(Box::new(ParamType::Address), Box::new(ParamType::Uint(32)))), ]; let mut expected_tokens = vec![]; let mut map = BTreeMap::<String, TokenValue>::new(); map.insert(format!("{}", -12i8), TokenValue::Uint(Uint::new(42, 32))); map.insert(format!("{}", 127i8), TokenValue::Uint(Uint::new(37, 32))); map.insert(format!("{}", -128i8), TokenValue::Uint(Uint::new(56, 32))); 
expected_tokens.push(Token::new("a", TokenValue::Map(ParamType::Int(8), ParamType::Uint(32), map))); let mut map = BTreeMap::<String, TokenValue>::new(); map.insert(format!("{}", 0xFFFFFFFFu32), TokenValue::Uint(Uint::new(777, 32))); map.insert(format!("{}", 0x0000FFFFu32), TokenValue::Uint(Uint::new( 0, 32))); expected_tokens.push(Token::new("b", TokenValue::Map(ParamType::Uint(32), ParamType::Uint(32), map))); let mut map = BTreeMap::<String, TokenValue>::new(); map.insert(format!("{}", 1i8), TokenValue::Tuple(vec![ Token::new("q1", TokenValue::Uint(Uint::new(314, 32))), Token::new("q2", TokenValue::Int(Int::new(15, 8))), ])); map.insert(format!("{}", 2i8), TokenValue::Tuple(vec![ Token::new("q1", TokenValue::Uint(Uint::new(92, 32))), Token::new("q2", TokenValue::Int(Int::new(6, 8))), ])); expected_tokens.push(Token::new("c", TokenValue::Map( ParamType::Int(8), ParamType::Tuple(vec![ Param { name: "q1".to_owned(), kind: ParamType::Uint(32), }, Param { name: "q2".to_owned(), kind: ParamType::Int(8), }, ]), map ))); let mut map = BTreeMap::<String, TokenValue>::new(); map.insert( format!("{}", MsgAddress::with_standart(None, 0, AccountId::from([0x11; 32])).unwrap()), TokenValue::Uint(Uint::new(123, 32))); expected_tokens.push(Token::new("d", TokenValue::Map(ParamType::Address, ParamType::Uint(32), map))); assert_eq!( Tokenizer::tokenize_all_params(&params, &serde_json::from_str(input).unwrap()).unwrap(), expected_tokens ); // check that detokenizer gives the same result let input = Detokenizer::detokenize(&expected_tokens).unwrap(); println!("{}", input); assert_eq!( Tokenizer::tokenize_all_params(&params, &serde_json::from_str(&input).unwrap()).unwrap(), expected_tokens ); } #[test] fn test_tokenize_address() { let input = r#"{ "std": "-17:5555555555555555555555555555555555555555555555555555555555555555", "var": "-177:555_" }"#; let params = vec![ Param::new("std", ParamType::Address), Param::new("var", ParamType::Address), ]; let expected_tokens = vec![ Token { 
name: "std".to_owned(), value: TokenValue::Address(MsgAddress::with_standart( None, -17, AccountId::from([0x55; 32])).unwrap()) }, Token { name: "var".to_owned(), value: TokenValue::Address(MsgAddress::with_variant( None, -177, SliceData::new(vec![0x55, 0x50])).unwrap()) }, ]; assert_eq!( Tokenizer::tokenize_all_params(&params, &serde_json::from_str(input).unwrap()).unwrap(), expected_tokens ); // check that detokenizer gives the same result let input = Detokenizer::detokenize(&expected_tokens).unwrap(); println!("{}", input); assert_eq!( Tokenizer::tokenize_all_params(&params, &serde_json::from_str(&input).unwrap()).unwrap(), expected_tokens ); } #[test] fn test_tokenize_bytes() { let input = r#"{ "a": "ABCDEF", "b": "ABCDEF0102", "c": "55555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555" }"#; let params = vec![ Param::new("a", ParamType::Bytes), Param::new("b", ParamType::FixedBytes(3)), Param::new("c", ParamType::Bytes), ]; let expected_tokens = vec![ Token::new("a", TokenValue::Bytes(vec![0xAB, 0xCD, 0xEF])), Token::new("b", TokenValue::FixedBytes(vec![0xAB, 0xCD, 0xEF])), Token::new("c", TokenValue::Bytes(vec![0x55; 160])), ]; assert_eq!( Tokenizer::tokenize_all_params(&params, &serde_json::from_str(input).unwrap()).unwrap(), expected_tokens ); // check that detokenizer gives the same result let input = Detokenizer::detokenize(&expected_tokens).unwrap(); println!("{}", input); assert_eq!( Tokenizer::tokenize_all_params(&params, &serde_json::from_str(&input).unwrap()).unwrap(), expected_tokens ); } #[test] fn test_tokenize_time() { let input = r#"{ "a" : 123, "b" : "456", "c" : "0x789", "d": "0xffffffffffffffff" }"#; let params = vec![ Param { name: "a".to_owned(), kind: ParamType::Time, }, Param 
{ name: "b".to_owned(), kind: ParamType::Time, }, Param { name: "c".to_owned(), kind: ParamType::Time, }, Param { name: "d".to_owned(), kind: ParamType::Time, } ]; let expected_tokens = vec![ Token { name: "a".to_owned(), value: TokenValue::Time(123), }, Token { name: "b".to_owned(), value: TokenValue::Time(456), }, Token { name: "c".to_owned(), value: TokenValue::Time(0x789), }, Token { name: "d".to_owned(), value: TokenValue::Time(0xffffffffffffffff), } ]; assert_eq!( Tokenizer::tokenize_all_params(&params, &serde_json::from_str(input).unwrap()).unwrap(), expected_tokens ); // check that detokenizer gives the same result let input = Detokenizer::detokenize(&expected_tokens).unwrap(); println!("{}", input); assert_eq!( Tokenizer::tokenize_all_params(&params, &serde_json::from_str(&input).unwrap()).unwrap(), expected_tokens ); } #[test] fn test_time_checks() { // number doesn't fit into parameter size let input = r#"{ "a" : "0x10000000000000000" }"#; let params = vec![Param { name: "a".to_owned(), kind: ParamType::Time, }]; assert!(Tokenizer::tokenize_all_params(&params, &serde_json::from_str(input).unwrap()).is_err()); // negative values for time let input_num = r#"{ "a" : -1 }"#; let input_str = r#"{ "a" : "-5" }"#; let params = vec![Param { name: "a".to_owned(), kind: ParamType::Time, }]; assert!(Tokenizer::tokenize_all_params(&params, &serde_json::from_str(input_num).unwrap()).is_err()); assert!(Tokenizer::tokenize_all_params(&params, &serde_json::from_str(input_str).unwrap()).is_err()); } #[test] fn test_tokenize_expire() { let input = r#"{ "a" : 123, "b" : "456", "c" : "0x789", "d": "0xffffffff" }"#; let params = vec![ Param { name: "a".to_owned(), kind: ParamType::Expire, }, Param { name: "b".to_owned(), kind: ParamType::Expire, }, Param { name: "c".to_owned(), kind: ParamType::Expire, }, Param { name: "d".to_owned(), kind: ParamType::Expire, } ]; let expected_tokens = vec![ Token { name: "a".to_owned(), value: TokenValue::Expire(123), }, Token { name: 
"b".to_owned(), value: TokenValue::Expire(456), }, Token { name: "c".to_owned(), value: TokenValue::Expire(0x789), }, Token { name: "d".to_owned(), value: TokenValue::Expire(0xffffffff), } ]; assert_eq!( Tokenizer::tokenize_all_params(&params, &serde_json::from_str(input).unwrap()).unwrap(), expected_tokens ); // check that detokenizer gives the same result let input = Detokenizer::detokenize(&expected_tokens).unwrap(); println!("{}", input); assert_eq!( Tokenizer::tokenize_all_params(&params, &serde_json::from_str(&input).unwrap()).unwrap(), expected_tokens ); } #[test] fn test_expire_checks() { // number doesn't fit into parameter size let input = r#"{ "a" : "0x100000000" }"#; let params = vec![Param { name: "a".to_owned(), kind: ParamType::Expire, }]; assert!(Tokenizer::tokenize_all_params(&params, &serde_json::from_str(input).unwrap()).is_err()); // negative values for expire let input_num = r#"{ "a" : -1 }"#; let input_str = r#"{ "a" : "-5" }"#; let params = vec![Param { name: "a".to_owned(), kind: ParamType::Expire, }]; assert!(Tokenizer::tokenize_all_params(&params, &serde_json::from_str(input_num).unwrap()).is_err()); assert!(Tokenizer::tokenize_all_params(&params, &serde_json::from_str(input_str).unwrap()).is_err()); } #[test] fn test_tokenize_pubkey() { let input = r#"{ "a": "cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc", "b": "" }"#; let params = vec![ Param::new("a", ParamType::PublicKey), Param::new("b", ParamType::PublicKey) ]; let expected_tokens = vec![ Token::new("a", TokenValue::PublicKey(Some(ed25519_dalek::PublicKey::from_bytes(&[0xcc; 32]).unwrap()))), Token::new("b", TokenValue::PublicKey(None)) ]; assert_eq!( Tokenizer::tokenize_all_params(&params, &serde_json::from_str(input).unwrap()).unwrap(), expected_tokens ); // check that detokenizer gives the same result let input = Detokenizer::detokenize(&expected_tokens).unwrap(); println!("{}", input); assert_eq!( Tokenizer::tokenize_all_params(&params, 
&serde_json::from_str(&input).unwrap()).unwrap(), expected_tokens ); } #[test] fn test_tokenize_optional() { let input = r#"{ "a": 123, "b": null }"#; let params = vec![ Param::new("a", ParamType::Optional(Box::new(ParamType::VarUint(32)))), Param::new("b", ParamType::Optional(Box::new(ParamType::VarUint(32)))), Param::new("c", ParamType::Optional(Box::new(ParamType::VarUint(32)))), ]; let expected_tokens = vec![ Token { name: "a".to_owned(), value: TokenValue::Optional(ParamType::VarUint(32), Some(Box::new(TokenValue::VarUint(32, 123u32.into())))), }, Token { name: "b".to_owned(), value: TokenValue::Optional(ParamType::VarUint(32), None), }, Token { name: "c".to_owned(), value: TokenValue::Optional(ParamType::VarUint(32), None), }, ]; assert_eq!( Tokenizer::tokenize_all_params(&params, &serde_json::from_str(input).unwrap()).unwrap(), expected_tokens ); } #[test] fn test_tokenize_ref() { let input = r#"{ "a": 123, "b": { "c": true, "d": "some string" } }"#; let params = vec![ Param::new("a", ParamType::Ref(Box::new(ParamType::VarUint(32)))), Param::new("b", ParamType::Ref( Box::new(ParamType::Tuple(vec![ Param::new("c", ParamType::Bool), Param::new("d", ParamType::String), ])) )), ]; let expected_tokens = vec![ Token { name: "a".to_owned(), value: TokenValue::Ref(Box::new(TokenValue::VarUint(32, 123u32.into()))), }, Token { name: "b".to_owned(), value: TokenValue::Ref( Box::new(TokenValue::Tuple(vec![ Token { name: "c".to_owned(), value: TokenValue::Bool(true), }, Token { name: "d".to_owned(), value: TokenValue::String("some string".to_owned()), }, ])) ) }, ]; assert_eq!( Tokenizer::tokenize_all_params(&params, &serde_json::from_str(input).unwrap()).unwrap(), expected_tokens ); } #[test] fn test_unknown_param() { let input = r#"{ "a": 123, "b": 456 }"#; let params = vec![ Param::new("a", ParamType::Time) ]; assert!( Tokenizer::tokenize_optional_params( &params, &serde_json::from_str(input).unwrap(), &HashMap::new() ).is_err(), ); } } mod types_check_tests { use 
{Int, Param, ParamType, Token, TokenValue, Uint}; use ton_block::MsgAddress; use ton_types::Cell; use std::collections::BTreeMap; #[test] fn test_type_check() { fn assert_type_check(tokens: &[Token], params: &[Param]) { assert!(Token::types_check(&tokens, params)) } fn assert_not_type_check(tokens: &[Token], params: &[Param]) { assert!(!Token::types_check(&tokens, params)) } let big_int = Int::new(123, 64); let big_uint = Uint::new(456, 32); let mut map = BTreeMap::<String, TokenValue>::new(); map.insert("1".to_string(), TokenValue::Uint(Uint::new(17, 32))); let tokens = vec![ Token { name: "a".to_owned(), value: TokenValue::Uint(big_uint.clone()), }, Token { name: "b".to_owned(), value: TokenValue::Int(big_int.clone()), }, Token { name: "c".to_owned(), value: TokenValue::VarUint(32, 789u32.into()), }, Token { name: "d".to_owned(), value: TokenValue::VarInt(16, 1000u32.into()), }, Token { name: "e".to_owned(), value: TokenValue::Bool(false), }, Token { name: "f".to_owned(), value: TokenValue::Array( ParamType::Bool, vec![TokenValue::Bool(false), TokenValue::Bool(true)] ), }, Token { name: "g".to_owned(), value: TokenValue::FixedArray( ParamType::Int(64), vec![ TokenValue::Int(big_int.clone()), TokenValue::Int(big_int.clone()), ]), }, Token { name: "j".to_owned(), value: TokenValue::Tuple(vec![ Token { name: "a".to_owned(), value: TokenValue::Bool(true), }, Token { name: "b".to_owned(), value: TokenValue::Uint(big_uint.clone()), }, ]), }, Token { name: "k".to_owned(), value: TokenValue::Cell(Cell::default()), }, Token { name: "l".to_owned(), value: TokenValue::Address(MsgAddress::AddrNone) }, Token { name: "m1".to_owned(), value: TokenValue::Map(ParamType::Int(8), ParamType::Bool, BTreeMap::<String, TokenValue>::new()) }, Token { name: "m2".to_owned(), value: TokenValue::Map(ParamType::Int(8), ParamType::Uint(32), map) }, Token { name: "n".to_owned(), value: TokenValue::Bytes(vec![1]) }, Token { name: "o".to_owned(), value: TokenValue::FixedBytes(vec![1, 2, 3]) }, 
Token { name: "p".to_owned(), value: TokenValue::Token(17u64.into()) }, Token { name: "q".to_owned(), value: TokenValue::Time(123) }, Token { name: "r".to_owned(), value: TokenValue::Expire(456) }, Token { name: "s".to_owned(), value: TokenValue::PublicKey(None) }, Token { name: "t".to_owned(), value: TokenValue::String("123".to_owned()) }, Token { name: "u".to_owned(), value: TokenValue::Optional(ParamType::Int(256), None), }, Token { name: "v".to_owned(), value: TokenValue::Optional(ParamType::Bool, Some(Box::new(TokenValue::Bool(true)))), }, Token { name: "w".to_owned(), value: TokenValue::Ref(Box::new(TokenValue::String("123".to_owned()))), }, ]; let tuple_params = vec![ Param { name: "a".to_owned(), kind: ParamType::Bool, }, Param { name: "b".to_owned(), kind: ParamType::Uint(32), }, ]; let params = vec![ Param { name: "a".to_owned(), kind: ParamType::Uint(32), }, Param { name: "b".to_owned(), kind: ParamType::Int(64), }, Param { name: "c".to_owned(), kind: ParamType::VarUint(32), }, Param { name: "d".to_owned(), kind: ParamType::VarInt(16), }, Param { name: "e".to_owned(), kind: ParamType::Bool, }, Param { name: "f".to_owned(), kind: ParamType::Array(Box::new(ParamType::Bool)), }, Param { name: "g".to_owned(), kind: ParamType::FixedArray(Box::new(ParamType::Int(64)), 2), }, Param { name: "j".to_owned(), kind: ParamType::Tuple(tuple_params), }, Param { name: "k".to_owned(), kind: ParamType::Cell, }, Param { name: "l".to_owned(), kind: ParamType::Address, }, Param { name: "m1".to_owned(), kind: ParamType::Map(Box::new(ParamType::Int(8)), Box::new(ParamType::Bool)), }, Param { name: "m2".to_owned(), kind: ParamType::Map(Box::new(ParamType::Int(8)), Box::new(ParamType::Uint(32))), }, Param { name: "n".to_owned(), kind: ParamType::Bytes, }, Param { name: "o".to_owned(), kind: ParamType::FixedBytes(3), }, Param { name: "p".to_owned(), kind: ParamType::Token, }, Param { name: "q".to_owned(), kind: ParamType::Time, }, Param { name: "r".to_owned(), kind: 
ParamType::Expire, }, Param { name: "s".to_owned(), kind: ParamType::PublicKey, }, Param { name: "t".to_owned(), kind: ParamType::String, }, Param { name: "u".to_owned(), kind: ParamType::Optional(Box::new(ParamType::Int(256))), }, Param { name: "v".to_owned(), kind: ParamType::Optional(Box::new(ParamType::Bool)), }, Param { name: "w".to_owned(), kind: ParamType::Ref(Box::new(ParamType::String)), }, ]; assert_type_check(&tokens, &params); let mut tokens_wrong_type = tokens.clone(); tokens_wrong_type[0] = Token { name: "a".to_owned(), value: TokenValue::Bool(false), }; assert_not_type_check(&tokens_wrong_type, &params); let mut tokens_wrong_int_size = tokens.clone(); tokens_wrong_int_size[0] = Token { name: "a".to_owned(), value: TokenValue::Uint(Uint::new(456, 30)), }; assert_not_type_check(&tokens_wrong_int_size, &params); let mut tokens_wrong_parameters_count = tokens.clone(); tokens_wrong_parameters_count.pop(); assert_not_type_check(&tokens_wrong_parameters_count, &params); let mut tokens_wrong_fixed_array_size = tokens.clone(); tokens_wrong_fixed_array_size[6] = Token { name: "g".to_owned(), value: TokenValue::FixedArray( ParamType::Int(64), vec![TokenValue::Int(big_int.clone()) ]), }; assert_not_type_check(&tokens_wrong_fixed_array_size, &params); let mut tokens_wrong_array_type = tokens.clone(); tokens_wrong_array_type[5] = Token { name: "f".to_owned(), value: TokenValue::Array( ParamType::Bool, vec![ TokenValue::Bool(false), TokenValue::Int(big_int.clone()), ]), }; assert_not_type_check(&tokens_wrong_array_type, &params); let mut tokens_wrong_tuple_type = tokens.clone(); tokens_wrong_tuple_type[9] = Token { name: "f".to_owned(), value: TokenValue::Tuple(vec![ Token { name: "a".to_owned(), value: TokenValue::Int(big_int.clone()), }, Token { name: "b".to_owned(), value: TokenValue::Uint(big_uint.clone()), }, ]), }; assert_not_type_check(&tokens_wrong_tuple_type, &params); } } mod default_values_tests { use {ParamType, TokenValue}; use chrono::prelude::Utc; 
// Tests for the values substituted for message-header parameters when the
// caller does not supply them explicitly.

#[test]
fn test_time_default_value() {
    // The default `time` header is checked to lie within one second of "now"
    // (milliseconds), which tolerates scheduling delays in the test runner.
    if let TokenValue::Time(time) =
        TokenValue::get_default_value_for_header(&ParamType::Time).unwrap()
    {
        let now = Utc::now().timestamp_millis() as u64;
        assert!(time <= now && time >= now - 1000);
    } else {
        panic!("Wrong value type");
    }
}

#[test]
fn test_default_values() {
    // Fixed defaults: `expire` falls back to 0xffffffff (u32::MAX) and the
    // public key header to `None`.
    let param_types = vec![ParamType::Expire, ParamType::PublicKey];
    let default_values = vec![TokenValue::Expire(0xffffffff), TokenValue::PublicKey(None)];
    for (param_type, value) in param_types.iter().zip(default_values) {
        assert_eq!(TokenValue::get_default_value_for_header(&param_type).unwrap(), value);
    }
}
}
use core::ops::Drop; use fs::Stat; /// VNode represents an in-memory inode pub trait VNode: Sync + Send { /// Read from the node fn read(&mut self, data: &mut [u8], offset: u64) -> Result<u64, ::common::error::Error> { return Err(err!(EINVAL)); } /// Write to the node fn write(&mut self, data: &[u8], offset: u64) -> Result<u64, ::common::error::Error> { return Err(err!(EINVAL)); } /// Ioctl fn ioctl(&mut self, op: u32, data: usize) -> Result<u64, ::common::error::Error> { return Err(err!(EINVAL)); } /// Create file fn creat(&mut self, name: &str, option: u32) -> Result<(), ::common::error::Error> { return Err(err!(EINVAL)); } /// Make directory fn mkdir(&mut self, name: &str) -> Result<(), ::common::error::Error> { return Err(err!(EINVAL)); } /// Remove directory fn rmdir(&mut self, name: &str) -> Result<(), ::common::error::Error> { return Err(err!(EINVAL)); } /// Get file stat fn stat(&self) -> Result<Stat, ::common::error::Error> { return Err(err!(EINVAL)); } /// Chmod fn chmod(&mut self, mode: u16) -> Result<(), ::common::error::Error> { return Err(err!(EINVAL)); } /// Chown fn chown(&mut self, owner: u32) -> Result<(), ::common::error::Error> { return Err(err!(EINVAL)); } /// Chgrp fn chgrp(&mut self, group: u32) -> Result<(), ::common::error::Error> { return Err(err!(EINVAL)); } /// Reclaim fn reclaim(&mut self) -> Result<(), ::common::error::Error>; }
use quinn::{ConnectError, ConnectionError, EndpointError, ParseError, ReadToEndError, WriteError}; #[derive(Debug, thiserror::Error)] pub enum Error { #[error("IO Error: {0}")] Io(#[from] std::io::Error), // #[error("ReadToEnd: {0}")] // ReadToEnd(#[from] ReadToEndError), #[error("ConnectionError: {0}")] ConnectionError(#[from] ConnectionError), #[error("ConnectError: {0}")] ConnectError(#[from] ConnectError), #[error("EndpointError: {0}")] EndpointError(#[from] EndpointError), #[error("WriteError: {0}")] WriteError(#[from] WriteError), #[error("ReadToEndError: {0}")] ReadToEndError(#[from] ReadToEndError), #[error("QuinnConnectError: {0}")] QuinnParseError(#[from] ParseError), #[error("ConfigError: {0}")] ConfigError(#[from] quinn_proto::ConfigError), #[error("TLSError: {0}")] TLSError(#[from] rustls::TLSError), #[error("Redis: {0}")] Redis(#[from] redis::RedisError), #[error("ProtocolParseError {0}")] ProtocolParseError(String), #[error("UnknownCodec")] UnknownCodec, #[error("SerdeErased {0}")] SerdeErased(#[from] erased_serde::Error), }
use libip6tc_sys as sys; use std::ffi::CString; use std::mem::{forget, size_of, size_of_val}; use std::net::Ipv6Addr; // use std::os::raw::c_int; use std::alloc::{handle_alloc_error, AllocInit, AllocRef, Global, Layout, ReallocPlacement}; use std::fmt; use std::marker::PhantomData; use std::ptr::NonNull; const ALIGN: usize = size_of::<u64>(); pub trait Entry: fmt::Debug + Clone { type Sys; } impl Entry for Ipv6Addr { type Sys = sys::ip6t_entry; } // impl Entry for sys::ipt_entry {} #[derive(Clone, Debug)] struct RuleBuf<E: Entry> { // ip6t_entry has 8 byte alignment (so does XT_ALIGN) ptr: NonNull<u8>, cap: usize, phantom: PhantomData<(Box<E>, Vec<sys::xt_entry_match>)>, } #[derive(Clone, Debug)] pub struct Rule<E: Entry> { buf: RuleBuf<E>, } #[derive(Clone, Debug)] pub struct RuleBuilder<E: Entry> { buf: RuleBuf<E>, len: usize, } struct Match<T> { m: sys::xt_entry_match, data: T, } impl<E: Entry> Drop for RuleBuf<E> { fn drop(&mut self) { let layout = Layout::from_size_align(self.cap, ALIGN).unwrap(); unsafe { Global.dealloc(self.ptr, layout) }; } } impl RuleBuilder<Ipv6Addr> { pub fn ip6() -> Self { RuleBuilder::new() } fn src(self, ip: Ipv6Addr) -> Self { let mut entry = self.buf.ptr.cast::<sys::ip6t_entry>(); let entry = unsafe { entry.as_mut() }; entry.ipv6.src.__in6_u.__u6_addr16 = ip.segments(); self } } impl<E: Entry> RuleBuilder<E> { fn new() -> Self { // Start with the standard size for IPv6 let cap = size_of::<E::Sys>(); debug_assert_eq!(size_of::<E::Sys>() % ALIGN, 0); // let ptr = unsafe { alloc_zeroed(Layout::from_size_align(cap, ALIGN).unwrap()) }; let layout = Layout::from_size_align(cap, ALIGN).unwrap(); let block = Global .alloc(layout, AllocInit::Zeroed) .unwrap_or_else(|_| handle_alloc_error(layout)); RuleBuilder { buf: RuleBuf { ptr: block.ptr, cap: block.size, phantom: PhantomData, }, len: size_of::<sys::ip6t_entry>(), } } // fn entry(&mut self) -> &mut sys::ip6t_entry { // unsafe { self.buf.ptr.cast().as_mut() } // } fn extend<T>(&mut self) 
-> &mut T { // see XT_ALIGN macro let size = size_of::<T>(); let mask = ALIGN - 1; let size = (size + mask) & !(mask); let old_len = self.len; self.len += size; if self.len > self.buf.cap { let new_cap = 2 * self.len; let layout = Layout::from_size_align(self.buf.cap, ALIGN).unwrap(); dbg!(self.buf.cap, self.len, &layout); let block = unsafe { Global.grow( self.buf.ptr, layout, new_cap, ReallocPlacement::MayMove, AllocInit::Zeroed, ) } .unwrap_or_else(|_| handle_alloc_error(layout)); self.buf.ptr = block.ptr; self.buf.cap = block.size; dbg!(self.buf.ptr, self.buf.cap); } unsafe { &mut *(self.buf.ptr.as_ptr().add(old_len) as *mut _) } } fn match_comment(mut self, comment: &str) -> Self { let comment_c = CString::new(comment).unwrap(); const MAX: usize = sys::XT_MAX_COMMENT_LEN as _; assert!(comment.len() < MAX, "max length is 255 (plus null byte)"); let mut comment = [0i8; MAX]; cast_signed(&mut comment[0..comment_c.as_bytes().len()]) .copy_from_slice(comment_c.as_bytes()); let name_c = CString::new("comment").unwrap(); let mut name = [0i8; 29]; cast_signed(&mut name[0..name_c.as_bytes().len()]).copy_from_slice(name_c.as_bytes()); let m = self.extend::<Match<sys::xt_comment_info>>(); m.m.match_size = size_of_val(&m) as _; m.m.name = name; m.m.revision = 0; m.data = sys::xt_comment_info { comment }; dbg!("comment"); self } fn target_accept(mut self) -> Rule<E> { dbg!("target"); let data = self.extend::<sys::xt_standard_target>(); data.verdict = sys::NF_ACCEPT as _; let rule = Rule { buf: self.buf.clone(), }; forget(self); rule } } fn cast_signed(x: &mut [i8]) -> &mut [u8] { unsafe { &mut *(x as *mut [i8] as *mut [u8]) } } #[cfg(test)] mod tests { use super::*; #[test] fn build_rule() { // let builder = RuleBuilder::ip6(); // builder.src("2001:db8::".parse().unwrap()); // builder.match_comment("hello world"); let builder = RuleBuilder::ip6() .src("2001:db8::".parse().unwrap()) .match_comment("hello world"); let _rule: Rule<_> = builder.target_accept(); } }
#![no_std]
#![no_main]

extern crate panic_halt;

use riscv_rt::entry;
use gd32vf103_hal as hal;
use hal::prelude::*;
use hal::pac as pac;

// Runtime helpers defined elsewhere in the build (assembly/C); only the
// first one is used here, the rest are kept for reference.
extern "C" {
    fn enable_mcycle_minstret();
    // fn disable_mcycle_minstret();
    // fn trap_entry();
    // fn irq_entry();
}

/// Firmware entry point for a GD32VF103 (RISC-V) board: enables the
/// mcycle/minstret counters, drives GPIO pin PA1 low as a push-pull
/// output, then parks in an endless loop.
#[entry]
fn main() -> ! {
    // NOTE(review): FFI call into the external runtime helper; assumed safe
    // to invoke once at startup — confirm against its definition.
    unsafe { enable_mcycle_minstret() };
    // `take` yields the singleton peripherals; unwrap cannot fail on the
    // first call.
    let dp = pac::Peripherals::take().unwrap();
    let mut rcu = dp.RCU.constrain();
    let mut gpioa = dp.GPIOA.split(&mut rcu.apb2);
    let mut pa1 = gpioa.pa1.into_push_pull_output(&mut gpioa.ctl0);
    pa1.set_low().unwrap();
    loop {}
}
//! <https://github.com/EOSIO/eosio.cdt/blob/4985359a30da1f883418b7133593f835927b8046/libraries/eosiolib/contracts/eosio/privileged.hpp#L40-L160>
use crate::{NumBytes, Read, Write};

/// Tunable blockchain configuration that can be changed via consensus
#[derive(
    Read,
    Write,
    NumBytes,
    Clone,
    Default,
    Debug,
    PartialEq,
    PartialOrd,
    serde::Serialize,
    serde::Deserialize,
)]
#[eosio_core_root_path = "crate"]
pub struct BlockchainParameters {
    /// The maximum net usage in instructions for a block
    pub max_block_net_usage: u64,
    /// The target percent (1% == 100, 100% == 10,000) of maximum net usage;
    /// exceeding this triggers congestion handling
    pub target_block_net_usage_pct: u32,
    /// The maximum objectively measured net usage that the chain will
    /// allow regardless of account limits
    pub max_transaction_net_usage: u32,
    /// The base amount of net usage billed for a transaction to cover
    /// incidentals
    pub base_per_transaction_net_usage: u32,
    /// The amount of net usage leeway available whilst executing a
    /// transaction (still checks against new limits without leeway at the end
    /// of the transaction)
    pub net_usage_leeway: u32,
    /// The numerator for the discount on net usage of context-free data
    pub context_free_discount_net_usage_num: u32,
    /// The denominator for the discount on net usage of context-free data
    pub context_free_discount_net_usage_den: u32,
    /// The maximum billable cpu usage (in microseconds) for a block
    pub max_block_cpu_usage: u32,
    /// The target percent (1% == 100, 100% == 10,000) of maximum cpu usage;
    /// exceeding this triggers congestion handling
    pub target_block_cpu_usage_pct: u32,
    /// The maximum billable cpu usage (in microseconds) that the chain will
    /// allow regardless of account limits
    pub max_transaction_cpu_usage: u32,
    /// The minimum billable cpu usage (in microseconds) that the chain
    /// requires
    pub min_transaction_cpu_usage: u32,
    /// Maximum lifetime of a transaction
    pub max_transaction_lifetime: u32,
    /// The number of seconds after the time a deferred transaction can first
    /// execute until it expires
    pub deferred_trx_expiration_window: u32,
    /// The maximum number of seconds that can be imposed as a delay
    /// requirement by authorization checks
    pub max_transaction_delay: u32,
    /// Maximum size of inline action
    pub max_inline_action_size: u32,
    /// Maximum depth of inline action
    pub max_inline_action_depth: u16,
    /// Maximum authority depth
    pub max_authority_depth: u16,
}
/*
 * Copyright Stalwart Labs Ltd. See the COPYING
 * file at the top-level directory of this distribution.
 *
 * Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
 * https://www.apache.org/licenses/LICENSE-2.0> or the MIT license
 * <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your
 * option. This file may not be copied, modified, or distributed
 * except according to those terms.
 */

use crate::{core::set::SetObject, email::EmailAddress, Get, Set};

use super::Identity;

/// Builder-style mutators available while an identity is being created or
/// updated (`Set` state). Each setter stores the value and returns `&mut
/// Self` for chaining.
impl Identity<Set> {
    /// Set the display name of the identity.
    pub fn name(&mut self, name: impl Into<String>) -> &mut Self {
        self.name = Some(name.into());
        self
    }

    /// Set the email address of the identity.
    pub fn email(&mut self, email: impl Into<String>) -> &mut Self {
        self.email = Some(email.into());
        self
    }

    /// Replace the Bcc list, converting each element into an
    /// `EmailAddress`; `None` resets the field.
    pub fn bcc<T, U>(&mut self, bcc: Option<T>) -> &mut Self
    where
        T: Iterator<Item = U>,
        U: Into<EmailAddress>,
    {
        self.bcc = bcc.map(|s| s.map(|s| s.into()).collect());
        self
    }

    /// Replace the Reply-To list, converting each element into an
    /// `EmailAddress`; `None` resets the field.
    pub fn reply_to<T, U>(&mut self, reply_to: Option<T>) -> &mut Self
    where
        T: Iterator<Item = U>,
        U: Into<EmailAddress>,
    {
        self.reply_to = reply_to.map(|s| s.map(|s| s.into()).collect());
        self
    }

    /// Set the plain-text signature.
    pub fn text_signature(&mut self, text_signature: impl Into<String>) -> &mut Self {
        self.text_signature = Some(text_signature.into());
        self
    }

    /// Set the HTML signature.
    pub fn html_signature(&mut self, html_signature: impl Into<String>) -> &mut Self {
        self.html_signature = Some(html_signature.into());
        self
    }
}

impl SetObject for Identity<Set> {
    type SetArguments = ();

    // Build an empty identity; `_create_id` is the client-side creation slot
    // later rendered by `create_id`.
    fn new(_create_id: Option<usize>) -> Self {
        Identity {
            _create_id,
            _state: Default::default(),
            id: None,
            name: None,
            email: None,
            reply_to: Vec::with_capacity(0).into(),
            bcc: Vec::with_capacity(0).into(),
            text_signature: None,
            html_signature: None,
            may_delete: None,
        }
    }

    // Client-assigned creation id, rendered as "c{n}".
    fn create_id(&self) -> Option<String> {
        self._create_id.map(|id| format!("c{}", id))
    }
}

impl SetObject for Identity<Get> {
    type SetArguments = ();

    // A fetched (`Get`) identity is never constructed through the set
    // interface.
    fn new(_create_id: Option<usize>) -> Self {
        unimplemented!()
    }

    fn create_id(&self) -> Option<String> {
        None
    }
}
use std::io::{BufWriter, stdin, stdout, Write};

/// Minimal whitespace-delimited stdin tokenizer for competitive programming.
#[derive(Default)]
struct Scanner {
    // Tokens of the most recently read line, stored reversed so `pop`
    // yields them in input order.
    buffer: Vec<String>,
}

impl Scanner {
    /// Return the next whitespace-separated token parsed as `T`.
    /// Panics on read failure or if the token does not parse.
    fn next<T: std::str::FromStr>(&mut self) -> T {
        loop {
            if let Some(token) = self.buffer.pop() {
                // `ok().expect` rather than `unwrap` because `T::Err` is not
                // required to implement `Debug`.
                return token.parse().ok().expect("Failed parse");
            }
            let mut input = String::new();
            // Fixed typo in the panic message ("Faild" -> "Failed").
            stdin().read_line(&mut input).expect("Failed read");
            self.buffer = input.split_whitespace().rev().map(String::from).collect();
        }
    }
}

fn main() {
    let mut scan = Scanner::default();
    let out = &mut BufWriter::new(stdout());
    let n = scan.next::<usize>();
    let s = scan.next::<String>().chars().collect::<Vec<_>>();
    // For each shift i, print the first index j at which the shifted string
    // agrees with the original (s[j] == s[j + i]) or runs past the end
    // (j + i >= n).
    for i in 1..n {
        for j in 0..n {
            if j + i >= n || s[j] == s[j + i] {
                writeln!(out, "{}", j).ok();
                break;
            }
        }
    }
}
use super::ImageSize;
use deb_architectures::Architecture;
use std::str::FromStr;

/// The hash, size, and path of a file that this release file points to.
#[derive(Debug, Default, Clone, Hash, PartialEq)]
pub struct ReleaseEntry {
    pub sum: String,
    pub size: u64,
    pub path: String,
}

impl ReleaseEntry {
    /// If required, the precise variant of an apt entry can be determined here.
    ///
    /// Malformed / unsupported apt entries will return `None`.
    pub fn variant(&self) -> Option<EntryVariant> {
        entry_variant(&self.path)
    }
}

impl FromStr for ReleaseEntry {
    type Err = &'static str;

    /// Parses a `"<sum> <size> <path>"` line split on any whitespace.
    /// Any fields after the third are silently ignored.
    fn from_str(input: &str) -> Result<Self, Self::Err> {
        let mut iterator = input.split_whitespace();

        let output = Self {
            sum: iterator.next().ok_or("missing sum field")?.to_owned(),
            size: iterator
                .next()
                .ok_or("missing size field")?
                .parse::<u64>()
                .map_err(|_| "size field is not a number")?,
            path: iterator.next().ok_or("missing path field")?.to_owned(),
        };

        Ok(output)
    }
}

/// Defines the kind of file that this apt entry is.
#[derive(Debug, Clone, Hash, PartialEq)]
pub enum EntryVariant {
    Binary(BinaryEntry, Architecture),
    Contents(Architecture, Option<String>),
    Dep11(Dep11Entry),
    Source(SourceEntry),
    I18n(I18nEntry),
}

/// Dep11 entries contain appstream metadata and their required icons.
#[derive(Debug, Clone, Hash, PartialEq)]
pub enum Dep11Entry {
    // The trailing `Option<String>` in these variants carries the file
    // extension (compression suffix), when present.
    Components(Architecture, Option<String>),
    Icons(ImageSize, Option<String>),
}

/// I18n entries contain translations for a given locale.
#[derive(Debug, Clone, Hash, PartialEq)]
pub enum I18nEntry {
    Index,
    Translations(String, Option<String>),
}

/// Binary entries contain the Packages lists, which dpkg and apt use for dependency resolution.
#[derive(Debug, Clone, Hash, PartialEq)]
pub enum BinaryEntry {
    Packages(Option<String>),
    Release,
}

/// Similar to binary entries, but for source packages.
#[derive(Debug, Clone, Hash, PartialEq)]
pub enum SourceEntry {
    Sources(Option<String>),
    Release,
}

// If the apt entry is not a base length, it has an extension.
// Returns the text after the separator that follows the first `len` bytes
// (e.g. `extension_from("Packages.xz", 8)` -> `Some("xz")`).
fn extension_from(input: &str, len: usize) -> Option<String> {
    if input.len() < len + 1 {
        None
    } else {
        Some(input[len + 1..].to_owned())
    }
}

// Apt entries tend to name a variant with a possible extension (compression).
// Splits on the first '.' and parses the left half as `T`.
fn type_with_extension<T: FromStr>(input: &str) -> Option<(T, Option<String>)> {
    let (kind, ext) = match input.find('.') {
        Some(pos) => (&input[..pos], Some(input[pos + 1..].to_owned())),
        None => (input, None),
    };

    kind.parse::<T>().ok().map(|kind| (kind, ext))
}

// Classifies an apt release path by walking its '/'-separated components.
// All slice offsets below are byte lengths of matched literal prefixes
// (e.g. "binary-" is 7 bytes, "dep11/" is 6, "i18n/" is 5, "source/" is 7).
fn entry_variant(original_path: &str) -> Option<EntryVariant> {
    let mut path = original_path;
    // Tracks whether the path contained any '/' at all; the Contents-*
    // fallback at the bottom only applies to flat (slash-free) paths.
    let mut found = false;
    while let Some(pos) = path.find('/') {
        found = true;
        let base = &path[..pos];
        match base {
            _ if base.starts_with("binary-") => {
                // Skip "binary-"; the remainder is "<arch>/<filename>".
                let binary = &path[7..];
                return binary.find('/').and_then(|pos| {
                    binary[..pos].parse::<Architecture>().ok().and_then(|arch| {
                        let filename = &binary[pos + 1..];
                        if filename.starts_with("Packages") {
                            // "Packages" is 8 bytes; anything beyond is the extension.
                            let ext = extension_from(filename, 8);
                            Some(EntryVariant::Binary(BinaryEntry::Packages(ext), arch))
                        } else if filename.starts_with("Release") {
                            Some(EntryVariant::Binary(BinaryEntry::Release, arch))
                        } else {
                            None
                        }
                    })
                });
            }
            "debian-installer" => {
                return None; // TODO
            }
            "dep11" => {
                // Skip "dep11/".
                let path = &path[6..];
                return if path.starts_with("icons-") {
                    type_with_extension::<ImageSize>(&path[6..])
                        .map(|(res, ext)| EntryVariant::Dep11(Dep11Entry::Icons(res, ext)))
                } else if path.starts_with("Components-") {
                    type_with_extension::<Architecture>(&path[11..])
                        .map(|(arch, ext)| EntryVariant::Dep11(Dep11Entry::Components(arch, ext)))
                } else {
                    None
                };
            }
            "i18n" => {
                // Skip "i18n/".
                let path = &path[5..];
                return if path.starts_with("Translation") {
                    // NOTE(review): "Translation" is 11 bytes; slicing at 12
                    // also skips the separator (e.g. the '-' in
                    // "Translation-en"). A bare "Translation" (exactly 11
                    // bytes) would make this slice panic — confirm such
                    // entries cannot occur.
                    type_with_extension::<String>(&path[12..])
                        .map(|(loc, ext)| EntryVariant::I18n(I18nEntry::Translations(loc, ext)))
                } else if path == "Index" {
                    Some(EntryVariant::I18n(I18nEntry::Index))
                } else {
                    None
                };
            }
            "source" => {
                // Skip "source/".
                let path = &path[7..];
                return if path.starts_with("Sources") {
                    // "Sources" is 7 bytes; anything beyond is the extension.
                    let ext = extension_from(path, 7);
                    Some(EntryVariant::Source(SourceEntry::Sources(ext)))
                } else if path == "Release" {
                    Some(EntryVariant::Source(SourceEntry::Release))
                } else {
                    None
                };
            }
            // Unrecognized component: drop it and continue with the rest.
            _ => path = &path[pos + 1..],
        }
    }

    // Flat paths such as "Contents-amd64.gz" carry per-arch content lists.
    if !found && original_path.starts_with("Contents-") {
        return type_with_extension::<Architecture>(&original_path[9..])
            .map(|(arch, ext)| EntryVariant::Contents(arch, ext));
    }

    None
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn entry_parsing() {
        assert_eq!(
            entry_variant("binary-amd64/Packages.xz").expect("bad entry result"),
            EntryVariant::Binary(
                BinaryEntry::Packages(Some("xz".into())),
                Architecture::Amd64
            )
        )
    }
}
/*
Copyright (c) 2023 Uber Technologies, Inc.

 <p>Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
 except in compliance with the License. You may obtain a copy of the License at
 <p>http://www.apache.org/licenses/LICENSE-2.0

 <p>Unless required by applicable law or agreed to in writing, software distributed under the
 License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 express or implied. See the License for the specific language governing permissions and
 limitations under the License.
*/

use crate::{
  models::{default_configs::JAVA, language::PiranhaLanguage},
  tests::substitutions,
};

use super::PiranhaArgumentsBuilder;

// Building arguments with neither a codebase path nor a code snippet must
// be rejected: exactly one source of input is required.
#[test]
#[should_panic(expected = "Invalid Piranha Argument. Missing `path_to_codebase` or `code_snippet`")]
fn piranha_argument_invalid_no_codebase_and_snippet() {
  let _ = PiranhaArgumentsBuilder::default()
    .path_to_configurations("some/path".to_string())
    .language(PiranhaLanguage::from(JAVA))
    .substitutions(substitutions! {"super_interface_name" => "SomeInterface"})
    .build();
}

// Supplying both a codebase path and a code snippet is equally invalid:
// the builder must panic rather than silently pick one.
#[test]
#[should_panic(
  expected = "Invalid Piranha arguments. Please either specify the `path_to_codebase` or the `code_snippet`. Not Both."
)]
fn piranha_argument_invalid_both_codebase_and_snippet() {
  let _ = PiranhaArgumentsBuilder::default()
    .path_to_configurations("some/path".to_string())
    .paths_to_codebase(vec!["dev/null".to_string()])
    .code_snippet("class A { }".to_string())
    .language(PiranhaLanguage::from(JAVA))
    .substitutions(substitutions! {"super_interface_name" => "SomeInterface"})
    .build();
}
mod gen1;
mod gen2;
mod gen3;

/// Runs each generation demo in order, printing a divider line between them.
///
/// Fix: `println!("{}", "###########")` formatted a string literal through a
/// `{}` placeholder for no reason (clippy: `useless_format`-style); printing
/// the literal directly produces identical output.
fn main() {
    gen1::gen1();
    println!("###########");
    gen2::gen2();
    println!("###########");
    gen3::gen3();
}
use lazy_static::*;

/// Parses the environment variable `name` as a comma-separated list of bytes.
///
/// Each element may be decimal (`42`) or hex with a `0x`/`0X` prefix
/// (`0x2a`); surrounding whitespace is ignored. Panics with the variable
/// name if it is unset, or on any element that fails to parse.
///
/// Fix: the manual `starts_with("0x") || starts_with("0X")` check followed
/// by a magic `&trimmed[2..]` slice is replaced with `str::strip_prefix`,
/// which pairs the test and the slice so the offset cannot drift.
fn load_key(name: &str) -> Vec<u8> {
    let raw = std::env::var(name).expect(name);
    raw.split(',')
        .map(|byte| {
            let trimmed = byte.trim();
            if let Some(hex) = trimmed
                .strip_prefix("0x")
                .or_else(|| trimmed.strip_prefix("0X"))
            {
                u8::from_str_radix(hex, 16).unwrap()
            } else {
                trimmed.parse().unwrap()
            }
        })
        .collect()
}

lazy_static! {
    // Keys are supplied via the environment rather than compiled in; each
    // is parsed once on first access and cached for the process lifetime.
    pub static ref EXHEADER_PUBLIC_KEY: Vec<u8> = load_key("EXHEADER_PUBLIC_KEY");
    pub static ref CFA_PUBLIC_KEY: Vec<u8> = load_key("CFA_PUBLIC_KEY");
    pub static ref SCRAMBLER: Vec<u8> = load_key("SCRAMBLER");
    pub static ref KEY_X: Vec<u8> = load_key("KEY_X");
}
extern crate rusty_aes;
use crate::rusty_aes::utils::padder;

/*
Implement PKCS#7 padding
A block cipher transforms a fixed-sized block (usually 8 or 16 bytes) of
plaintext into ciphertext. But we almost never want to transform a single
block; we encrypt irregularly-sized messages.

One way we account for irregularly-sized messages is by padding, creating a
plaintext that is an even multiple of the blocksize. The most popular padding
scheme is called PKCS#7.

So: pad any block to a specific block length, by appending the number of
bytes of padding to the end of the block. For instance,

"YELLOW SUBMARINE" ... padded to 20 bytes would be:

"YELLOW SUBMARINE\x04\x04\x04\x04"
*/

fn main() {
    // "YELLOW SUBMARINE" as raw bytes — 16 bytes, one AES block.
    let key: Vec<u8> = "YELLOW SUBMARINE".chars().map(|x| x as u8).collect();

    // Expected value: the same 16 bytes padded to 20.
    // NOTE(review): the header comment describes PKCS#7, which would append
    // 0x04 0x04 0x04 0x04 here, but this expectation appends 0x80 bytes
    // (reminiscent of ISO/IEC 7816-4 bit padding). Verify which scheme
    // `padder::pad` actually implements — one of the two is mislabeled.
    let mut pad_test: Vec<u8> = "YELLOW SUBMARINE".chars().map(|x| x as u8).collect();
    pad_test.append(&mut vec![0x80, 0x80, 0x80, 0x80]);

    assert_eq!(padder::pad(key, 20), pad_test);
}
pub mod entities; pub trait Searchable { type Credentials; } pub trait Repository<T> { // Since different databases use different Ids, // I think I should be able to parameterize over it. type Id; fn all(&self) -> Vec<T>; fn get(&self, id: &Self::Id) -> Option<T>; fn save(&mut self, data: &T) -> Self::Id; } pub trait SearchableRepository<T: Searchable> : Repository<T> { fn find(&self, id: &[T::Credentials], limit: Option<u32>) -> Vec<T>; } // The structure is very ad-hoc pub mod usecases { use super::entities::{User, Task}; pub fn find_all_done(user: &User) -> Vec<Task> { user.tasks().iter().filter(|t| t.is_done()).cloned().collect() } // Unsure if this belongs one this level or not use super::Repository; pub fn find_all_done_via_id<R: Repository<User>>(repo: &R, id: &R::Id) -> Vec<Task> { let user = repo.get(&id).expect("No such user!"); find_all_done(&user) } #[cfg(test)] mod test { use super::{User, Task}; #[test] fn test_get_all_done() { let mut one_done = Task::new("One"); one_done.finish(); let mut two_done = Task::new("Two"); two_done.finish(); let not_done = Task::new("Tre"); let mut user: User = User::new("Someone"); user.add_task(one_done.clone()); user.add_task(not_done.clone()); user.add_task(two_done.clone()); let found_done = super::find_all_done(&user); assert_eq!(found_done, vec![one_done, two_done]); } } }
use crate::error::{Error, Result}; use crate::input::DbValue; use crate::model::{Model, Models}; use crate::query::Query; use indexmap::IndexMap; use rusqlite::{params, Connection, OptionalExtension, NO_PARAMS}; use std::sync::Arc; pub struct Db { pub conn: Connection, pub models: Arc<Models>, } impl Db { pub fn conn(models: Arc<Models>) -> Result<Self> { let db_file = "/db/db.db3"; let conn = Connection::open(db_file)?; Ok(Db { conn, models }) } pub fn reset_db() -> Result<()> { std::fs::remove_file("/db/db.db3")?; Ok(()) } pub fn get_model(&self, model: &str) -> Result<&Model> { self.models.get(model) } pub fn select_query<T, F>(&self, query: &Query, f: F) -> Result<()> where F: FnMut(&rusqlite::Row) -> Result<T>, { let sql = query.to_sql(self)?; let mut stmt = self.conn.prepare(&sql)?; // todo: Not sure what the difference between // query_and_then/query_map is // One difference is and_then takes error type parameter let rows = stmt.query_and_then(NO_PARAMS, f)?; // Mostly running the iterator, expecting the closure to // contain the needed code for row in rows { row?; } Ok(()) } pub fn insert(&self, model: &str, values: &IndexMap<String, DbValue>) -> Result<()> { let model = self.get_model(model)?; let names = values .keys() .map(|k| k.to_owned()) .collect::<Vec<_>>() .join(", "); let mut params = Vec::with_capacity(values.len()); let mut placeholders = Vec::with_capacity(values.len()); let mut i = 1; for (key, value) in values.iter() { let field = model.get_field(key)?; field.validate_value(value)?; placeholders.push(format!("?{}", i)); params.push(value.to_rusqlite()); i = i + 1; } let sql = format!( "INSERT INTO {} (\n{}\n) VALUES (\n{}\n)", model.name, names, placeholders.join(", ") ); println!("{}", sql); let mut stmt = self.conn.prepare(&sql)?; let _row_id = stmt.insert(params)?; Ok(()) } pub fn create_tables(&self) -> Result<()> { for model in self.models.models.values() { let create_def = model.create_def(); self.conn.execute(&create_def, NO_PARAMS)?; } 
Ok(()) } pub fn set_key(&self, key: &str, value: &str) -> Result<()> { self.conn.execute( " insert into key_values (key, value) values ( ?1, ?2 ) on conflict(key) do update set value = ?2 ", params![key, value], )?; Ok(()) } pub fn get_key(&self, key: &str) -> Result<Option<String>> { let res = self .conn .query_row( " select value from key_values where key = ?1", params![key], |row| row.get(0), ) .optional()?; Ok(res) } pub fn add_error( &self, message: String, request_url: String, user_agent: String, time_created: String, on_client: bool, details: String, ) -> Result<()> { self.conn.execute( " insert into error ( message, request_url, user_agent, time_created, on_client, details ) values ( ?1, ?2, ?3, ?4, ?5, ?6 ) ", params![ message, request_url, user_agent, time_created, on_client, details ], )?; Ok(()) } }
use once_cell::sync::Lazy;
use regex::Regex;
use syn::{
    braced, parenthesized,
    parse::{Parse, ParseStream},
    punctuated::Punctuated,
    token, Attribute, Error, Field, Ident, LitStr, Result, Token, Visibility,
};

/// A `struct` item with named fields, captured piece by piece so the
/// attributes, visibility, name, and field list can each be inspected.
pub struct ItemStruct {
    pub attrs: Vec<Attribute>,
    pub vis: Visibility,
    pub struct_token: Token![struct],
    pub ident: Ident,
    pub brace_token: token::Brace,
    pub fields: Punctuated<Field, Token![,]>,
}

impl Parse for ItemStruct {
    // Parses outer attributes, visibility, the `struct` keyword, the name,
    // and finally the braced, comma-separated named fields.
    fn parse(input: ParseStream) -> Result<Self> {
        let content;
        Ok(ItemStruct {
            attrs: input.call(Attribute::parse_outer)?,
            vis: input.parse()?,
            struct_token: input.parse()?,
            ident: input.parse()?,
            brace_token: braced!(content in input),
            fields: content.parse_terminated(Field::parse_named)?,
        })
    }
}

/// A parenthesized, comma-separated list of attribute options,
/// e.g. `(rename = "foo", ignore)`.
pub struct AttrList {
    pub paren_token: token::Paren,
    pub options: Punctuated<AttrOption, Token![,]>,
}

impl Parse for AttrList {
    fn parse(input: ParseStream) -> Result<Self> {
        let content;
        Ok(Self {
            paren_token: parenthesized!(content in input),
            options: content.parse_terminated(AttrOption::parse)?,
        })
    }
}

/// One option inside an attribute list: either `rename = "…"` or `ignore`.
pub enum AttrOption {
    Rename(RenameAttr),
    Ignore(IgnoreAttr),
}

impl AttrOption {
    /// Returns the rename payload if this option is a `rename`.
    pub fn as_rename(&self) -> Option<&RenameAttr> {
        if let Self::Rename(v) = self {
            Some(v)
        } else {
            None
        }
    }

    /// Returns the ignore payload if this option is an `ignore`.
    pub fn as_ignore(&self) -> Option<&IgnoreAttr> {
        if let Self::Ignore(v) = self {
            Some(v)
        } else {
            None
        }
    }
}

impl From<RenameAttr> for AttrOption {
    fn from(v: RenameAttr) -> Self {
        Self::Rename(v)
    }
}

impl From<IgnoreAttr> for AttrOption {
    fn from(v: IgnoreAttr) -> Self {
        Self::Ignore(v)
    }
}

/// The tokens of a `rename = "new_name"` option, plus the validated
/// string value extracted from the literal.
pub struct RenameAttr {
    pub ident: Ident,
    pub eq_token: Token![=],
    pub lit: LitStr,
    pub rename: String,
}

/// The bare `ignore` option (just its identifier token).
pub struct IgnoreAttr {
    pub ident: Ident,
}

impl Parse for AttrOption {
    fn parse(input: ParseStream) -> Result<Self> {
        let ident: Ident = input.parse()?;

        let attr: Self = match ident.to_string().as_str() {
            "rename" => {
                // Compiled once on first use; `^…$` anchors mean the whole
                // replacement name must consist of word characters.
                static NAME_REGEX: Lazy<Regex> =
                    Lazy::new(|| Regex::new(r"^[[:word:]]+$").unwrap());

                let eq_token = input.parse()?;
                let lit: LitStr = input.parse()?;
                let rename = lit.value();
                // Reject invalid names, reporting the error at the string
                // literal's span; the match itself is not needed.
                NAME_REGEX
                    .find(&rename)
                    .ok_or_else(|| Error::new(lit.span(), "invalid name"))?;

                RenameAttr {
                    ident,
                    eq_token,
                    lit,
                    rename,
                }
                .into()
            }
            "ignore" => IgnoreAttr { ident }.into(),
            // Any other identifier is an unknown option.
            name => {
                return Err(Error::new(
                    ident.span(),
                    format!("invalid attribute '{}'", name),
                ));
            }
        };

        Ok(attr)
    }
}
fn get_digits(input: u32) -> Vec<u32> { input .to_string() .chars() .map(|d| d.to_digit(10).unwrap()) .collect() } fn check_adjacent_digits(input: &[u32]) -> bool { for i in 0..input.len() - 1 { if input[i] == input[i + 1] { return true; } } false } fn check_strict_adjacent_digits(input: &[u32]) -> bool { // Check that this is at least normally allowed if !check_adjacent_digits(input) { return false; } // We must have some adjacent digits, so ensure that the minimum length is 2 let mut index: usize = 0; let mut min_len: u32 = std::u32::MAX; while index < 6 { let digit = input[index]; let mut count: u32 = 1; index += 1; while index < 6 && input[index] == digit { index += 1; count += 1; } if min_len > count && count > 1 { min_len = count; } } min_len == 2 } fn check_no_decreasing_digits(input: &[u32]) -> bool { for i in 0..input.len() - 1 { if input[i] > input[i + 1] { return false; } } true } fn check_validity(input: u32) -> bool { // Get the digits let digits = get_digits(input); check_strict_adjacent_digits(&digits) && check_no_decreasing_digits(&digits) } fn count_occurances(min: u32, max: u32) -> u32 { let mut occurances: u32 = 0; for i in min..=max { if check_validity(i) { occurances += 1; dbg!(i); } } occurances } fn main() { dbg!(count_occurances(171309, 643603)); } #[test] fn get_digits_test() { assert_eq!(get_digits(114567), vec![1, 1, 4, 5, 6, 7]); } #[test] fn check_adjacent_digits_test() { assert!(check_adjacent_digits(&vec![1, 1])); assert!(check_adjacent_digits(&vec![1, 1, 2])); assert!(check_adjacent_digits(&vec![1, 2, 2])); assert!(!check_adjacent_digits(&vec![2, 1])); } #[test] fn check_strict_adjacent_digits_test() { assert!(check_strict_adjacent_digits(&vec![1, 1, 2, 2, 3, 3])); assert!(check_strict_adjacent_digits(&vec![1, 1, 1, 1, 2, 2])); assert!(!check_strict_adjacent_digits(&vec![1, 2, 3, 4, 4, 4])); } #[test] fn check_no_decreasing_digits_test() { assert!(check_no_decreasing_digits(&vec![1, 1])); assert!(check_no_decreasing_digits(&vec![1, 
1, 1])); assert!(check_no_decreasing_digits(&vec![1, 1, 2])); assert!(!check_no_decreasing_digits(&vec![1, 3, 2])); } #[test] fn check_validity_test() { assert!(check_validity(111111)); assert!(!check_validity(223450)); assert!(!check_validity(123789)); }
// If you'd like to wait for a `process::Child` to finish, you must call // `Child::wait`, which will return a `process::ExitStatus` use std::process::Command; fn main() { let mut child = Command::new("sleep").arg("5").spawn().unwrap(); let _result = child.wait().unwrap(); println!("reached end of main"); }
use types::*;
use board::Board;

/// Interface a game bot implements to receive engine state and produce moves.
pub trait Bot {
    /// Returns the move the bot chooses to play.
    fn get_move(&mut self) -> Move;
    /// Notifies the bot of the current round number.
    fn update_round(&mut self, round: u32);
    /// Hands the bot the latest board state.
    fn update_board(&mut self, board: Board);
    /// Passes one engine setting to the bot.
    fn set_setting(&mut self, setting: Setting);
}
use super::lex; use ast_types::*; use nom::*; use std::{mem, str}; // --------------- Some Helper Macros ------------------ // implement the parseable trait for a type, using a interface // similar to named! macro_rules! impl_parse { ($t:ty, $input:ident, $d:block) => ( impl Parseable for $t { fn parse<'a, 'b>($input: &'a[LexicalElement<'b>]) -> IResult<&'a[LexicalElement<'b>], Self>{ $d } } ); ($t:ty, $submac:ident!( $($args:tt)* )) => ( impl Parseable for $t { fn parse<'a, 'b>(i: &'a[LexicalElement<'b>]) -> IResult<&'a[LexicalElement<'b>], Self>{ $submac!(i, $($args)*) } } ); } macro_rules! le_tag { ($i:expr, $inp:pat) => {{ if $i.len() < 1 { IResult::Error(ErrorKind::Custom(0)) } else if let $inp = $i[0] { IResult::Done(&$i[1..], $i[0].clone()) } else { IResult::Error(ErrorKind::Tag) } }}; ($i:expr, $inp:pat => $r:expr) => {{ if $i.len() < 1 { IResult::Error(ErrorKind::Custom(0)) } else if let $inp = $i[0] { IResult::Done(&$i[1..], $r) } else { IResult::Error(ErrorKind::Tag) } }}; } // ---------------- Actual parsers --------------------- impl_parse!( UnOp, alt!( le_tag!(LexicalElement::Minus => UnOp::Neg) | le_tag!(LexicalElement::Keyword("not") => UnOp::Not) | le_tag!(LexicalElement::Hash => UnOp::Len) ) ); impl_parse!( BinOp, alt!( le_tag!(LexicalElement::Plus => BinOp::Plus ) | le_tag!(LexicalElement::Minus => BinOp::Minus ) | le_tag!(LexicalElement::Mult => BinOp::Mult ) | le_tag!(LexicalElement::Div => BinOp::Div ) | le_tag!(LexicalElement::Caret => BinOp::Pow ) | le_tag!(LexicalElement::Mod => BinOp::Mod ) | le_tag!(LexicalElement::Concat => BinOp::Concat ) | le_tag!(LexicalElement::LessEqual => BinOp::LessEqual ) | le_tag!(LexicalElement::LessThan => BinOp::LessThan ) | le_tag!(LexicalElement::GreaterThan => BinOp::GreaterThan ) | le_tag!(LexicalElement::GreaterEqual => BinOp::GreaterEqual) | le_tag!(LexicalElement::Equals => BinOp::Equals ) | le_tag!(LexicalElement::NotEquals => BinOp::NotEquals ) | le_tag!(LexicalElement::Keyword("and") => BinOp::And ) 
| le_tag!(LexicalElement::Keyword("or") => BinOp::Or ) ) ); impl_parse!( FieldSep, alt!( le_tag!(LexicalElement::Semicolon => FieldSep::Semicolon) | le_tag!(LexicalElement::Comma => FieldSep::Comma) ) ); fn escape_string_literal(a: &str) -> String { // TODO: make this actually escape a.to_string() } fn parse_number(a: &str) -> f64 { //TODO: Make this handle hex literals a.parse::<f64>().unwrap() } impl_parse!( SimpleExp, alt!( le_tag!(LexicalElement::Keyword("nil") => SimpleExp::Nil) | le_tag!(LexicalElement::Keyword("false") => SimpleExp::False) | le_tag!(LexicalElement::Keyword("true") => SimpleExp::True) | le_tag!(LexicalElement::Elipsis => SimpleExp::Elipsis) | le_tag!(LexicalElement::StringLiteral(a) => SimpleExp::StringLiteral(escape_string_literal(a))) | le_tag!(LexicalElement::Number(a) => SimpleExp::Number(parse_number(a))) | do_parse!( tc: call!(TableConstructor::parse) >> (SimpleExp::TableConstructor(Box::new(tc))) ) | do_parse!(px: call!(PrefixExp::parse) >> (SimpleExp::PrefixExp(Box::new(px)))) ) ); impl_parse!( Field, alt!( do_parse!( n: le_tag!(LexicalElement::Identifier(s) => s) >> le_tag!(LexicalElement::Assign) >> v: call!(Exp::parse) >> (Field::NamedExp(n.to_string(), Box::new(v))) ) | do_parse!(v: call!(Exp::parse) >> (Field::Exp(Box::new(v)))) | do_parse!( le_tag!(LexicalElement::OpenSquare) >> v1: call!(Exp::parse) >> le_tag!(LexicalElement::CloseSquare) >> le_tag!(LexicalElement::Assign) >> v2: call!(Exp::parse) >> (Field::IndexExp(Box::new(v1), Box::new(v2))) ) ) ); impl_parse!( TableConstructor, do_parse!( le_tag!(LexicalElement::OpenBrace) >> r: opt!(do_parse!( h: call!(Field::parse) >> r: many0!(do_parse!( call!(FieldSep::parse) >> h: call!(Field::parse) >> (h) )) >> opt!(call!(FieldSep::parse)) >> ({ let mut rv = Vec::new(); let mut r = r; rv.push(h); rv.append(&mut r); rv }) )) >> opt!(call!(FieldSep::parse)) >> le_tag!(LexicalElement::CloseBrace) >> (TableConstructor(match r { Some(v) => v, None => Vec::new(), })) ) ); impl_parse!( 
NameList, do_parse!( name: le_tag!(LexicalElement::Identifier(a) => a.to_string()) >> rest: many0!(do_parse!( le_tag!(LexicalElement::Comma) >> name: le_tag!(LexicalElement::Identifier(a) => a.to_string()) >> (name) )) >> ({ let mut rv = Vec::new(); let mut rest = rest; rv.push(name); rv.append(&mut rest); NameList(rv) }) ) ); impl_parse!( ExpList, do_parse!( exp: call!(Exp::parse) >> rest: many0!(do_parse!( le_tag!(LexicalElement::Comma) >> exp: call!(Exp::parse) >> (exp) )) >> ({ let mut rv = Vec::new(); let mut rest = rest; rv.push(exp); rv.append(&mut rest); ExpList(rv) }) ) ); impl_parse!( Args, alt!( do_parse!( le_tag!(LexicalElement::OpenParen) >> el: call!(ExpList::parse) >> le_tag!(LexicalElement::CloseParen) >> (Args::ExpList(Box::new(el))) ) | do_parse!( le_tag!(LexicalElement::OpenParen) >> le_tag!(LexicalElement::CloseParen) >> (Args::Empty) ) | do_parse!(tc: call!(TableConstructor::parse) >> (Args::TableConstructor(Box::new(tc)))) | le_tag!(LexicalElement::StringLiteral(a) => (Args::StringLiteral(escape_string_literal(a)))) ) ); impl_parse!( ParList, alt!( le_tag!(LexicalElement::Elipsis => ParList(Vec::new(), true)) | do_parse!( names: call!(NameList::parse) >> elipsis: opt!(do_parse!( le_tag!(LexicalElement::Comma) >> le_tag!(LexicalElement::Elipsis) >> () )) >> ({ let NameList(names) = names; ParList(names, Some(()) == elipsis) }) ) ) ); impl_parse!( FuncName, do_parse!( name: le_tag!(LexicalElement::Identifier(a) => a.to_string()) >> rest: many0!(do_parse!( le_tag!(LexicalElement::Dot) >> name: le_tag!(LexicalElement::Identifier(a) => a.to_string()) >> (name) )) >> last_name: opt!(do_parse!( le_tag!(LexicalElement::Colon) >> name: le_tag!(LexicalElement::Identifier(a) => a) >> (name.to_string()) )) >> ({ let mut rv = Vec::new(); let mut rest = rest; rv.push(name); rv.append(&mut rest); FuncName(rv, last_name) }) ) ); impl_parse!( FuncBody, do_parse!( le_tag!(LexicalElement::OpenParen) >> params: opt!(call!(ParList::parse)) >> 
le_tag!(LexicalElement::CloseParen) >> chunk: call!(Chunk::parse) >> le_tag!(LexicalElement::Keyword("end")) >> ({ let params = if let Some(v) = params { v } else { ParList(Vec::new(), false) }; FuncBody(Box::new(params), Box::new(chunk)) }) ) ); impl_parse!( Function, do_parse!( le_tag!(LexicalElement::Keyword("function")) >> fb: call!(FuncBody::parse) >> (Function(fb)) ) ); impl_parse!( NameAndArgs, do_parse!( name: opt!(do_parse!( le_tag!(LexicalElement::Colon) >> name: le_tag!(LexicalElement::Identifier(s) => s) >> (name.to_string()) )) >> args: call!(Args::parse) >> (NameAndArgs(name, args)) ) ); impl_parse!( VarSuffix, alt!( do_parse!( naa: many0!(call!(NameAndArgs::parse)) >> le_tag!(LexicalElement::OpenSquare) >> exp: call!(Exp::parse) >> le_tag!(LexicalElement::CloseSquare) >> (VarSuffix::Index(naa, exp)) ) | do_parse!( naa: many0!(call!(NameAndArgs::parse)) >> le_tag!(LexicalElement::Dot) >> name: le_tag!(LexicalElement::Identifier(s) => s.to_string()) >> (VarSuffix::Member(naa, name)) ) ) ); impl_parse!( Var, alt!( do_parse!( name: le_tag!(LexicalElement::Identifier(s) => s.to_string()) >> vss: many0!(call!(VarSuffix::parse)) >> (Var::Name(name, vss)) ) | do_parse!( le_tag!(LexicalElement::OpenParen) >> exp: call!(Exp::parse) >> le_tag!(LexicalElement::CloseParen) >> vss: many1!(call!(VarSuffix::parse)) >> (Var::Exp(exp, vss)) ) ) ); impl_parse!( VarList, do_parse!( var: call!(Var::parse) >> rest: many0!(do_parse!( le_tag!(LexicalElement::Comma) >> name: call!(Var::parse) >> (name) )) >> ({ let mut rv = Vec::new(); let mut rest = rest; rv.push(var); rv.append(&mut rest); VarList(rv) }) ) ); impl_parse!( Exp, alt!( do_parse!( uo: call!(UnOp::parse) >> ex: call!(Exp::parse) >> (Exp::UnaryOp(uo, Box::new(ex))) ) | do_parse!( e1: call!(SimpleExp::parse) >> bo: call!(BinOp::parse) >> e2: call!(Exp::parse) >> (Exp::BinaryOp(Box::new(Exp::SimpleExp(Box::new(e1))), bo, Box::new(e2))) ) | do_parse!(se: call!(SimpleExp::parse) >> (Exp::SimpleExp(Box::new(se)))) ) 
); impl_parse!( VarOrExp, alt!( do_parse!(var: call!(Var::parse) >> (VarOrExp::Var(var))) | do_parse!( le_tag!(LexicalElement::OpenParen) >> exp: call!(Exp::parse) >> le_tag!(LexicalElement::CloseParen) >> (VarOrExp::Exp(exp)) ) ) ); impl_parse!( PrefixExp, do_parse!( voe: call!(VarOrExp::parse) >> naa: many0!(call!(NameAndArgs::parse)) >> (PrefixExp(Box::new(voe), naa)) ) ); impl_parse!( FunctionCall, do_parse!( voe: call!(VarOrExp::parse) >> naa: many1!(call!(NameAndArgs::parse)) >> (FunctionCall(Box::new(voe), naa)) ) ); impl_parse!( Stat, alt!( do_parse!( vl: call!(VarList::parse) >> le_tag!(LexicalElement::Assign) >> el: call!(ExpList::parse) >> (Stat::Assign(Box::new(vl), Box::new(el))) ) | do_parse!(fc: call!(FunctionCall::parse) >> (Stat::FunctionCall(Box::new(fc)))) | do_parse!( le_tag!(LexicalElement::Keyword("do")) >> chunk: call!(Chunk::parse) >> le_tag!(LexicalElement::Keyword("end")) >> (Stat::DoBlock(Box::new(chunk))) ) | do_parse!( le_tag!(LexicalElement::Keyword("while")) >> exp: call!(Exp::parse) >> le_tag!(LexicalElement::Keyword("do")) >> chunk: call!(Chunk::parse) >> le_tag!(LexicalElement::Keyword("end")) >> (Stat::WhileBlock(Box::new(exp), Box::new(chunk))) ) | do_parse!( le_tag!(LexicalElement::Keyword("repeat")) >> chunk: call!(Chunk::parse) >> le_tag!(LexicalElement::Keyword("until")) >> exp: call!(Exp::parse) >> (Stat::RepeatBlock(Box::new(exp), Box::new(chunk))) ) | do_parse!( le_tag!(LexicalElement::Keyword("if")) >> exp1: call!(Exp::parse) >> le_tag!(LexicalElement::Keyword("then")) >> chunk1: call!(Chunk::parse) >> rest: many0!(do_parse!( le_tag!(LexicalElement::Keyword("elseif")) >> exp: call!(Exp::parse) >> le_tag!(LexicalElement::Keyword("then")) >> chunk: call!(Chunk::parse) >> ((exp, chunk)) )) >> else_chunk: opt!(do_parse!( le_tag!(LexicalElement::Keyword("else")) >> ch: call!(Chunk::parse) >> (ch) )) >> le_tag!(LexicalElement::Keyword("end")) >> ({ let mut v = vec![(exp1, chunk1)]; let mut rest = rest; v.append(&mut rest); 
Stat::IfElseBlock(v, else_chunk.map(|x| Box::new(x))) }) ) | do_parse!( le_tag!(LexicalElement::Keyword("for")) >> name: le_tag!(LexicalElement::Identifier(s) => s.to_string()) >> le_tag!(LexicalElement::Assign) >> e1: call!(Exp::parse) >> le_tag!(LexicalElement::Comma) >> e2: call!(Exp::parse) >> e3: opt!(do_parse!( le_tag!(LexicalElement::Comma) >> e: call!(Exp::parse) >> (e) )) >> le_tag!(LexicalElement::Keyword("do")) >> ch: call!(Chunk::parse) >> le_tag!(LexicalElement::Keyword("end")) >> ({ let mut ev = vec![e1, e2]; if let Some(v) = e3 { ev.push(v); } Stat::ForRangeBlock(name, ev, Box::new(ch)) }) ) | do_parse!( le_tag!(LexicalElement::Keyword("for")) >> nl: call!(NameList::parse) >> le_tag!(LexicalElement::Keyword("in")) >> el: call!(ExpList::parse) >> le_tag!(LexicalElement::Keyword("do")) >> ch: call!(Chunk::parse) >> le_tag!(LexicalElement::Keyword("end")) >> (Stat::ForInBlock(Box::new(nl), Box::new(el), Box::new(ch))) ) | do_parse!( le_tag!(LexicalElement::Keyword("function")) >> name: call!(FuncName::parse) >> body: call!(FuncBody::parse) >> (Stat::FunctionDec(name, Box::new(body))) ) | do_parse!( le_tag!(LexicalElement::Keyword("local")) >> le_tag!(LexicalElement::Keyword("function")) >> name: le_tag!(LexicalElement::Identifier(s) => s.to_string()) >> body: call!(FuncBody::parse) >> (Stat::LocalFunctionDec(name, Box::new(body))) ) | do_parse!( le_tag!(LexicalElement::Keyword("local")) >> nl: call!(NameList::parse) >> el: opt!(do_parse!( le_tag!(LexicalElement::Assign) >> el: call!(ExpList::parse) >> (el) )) >> (Stat::LocalAssign( Box::new(nl), (if let Some(el) = el { el } else { ExpList(Vec::new()) }) )) ) ) ); // all ok beyond this point impl_parse!( LastStat, alt!( le_tag!(LexicalElement::Keyword("break") => LastStat::Break) | do_parse!( le_tag!(LexicalElement::Keyword("return")) >> es: opt!(call!(ExpList::parse)) >> (match es { Some(es) => LastStat::Return(Box::new(es)), None => LastStat::Return(Box::new(ExpList(Vec::new()))), }) ) ) ); 
// A chunk is zero or more statements (each optionally followed by `;`),
// then an optional final statement (also optionally followed by `;`).
impl_parse!(
    Chunk,
    do_parse!(
        ss: many0!(do_parse!(
            s: call!(Stat::parse) >> opt!(le_tag!(LexicalElement::Semicolon)) >> (s)
        )) >> ls: opt!(do_parse!(
            s: call!(LastStat::parse) >> opt!(le_tag!(LexicalElement::Semicolon)) >> (s)
        )) >> (Chunk(ss, ls))
    )
);

/// AST visitor that rotates freshly-parsed expression trees so they respect
/// operator precedence and associativity (the grammar parses operators
/// without precedence; this pass fixes the shape afterwards).
#[derive(Default)]
struct ExpBalanceVisitor;

/// Temporarily moves the value out of `$e` (replacing it with the placeholder
/// `$p`), binds it mutably to `$me` inside `$b`, then swaps the (possibly
/// rebuilt) value back into `$e`. Evaluates to the result of `$b`.
macro_rules! move_out {
    ($p:expr => $e:ident => $me:ident, $b:block) => {{
        let mut $me = mem::replace($e, $p);
        let r = $b;
        mem::swap(&mut $me, $e);
        r
    }};
}

use self::LeftOrRight::{Left, Right};
use std::cmp::Ord;
use std::cmp::Ordering::*;

impl ExpBalanceVisitor {
    /// Pushes a unary operator down the left spine of `e` until it reaches a
    /// position where it binds at least as tightly as the operator below it.
    fn move_unary_op_down(&mut self, e: Exp, uo: UnOp) -> Exp {
        match e {
            Exp::BinaryOp(e1, bo, e2) => {
                if UnOp::precedence() > bo.precedence() {
                    // The unary op binds tighter: it belongs deeper on the left.
                    Exp::BinaryOp(Box::new(self.move_unary_op_down(*e1, uo)), bo, e2)
                } else {
                    Exp::UnaryOp(uo, Box::new(Exp::BinaryOp(e1, bo, e2)))
                }
            }
            e => Exp::UnaryOp(uo, Box::new(e)),
        }
    }

    /// Re-inserts binary operator `bo` with right operand `re` into tree `e`,
    /// descending along `e`'s right spine while `bo` binds tighter (or ties
    /// with a right-associative operator).
    fn descend_binary_op_on_right(&mut self, e: Exp, bo: BinOp, re: Exp) -> Exp {
        let mut recurse_right = |e, bo, re| match e {
            Exp::BinaryOp(ile, ibo, ire) => Exp::BinaryOp(
                ile,
                ibo,
                Box::new(self.descend_binary_op_on_right(*ire, bo, re)),
            ),
            Exp::UnaryOp(iuo, ire) => {
                Exp::UnaryOp(iuo, Box::new(self.descend_binary_op_on_right(*ire, bo, re)))
            }
            _ => unreachable!(),
        };
        match bo.precedence().cmp(&e.precedence()) {
            Greater => recurse_right(e, bo, re),
            Equal => {
                if bo.associativity() == Right {
                    recurse_right(e, bo, re)
                } else {
                    Exp::BinaryOp(Box::new(e), bo, Box::new(re))
                }
            }
            Less => Exp::BinaryOp(Box::new(e), bo, Box::new(re)),
        }
    }

    /// Mirror image of `descend_binary_op_on_right`: inserts `bo` with left
    /// operand `le`, descending along `e`'s left spine.
    fn descend_binary_op_on_left(&mut self, e: Exp, bo: BinOp, le: Exp) -> Exp {
        let mut recurse_left = |e, bo, le| match e {
            Exp::BinaryOp(ile, ibo, ire) => Exp::BinaryOp(
                Box::new(self.descend_binary_op_on_left(*ile, bo, le)),
                ibo,
                ire,
            ),
            // A unary op on the left spine stops the descent.
            e @ Exp::UnaryOp(_, _) => Exp::BinaryOp(Box::new(le), bo, Box::new(e)),
            _ => unreachable!(),
        };
        match bo.precedence().cmp(&e.precedence()) {
            Greater => recurse_left(e, bo, le),
            Equal => {
                if bo.associativity() == Left {
                    recurse_left(e, bo, le)
                } else {
                    Exp::BinaryOp(Box::new(le), bo, Box::new(e))
                }
            }
            Less => Exp::BinaryOp(Box::new(le), bo, Box::new(e)),
        }
    }

    // This is gonna be awful from a performance perspective, but I had no idea
    // how else to do this
    //
    // Detaches the innermost expression along the left spine whose parent
    // operator binds looser than `precedence`; returns the detached
    // subexpression plus a closure that rebuilds the remaining tree around a
    // replacement for it.
    fn destructure_left_subtree(
        &mut self,
        e: Exp,
        precedence: u8,
    ) -> (Exp, Box<FnMut(Exp) -> Exp>) {
        match e {
            e @ Exp::SimpleExp(_) | e @ Exp::UnaryOp(_, _) => (e, Box::new(move |e: Exp| e)),
            Exp::BinaryOp(le, op, re) => {
                macro_rules! descend_left {
                    () => {{
                        // `Option` + `take()` lets the FnMut closure move `re`
                        // out on its (single) invocation.
                        let mut re = Some(re);
                        let (ie, mut ifn) = self.destructure_left_subtree(*le, precedence);
                        (
                            ie,
                            Box::new(move |pe: Exp| {
                                Exp::BinaryOp(Box::new(ifn(pe)), op, re.take().unwrap())
                            }),
                        )
                    }};
                }
                match op.precedence().cmp(&precedence) {
                    Less => descend_left!(),
                    Equal if op.associativity() == Left => descend_left!(),
                    _ => (Exp::BinaryOp(le, op, re), Box::new(move |e: Exp| e)),
                }
            }
        }
    }

    /// Mirror image of `destructure_left_subtree`, walking the right spine.
    fn destructure_right_subtree(
        &mut self,
        e: Exp,
        precedence: u8,
    ) -> (Exp, Box<FnMut(Exp) -> Exp>) {
        match e {
            e @ Exp::SimpleExp(_) => (e, Box::new(move |e: Exp| e)),
            Exp::UnaryOp(uo, ie) => match UnOp::precedence().cmp(&precedence) {
                Greater => {
                    let (ie, mut ifn) = self.destructure_right_subtree(*ie, precedence);
                    (
                        ie,
                        Box::new(move |pe: Exp| Exp::UnaryOp(uo, Box::new(ifn(pe)))),
                    )
                }
                Equal => unreachable!(),
                Less => (Exp::UnaryOp(uo, ie), Box::new(move |e: Exp| e)),
            },
            Exp::BinaryOp(le, op, re) => {
                macro_rules! descend_right {
                    () => {{
                        let mut le = Some(le);
                        let (ie, mut ifn) = self.destructure_right_subtree(*re, precedence);
                        (
                            ie,
                            Box::new(move |pe: Exp| {
                                Exp::BinaryOp(le.take().unwrap(), op, Box::new(ifn(pe)))
                            }),
                        )
                    }};
                }
                match op.precedence().cmp(&precedence) {
                    Less => descend_right!(),
                    Equal if op.associativity() == Right => descend_right!(),
                    _ => (Exp::BinaryOp(le, op, re), Box::new(move |e: Exp| e)),
                }
            }
        }
    }
}

impl ASTVisitor<u8> for ExpBalanceVisitor {
    /// Rebalances `e` bottom-up and returns its (post-rebalance) precedence.
    fn visit_exp(&mut self, e: &mut Exp) -> Option<u8> {
        let default_exp = || Exp::SimpleExp(Box::new(SimpleExp::Nil));
        move_out!(default_exp() => e => me, {
            let ep = me.precedence();
            me = match me {
                Exp::SimpleExp(mut se) => {
                    self.visit_simple_exp(se.as_mut());
                    Exp::SimpleExp(se)
                },
                Exp::UnaryOp(op, mut ce) => {
                    self.move_unary_op_down(*ce, op)
                },
                Exp::BinaryOp(mut ce1, op, mut ce2) => {
                    // Children are rebalanced first; their precedences drive
                    // the rotation decision below.
                    let ce1p = self.visit_exp(ce1.as_mut()).unwrap();
                    let ce2p = self.visit_exp(ce2.as_mut()).unwrap();
                    let comparisons = (ep.cmp(&ce1p), ep.cmp(&ce2p));
                    if comparisons == (Greater, Greater) {
                        // `op` binds tighter than both children: it must sink
                        // to the bottom, joining ce1's rightmost and ce2's
                        // leftmost pieces.
                        let bottom_side = match ce1p.cmp(&ce2p) {
                            Greater => { Left },
                            Equal => { ce1.associativity().unwrap_or(Left) },
                            Less => { Right },
                        };
                        let (ce1, mut lf) = self.destructure_right_subtree(*ce1, ep);
                        let (ce2, mut rf) = self.destructure_left_subtree(*ce2, ep);
                        let bottom_exp = Exp::BinaryOp(Box::new(ce1), op, Box::new(ce2));
                        match bottom_side {
                            Left => rf(lf(bottom_exp)),
                            Right => lf(rf(bottom_exp)),
                        }
                    } else {
                        // At most one child binds looser than `op`: descend on
                        // that side only (or keep the node as-is).
                        let descend_dir = {
                            match comparisons {
                                (Greater, Greater) => unreachable!(),
                                (Equal, Greater) => Some(Right),
                                (Less, Greater) => Some(Right),
                                (Greater, Equal) => Some(Left),
                                (Equal, Equal) => Some(op.associativity().invert()),
                                (Less, Equal) => if op.associativity() == Left { Some(Right) } else { None },
                                (Greater, Less) => Some(Left),
                                (Equal, Less) => if op.associativity() == Right { Some(Left) } else { None },
                                (Less, Less) => None,
                            }
                        };
                        match descend_dir {
                            Some(Left) => {
                                self.descend_binary_op_on_right(*ce1, op, *ce2)
                            },
                            Some(Right) => {
                                self.descend_binary_op_on_left(*ce2, op, *ce1)
                            },
                            None => Exp::BinaryOp(ce1, op, ce2),
                        }
                    }
                },
            };
            Some(me.precedence())
        })
    }
}

/// Opaque parse failure; the nom error detail is intentionally discarded.
#[derive(Copy, Clone, Debug, PartialEq)]
pub enum ParseError {
    Error,
}

/// Tokenizes raw Lua source and parses it into a rebalanced `Chunk`.
/// NOTE(review): tokenization errors currently panic via `unwrap()` instead
/// of mapping to `ParseError` — the commented line shows the intended path.
#[allow(dead_code)]
pub fn parse_lua_source(input: &[u8]) -> Result<Chunk, ParseError> {
    //let tokens = lex::tokenify_string(input).map_err(|_|ParseError::Error)?;
    let tokens = lex::tokenify_string(input).unwrap();
    parse_chunk(&tokens)
}

/// Parses a token stream into a `Chunk`, then runs the expression-balancing
/// pass over the whole chunk.
#[allow(dead_code)]
pub fn parse_chunk(input: &[LexicalElement]) -> Result<Chunk, ParseError> {
    let (_, mut parsed) = match Chunk::parse(input) {
        IResult::Done(np, mut p) => (np, p),
        _ => return Err(ParseError::Error),
    };
    let mut ebv = ExpBalanceVisitor::default();
    ebv.visit_chunk(&mut parsed);
    Ok(parsed)
}

/// Parses a token stream into a single rebalanced `Exp` (test helper).
#[allow(dead_code)]
fn parse_exp(input: &[LexicalElement]) -> Result<Exp, ParseError> {
    let (_, mut parsed) = match Exp::parse(input) {
        IResult::Done(np, mut p) => (np, p),
        _ => return Err(ParseError::Error),
    };
    let mut ebv = ExpBalanceVisitor::default();
    ebv.visit_exp(&mut parsed);
    Ok(parsed)
}

#[cfg(test)]
mod tests {
    use super::*;
    use ast_types::*;

    /// Builds `Exp` trees with an infix-ish shorthand:
    /// `tree!(op, a, b)` => binary node, `tree!(op, a)` => unary node,
    /// `tree!(n)` => number literal, `tree!()` => nil.
    macro_rules! tree {
        (+, $a:expr, $b:expr) => { Exp::BinaryOp(Box::new($a), BinOp::Plus, Box::new($b)) };
        (-, $a:expr, $b:expr) => { Exp::BinaryOp(Box::new($a), BinOp::Minus, Box::new($b)) };
        (*, $a:expr, $b:expr) => { Exp::BinaryOp(Box::new($a), BinOp::Mult, Box::new($b)) };
        (/, $a:expr, $b:expr) => { Exp::BinaryOp(Box::new($a), BinOp::Div, Box::new($b)) };
        (^, $a:expr, $b:expr) => { Exp::BinaryOp(Box::new($a), BinOp::Pow, Box::new($b)) };
        (%, $a:expr, $b:expr) => { Exp::BinaryOp(Box::new($a), BinOp::Mod, Box::new($b)) };
        (<, $a:expr, $b:expr) => { Exp::BinaryOp(Box::new($a), BinOp::LessThan, Box::new($b)) };
        (>, $a:expr, $b:expr) => { Exp::BinaryOp(Box::new($a), BinOp::GreaterThan, Box::new($b)) };
        (=, $a:expr, $b:expr) => { Exp::BinaryOp(Box::new($a), BinOp::Equals, Box::new($b)) };
        (.., $a:expr, $b:expr) => { Exp::BinaryOp(Box::new($a), BinOp::Concat, Box::new($b)) };
        (<=, $a:expr, $b:expr) => { Exp::BinaryOp(Box::new($a), BinOp::LessEqual, Box::new($b)) };
        (>=, $a:expr, $b:expr) => { Exp::BinaryOp(Box::new($a), BinOp::GreaterEqual, Box::new($b)) };
        (!=, $a:expr, $b:expr) => { Exp::BinaryOp(Box::new($a), BinOp::NotEquals, Box::new($b)) };
        (&&, $a:expr, $b:expr) => { Exp::BinaryOp(Box::new($a), BinOp::And, Box::new($b)) };
        (||, $a:expr, $b:expr) => { Exp::BinaryOp(Box::new($a), BinOp::Or, Box::new($b)) };
        (-, $e:expr) => { Exp::UnaryOp(UnOp::Neg, Box::new($e)) };
        (!, $e:expr) => { Exp::UnaryOp(UnOp::Not, Box::new($e)) };
        (#, $e:expr) => { Exp::UnaryOp(UnOp::Len, Box::new($e)) };
        ($e:expr) => { Exp::SimpleExp(Box::new(SimpleExp::Number($e))) };
        () => { Exp::SimpleExp(Box::new(SimpleExp::Nil)) };
    }

    // Runs the balancing visitor in place on a hand-built tree.
    fn fix_exp_tree(e: &mut Exp) {
        let mut ebv = ExpBalanceVisitor;
        ebv.visit_exp(e);
    }

    // Asserts that balancing tree `$a` yields exactly tree `$b`.
    macro_rules! assert_transform {
        ($a:expr, $b:expr) => {{
            let mut a = $a;
            fix_exp_tree(&mut a);
            assert_eq!(a, $b);
        }};
    }

    #[test]
    fn test_exp_balancer() {
        //"nil"
        assert_transform!(tree!(), tree!());
        //"# 1.0 + 2.0"
        assert_transform!(
            tree!(#, tree!(+, tree!(1.), tree!(2.))),
            tree!(+, tree!(#, tree!(1.)), tree!(2.))
        );
        //"1.0 || 2.0 + 3.0"
        assert_transform!(
            tree!(+, tree!(||, tree!(1.), tree!(2.)), tree!(3.)),
            tree!(||, tree!(1.), tree!(+, tree!(2.), tree!(3.)))
        );
        //"-1.0 ^ 2.0"
        assert_transform!(
            tree!(^, tree!(-, tree!(1.)), tree!(2.)),
            tree!(-, tree!(^, tree!(1.), tree!(2.)))
        );
        //"-1.0 + 2.0"
        assert_transform!(
            tree!(-, tree!(+, tree!(1.), tree!(2.))),
            tree!(+, tree!(-, tree!(1.)), tree!(2.))
        );
        //"1.0 + 2.0 - 3.0"
        assert_transform!(
            tree!(+, tree!(1.), tree!(-, tree!(2.), tree!(3.))),
            tree!(-, tree!(+, tree!(1.), tree!(2.)), tree!(3.))
        );
        //"1.0 + 2.0 .. 3.0 .. 4.0"
        assert_transform!(
            tree!(.., tree!(+, tree!(1.), tree!(2.)), tree!(.., tree!(3.), tree!(4.))),
            tree!(.., tree!(+, tree!(1.), tree!(2.)), tree!(.., tree!(3.), tree!(4.)))
        );
        //"1.0 .. 2.0 .. 3.0"
        assert_transform!(
            tree!(.., tree!(.., tree!(1.), tree!(2.)), tree!(3.)),
            tree!(.., tree!(1.), tree!(.., tree!(2.), tree!(3.)))
        );
        //"1.0 + 2.0 + 3.0"
        assert_transform!(
            tree!(+, tree!(+, tree!(1.), tree!(2.)), tree!(3.)),
            tree!(+, tree!(+, tree!(1.), tree!(2.)), tree!(3.))
        );
        //"1.0 + 2.0 + 3.0 + 4.0"
        assert_transform!(
            tree!(+, tree!(+, tree!(1.), tree!(2.)), tree!(+, tree!(3.), tree!(4.))),
            tree!(+, tree!(+, tree!(+, tree!(1.), tree!(2.)), tree!(3.)), tree!(4.))
        );
        //"1.0 .. 2.0 .. 3.0 .. 4.0"
        assert_transform!(
            tree!(
                ..,
                tree!(.., tree!(1.), tree!(2.)),
                tree!(.., tree!(3.), tree!(4.))
            ),
            tree!(
                ..,
                tree!(1.),
                tree!(.., tree!(2.), tree!(.., tree!(3.), tree!(4.)))
            )
        );
        //"- 1.0 ^ 2.0 ^ 3.)"
        assert_transform!(
            tree!(^, tree!(-, tree!(1.)), tree!(^, tree!(2.), tree!(3.))),
            tree!(-, tree!(^, tree!(1.), tree!(^, tree!(2.), tree!(3.))))
        );
        //"1.0 + 2.0 ^ 3.0 + 4.0"
        assert_transform!(
            tree!(^, tree!(+, tree!(1.), tree!(2.)), tree!(+, tree!(3.), tree!(4.))),
            tree!(+, tree!(+, tree!(1.), tree!(^, tree!(2.), tree!(3.))), tree!(4.))
        );
        //"1.0 .. 2.0 ^ 3.0 .. 4.0"
        assert_transform!(
            tree!(^, tree!(.., tree!(1.), tree!(2.)), tree!(.., tree!(3.), tree!(4.))),
            tree!(.., tree!(1.), tree!(.., tree!(^, tree!(2.), tree!(3.)), tree!(4.)))
        );
        //"1.0 * 2.0 ^ 3.0 + 4.0"
        assert_transform!(
            tree!(^, tree!(*, tree!(1.), tree!(2.)), tree!(+, tree!(3.), tree!(4.))),
            tree!(+, tree!(*, tree!(1.), tree!(^, tree!(2.), tree!(3.))), tree!(4.))
        );
        //"1.0 + 2.0 ^ 3.0 * 4.0"
        assert_transform!(
            tree!(^, tree!(+, tree!(1.), tree!(2.)), tree!(*, tree!(3.), tree!(4.))),
            tree!(+, tree!(1.), tree!(*, tree!(^, tree!(2.), tree!(3.)), tree!(4.)))
        );
    }

    #[test]
    fn test_parse_table_constructor() {
        {
            let input = lex::tokenify_string(b"5").unwrap();
            let field = Field::parse(&input).unwrap().1;
            assert_eq!(
                field,
                Field::Exp(Box::new(Exp::SimpleExp(Box::new(SimpleExp::Number(5.0)))))
            );
        }
        {
            let input = lex::tokenify_string(b"[5] = 5").unwrap();
            let field = Field::parse(&input).unwrap().1;
            assert_eq!(
                field,
                Field::IndexExp(
                    Box::new(Exp::SimpleExp(Box::new(SimpleExp::Number(5.0)))),
                    Box::new(Exp::SimpleExp(Box::new(SimpleExp::Number(5.0))))
                )
            );
        }
        {
            let input = lex::tokenify_string(b"a = 5").unwrap();
            let field = Field::parse(&input).unwrap().1;
            assert_eq!(
                field,
                Field::NamedExp(
                    "a".to_string(),
                    Box::new(Exp::SimpleExp(Box::new(SimpleExp::Number(5.0))))
                )
            );
        }
        {
            let input = lex::tokenify_string(b"local a = {a = 5}").unwrap();
            let chunk = parse_chunk(&input).unwrap();
            let expected = Chunk(
                vec![Stat::LocalAssign(
                    Box::new(NameList(vec![format!("a")])),
                    ExpList(vec![Exp::SimpleExp(Box::new(SimpleExp::TableConstructor(
                        Box::new(TableConstructor(vec![Field::NamedExp(
                            "a".to_string(),
                            Box::new(Exp::SimpleExp(Box::new(SimpleExp::Number(5.0)))),
                        )])),
                    )))]),
                )],
                None,
            );
            assert_eq!(chunk, expected);
        }
    }

    #[test]
    fn test_parser() {
        {
            let input = b"4.0 + - nil ^ nil";
            let input = lex::tokenify_string(&input[..]).unwrap();
            let (_, output) = Exp::parse(&input).unwrap();
            let expected = tree!(+, tree!(4.0), tree!(-, tree!(^, tree!(), tree!())));
            assert_eq!(expected, output);
        }
        {
            let input = lex::tokenify_string(b"a = # 4 ^ 3").unwrap();
            {
                let (_, output) = Chunk::parse(&input).unwrap();
                println!("Before exp rotations: {:?}", output);
                let output = parse_chunk(&input).unwrap();
                println!("After exp rotations: {:?}", output);
            }
        }
        {
            // Raw parse output (no rebalancing): precedence is ignored.
            let (_, exp) = Exp::parse(&lex::tokenify_string(b"4 + 4 ^ 4 + 4").unwrap()).unwrap();
            assert_eq!(
                exp,
                tree!(+, tree!(4.0), tree!(^, tree!(4.0), tree!(+, tree!(4.0), tree!(4.0))))
            );
        }
        //use std::mem::size_of;
        //println!("UnOp: {}" , size_of::<UnOp >());
        //println!("BinOp: {}" , size_of::<BinOp >());
        //println!("FieldSep: {}" , size_of::<FieldSep >());
        //println!("SimpleExp: {}" , size_of::<SimpleExp >());
        //println!("PrefixExp: {}" , size_of::<PrefixExp >());
        //println!("Exp: {}" , size_of::<Exp >());
        //println!("Field: {}" , size_of::<Field >());
        //println!("TableConstructor: {}" , size_of::<TableConstructor>());
        //println!("NameList: {}" , size_of::<NameList >());
        //println!("ParList: {}" , size_of::<ParList >());
        //println!("FuncName: {}" , size_of::<FuncName >());
        //println!("ExpList: {}" , size_of::<ExpList >());
        //println!("Args: {}" , size_of::<Args >());
        //println!("Function: {}" , size_of::<Function >());
        //println!("FuncBody: {}" , size_of::<FuncBody >());
        //println!("FunctionCall: {}" , size_of::<FunctionCall >());
        //println!("Var: {}" , size_of::<Var >());
        //println!("VarList: {}" , size_of::<VarList >());
        //println!("Stat: {}" , size_of::<Stat >());
        //println!("LastStat: {}" , size_of::<LastStat >());
        //println!("Chunk: {}" , size_of::<Chunk >());
        //println!("NameAndArgs: {}" , size_of::<NameAndArgs >());
        //println!("VarSuffix: {}" , size_of::<VarSuffix >());
        //println!("VarOrExp: {}" , size_of::<VarOrExp >());
    }
}
//! Tests invoking an API defined in a custom backend. use bonsaidb::{ client::{url::Url, Client}, core::{ custom_api::CustomApi, permissions::{Actionable, Dispatcher, Permissions}, test_util::{Basic, TestDirectory}, }, server::{Backend, Configuration, ConnectedClient, CustomServer}, }; use serde::{Deserialize, Serialize}; #[derive(Debug, Dispatcher)] #[dispatcher(input = CustomRequest)] struct CustomBackend; impl Backend for CustomBackend { type CustomApi = Self; type CustomApiDispatcher = Self; fn dispatcher_for( _server: &CustomServer<Self>, _client: &ConnectedClient<Self>, ) -> Self::CustomApiDispatcher { CustomBackend } } impl CustomApi for CustomBackend { type Request = CustomRequest; type Response = CustomResponse; } #[derive(Serialize, Deserialize, Debug, Actionable)] enum CustomRequest { #[actionable(protection = "none")] Ping, } #[derive(Serialize, Deserialize, Debug, Clone)] enum CustomResponse { Pong, } #[tokio::test] async fn custom_api() -> anyhow::Result<()> { let dir = TestDirectory::new("custom_api.bonsaidb"); let server = CustomServer::<CustomBackend>::open( dir.as_ref(), Configuration { default_permissions: Permissions::allow_all(), ..Configuration::default() }, ) .await?; server .install_self_signed_certificate("test", false) .await?; let certificate = server.certificate().await?; server.register_schema::<Basic>().await?; tokio::spawn(async move { server.listen_on(12346).await }); let client = Client::build(Url::parse("bonsaidb://localhost:12346")?) .with_custom_api::<CustomBackend>() .with_certificate(certificate) .finish() .await?; let CustomResponse::Pong = client.send_api_request(CustomRequest::Ping).await?; Ok(()) } impl CustomRequestDispatcher for CustomBackend { type Output = CustomResponse; type Error = anyhow::Error; } #[actionable::async_trait] impl PingHandler for CustomBackend { async fn handle(&self, _permissions: &Permissions) -> Result<CustomResponse, anyhow::Error> { Ok(CustomResponse::Pong) } }
//! `Small Box` optimization: store small item on stack and fallback to heap for large item. //! //! # Usage //! //! First, add the following to your `Cargo.toml`: //! //! ```toml //! [dependencies] //! smallbox = "0.8" //! ``` //! //! Next, add this to your crate root: //! //! ```rust //! extern crate smallbox; //! ``` //! //! If you want this crate to work with dynamic-sized type, you can request it via: //! //! ```toml //! [dependencies] //! smallbox = { version = "0.8", features = ["coerce"] } //! ``` //! //! Currently `smallbox` by default links to the standard library, but if you would //! instead like to use this crate in a `#![no_std]` situation or crate, you can request this via: //! //! ```toml //! [dependencies.smallbox] //! version = "0.8" //! features = ["coerce"] //! default-features = false //! ``` //! //! //! # Feature Flags //! //! This crate has the following cargo feature flags: //! //! - `std` //! - Optional, enabled by default //! - Use libstd //! - If `std` feature flag is opted out, `alloc` crate //! will be linked, which requires nightly rust. //! //! - `coerce` //! - Optional //! - Require nightly rust //! - Allow automatic coersion from sized `SmallBox` to unsized `SmallBox`. //! //! //! # Unsized Type //! //! There are two ways to have an unsized `SmallBox`: Using `smallbox!()` macro or coercing from a sized `SmallBox` instance. //! //! Using the `smallbox!()` macro is the only option on stable rust. This macro will check the types of the expression and //! the expected type `T`. For any invalid type coersions, this macro invokes a compiler error. //! //! Once the feature `coerce` is enabled, sized `SmallBox<T>` can be coerced into `SmallBox<T: ?Sized>` if necessary. //! //! # Example //! //! Eliminate heap alloction for small items by `SmallBox`: //! //! ```rust //! use smallbox::SmallBox; //! use smallbox::space::S4; //! //! let small: SmallBox<_, S4> = SmallBox::new([0; 2]); //! let large: SmallBox<_, S4> = SmallBox::new([0; 32]); //! 
//! assert_eq!(small.len(), 2); //! assert_eq!(large.len(), 32); //! //! assert_eq!(*small, [0; 2]); //! assert_eq!(*large, [0; 32]); //! //! assert!(small.is_heap() == false); //! assert!(large.is_heap() == true); //! ``` //! //! ## Unsized type //! //! Construct with `smallbox!()` macro: //! //! ```rust //! #[macro_use] //! extern crate smallbox; //! //! # fn main() { //! use smallbox::SmallBox; //! use smallbox::space::*; //! //! let array: SmallBox<[usize], S2> = smallbox!([0usize, 1]); //! //! assert_eq!(array.len(), 2); //! assert_eq!(*array, [0, 1]); //! # } //! ``` //! //! With `coerce` feature: //! //! ```rust //! # #[cfg(feature = "coerce")] //! # { //! use smallbox::SmallBox; //! use smallbox::space::*; //! //! let array: SmallBox<[usize], S2> = SmallBox::new([0usize, 1]); //! //! assert_eq!(array.len(), 2); //! assert_eq!(*array, [0, 1]); //! # } //! ``` //! //! `Any` downcasting: //! //! ```rust //! #[macro_use] //! extern crate smallbox; //! //! # fn main() { //! use std::any::Any; //! use smallbox::SmallBox; //! use smallbox::space::S2; //! //! let num: SmallBox<dyn Any, S2> = smallbox!(1234u32); //! //! if let Some(num) = num.downcast_ref::<u32>() { //! assert_eq!(*num, 1234); //! } else { //! unreachable!(); //! } //! # } //! ``` //! //! //! # Capacity //! //! The capacity is expressed by the size of type parameter `Space`, //! regardless of what actually the `Space` is. //! //! The crate provides some spaces in module `smallbox::space`, //! from `S1`, `S2`, `S4` to `S64`, representing `"n * usize"` spaces. //! //! Anyway, you can defind your own space type //! such as byte array `[u8; 64]`. //! Please note that the space alignment is also important. If the alignment //! of the space is smaller than the alignment of the value, the value //! will be stored in the heap. #![cfg_attr(feature = "coerce", feature(unsize, coerce_unsized))] extern crate alloc; extern crate core as std; mod smallbox; pub mod space; pub use crate::smallbox::SmallBox;
use std::collections::VecDeque;

/// Rotates the circle clockwise by `n` steps: the marble `n` positions
/// clockwise of the current front becomes the new front.
fn rotate_cw(circle: &mut VecDeque<usize>, n: usize) {
    for _ in 0..n {
        let marble = circle.pop_front().unwrap();
        circle.push_back(marble);
    }
}

/// Rotates the circle counter-clockwise by `n` steps.
fn rotate_ccw(circle: &mut VecDeque<usize>, n: usize) {
    for _ in 0..n {
        let marble = circle.pop_back().unwrap();
        circle.push_front(marble);
    }
}

/// Run the game with @p players until marble @p last_marble.
///
/// Prints the result and returns the winning high score (the return value is
/// new; existing callers that ignore it are unaffected).
///
/// # Panics
/// Panics if `players` is zero.
fn game_vec(players: usize, last_marble: usize) -> usize {
    assert!(players > 0, "need at least one player");
    // 100x larger eh? That's cute.
    // We'll use a VecDeque and keep the "current" marble at index 0.
    let mut circle: VecDeque<usize> = vec![0].into_iter().collect();
    let mut scores = vec![0; players];
    let mut player = 0;
    for next_marble in 1..=last_marble {
        if next_marble % 23 == 0 {
            // Scoring marble: the player keeps it plus the marble seven
            // positions counter-clockwise. Rotating ccw by 6 leaves that
            // marble at the back; after popping it, the marble clockwise of
            // the removed one sits at the front and becomes current.
            rotate_ccw(&mut circle, 6);
            let removed = circle.pop_back().unwrap();
            scores[player] += next_marble + removed;
        } else {
            // Normal placement: between the marbles one and two steps
            // clockwise of current; the new marble becomes current.
            rotate_cw(&mut circle, 2);
            circle.push_front(next_marble);
        }
        player = (player + 1) % players;
    }
    let (winner, &high_score) = scores
        .iter()
        .enumerate()
        .max_by_key(|(_, &score)| score)
        .unwrap();
    println!(
        "{} players; last marble is worth {} points: high score is {} [player {}]",
        players, last_marble, high_score, winner + 1
    );
    high_score
}

// `main` never produced an error, so the dependency on the deprecated
// `failure` crate was dropped along with the `Result` return.
fn main() {
    game_vec(10, 1618);
    game_vec(13, 7999);
    game_vec(17, 1104);
    game_vec(21, 6111);
    game_vec(30, 5807);
    game_vec(468, 71843);
    game_vec(468, 71843 * 100);
}
#![forbid(unsafe_code)]

//! Obtains the dependency list from a compiled Rust binary by parsing its panic messages.
//! Recovers both crate names and versions.
//!
//! ## Caveats
//! * If the crate never panics, it will not show up.
//! The Rust compiler is very good at removing unreachable panics,
//! so we can only discover at around a half of all dependencies.
//! * C code such as `openssl-src` never shows up, because it can't panic.
//! * Only crates installed from a registry are discovered. Crates from local workspace or git don't show up.
//!
//! # Alternatives
//! [`cargo auditable`](https://crates.io/crates/cargo-auditable) embeds the **complete** dependency information
//! into binaries, which can then be recovered using [`auditable-info`](https://crates.io/crates/auditable-info).
//! It should be used instead of `quitters` whenever possible, unless you're specifically after panics.

use std::collections::BTreeSet;

use once_cell::sync::OnceCell;
use regex::bytes::Regex;
use semver::Version;

// This regex works surprisingly well. We can even split the crate name and version reliably
// because crate names publishable on crates.io cannot contain the `.` character,
// which *must* appear in the version string.
// Versions like "1" are not valid in Cargo, or under the semver spec.
// (`(?-u)` disables unicode mode so the pattern can run on raw bytes.)
const REGEX_STRING: &str = "(?-u)cargo/registry/src/[^/]+/(?P<crate>[0-9A-Za-z_-]+)-(?P<version>[0-9]+\\.[0-9]+\\.[0-9]+[0-9A-Za-z+.-]*)/";

// Compiled regular expressions use interior mutability and may cause contention
// in heavily multi-threaded workloads. This should not be an issue here
// because we only use `.captures_iter()`, which acquires the mutable state
// only once per invocation and for a short amount of time:
// https://github.com/rust-lang/regex/blob/0d0023e412f7ead27b0809f5d2f95690d0f0eaef/PERFORMANCE.md#using-a-regex-from-multiple-threads
// This could be refactored into cloning in case it *does* end up being a bottleneck in practice,
// which would sacrifice ergonomics.
//
// Lazily-compiled regexes for `/`-separated (Unix) and `\`-separated
// (Windows) registry paths.
static REGEX_UNIX: OnceCell<Regex> = OnceCell::new();
static REGEX_WINDOWS: OnceCell<Regex> = OnceCell::new();

/// Obtains the dependency list from a compiled Rust binary by parsing its panic messages.
///
/// ## Caveats
/// * If the crate never panics, it will not show up.
/// The Rust compiler is very good at removing unreachable panics,
/// so we can only discover at around a half of all dependencies.
/// * C code such as `openssl-src` never shows up, because it can't panic.
/// * Only crates installed from a registry are discovered. Crates from local workspace or git don't show up.
///
/// ## Usage
/// ```rust,ignore
/// let file = std::fs::read("target/release/my-program")?;
/// let versions = quitters::versions(&file);
/// for (krate, version) in versions.iter() {
///     println!("{krate} v{version}")
/// }
/// ```
pub fn versions(data: &[u8]) -> BTreeSet<(&str, Version)> {
    // You might think that just making two functions, versions_unix and versions_windows
    // and then calling the appropriate function for your platform would be faster,
    // since \ paths cannot be used on Unix. I briefly thought so!
    // However, cross-compilation from Windows to Unix would put \ paths into a Unix binary.
    // So that optimization would miss cross-compiled binaries.
    // It only gets you a 20% reduction in runtime because the I/O dominates anyway.
    //
    // A significant optimization to tackle the I/O problem would be only ever reading things
    // into the CPU cache as opposed to loading the entire file to memory.
    // Basically streaming the data. This requires special handling of the start and end,
    // so either needs a state-machine-based parser like nom or capping the possible match length.
    // The latter is doable but only makes sense if it turns out that the current approach is too slow.

    // This lint warns about a unicode-related panic, but we use a non-unicode-aware mode
    #[allow(clippy::invalid_regex)]
    let re = REGEX_UNIX.get_or_init(|| Regex::new(REGEX_STRING).unwrap());
    let versions = versions_for_regex(data, re);
    if !versions.is_empty() {
        versions
    } else {
        // Sadly the single-pass RegexSet only lets you check for presence of matches,
        // and doesn't let you find out where they are.
        // And using a composite regex like `unix_regex|windows_regex` is as slow as two passes,
        // so we'll just use two passes. That's what Regex crate documentation recommends, too.
        let re = REGEX_WINDOWS.get_or_init(|| {
            let windows_regex = REGEX_STRING.replace('/', "\\\\");
            Regex::new(&windows_regex).unwrap()
        });
        versions_for_regex(data, re)
    }
}

// Runs `re` over the whole binary and collects every (crate, version)
// pair that parses cleanly. BTreeSet gives deduplication + sorted output.
fn versions_for_regex<'a>(data: &'a [u8], re: &Regex) -> BTreeSet<(&'a str, Version)> {
    let mut versions = BTreeSet::new();
    for c in re.captures_iter(data) {
        if let Some(parsed) = parse_capture(c) {
            versions.insert(parsed);
        }
    }
    versions
}

/// Extracts crate and version from a single regex match
/// (returns `None` if the bytes are not valid UTF-8 or the version
/// is not valid semver).
fn parse_capture(c: regex::bytes::Captures) -> Option<(&str, Version)> {
    Some((
        std::str::from_utf8(c.name("crate").unwrap().as_bytes()).ok()?,
        Version::parse(std::str::from_utf8(c.name("version").unwrap().as_bytes()).ok()?).ok()?,
    ))
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn two_crates_one_line() {
        let data = b"\x7FELF/cargo/registry/src/github.com-1ecc6299db9ec823/xz2-0.1.6/src/stream.rsunknown return code: lzma data errorNoCheckProgramMemFormatOptionszstd returned null pointer when creating new context/cargo/registry/src/github.com-1ecc6299db9ec823/zstd-safe-5.0.2+zstd.1.5.2/src/lib.rsbad error message from zstdGiven position outside of the buffer bounds.";
        assert_eq!(versions(data).len(), 2);
    }

    #[test]
    fn complex_versions() {
        // Exercises build-metadata (`+...`) and pre-release (`-...`) suffixes.
        for version_suffix in [
            "",
            "+foobar",
            "+Fo0bar",
            "+zstd.1.5.2",
            "-rc",
            "-alpha.1",
            "-alpha.1+zstd.1.5.2",
        ] {
            let string = format!("new context/cargo/registry/src/github.com-1ecc6299db9ec823/zstd-safe-5.0.2{}/src/lib.rsbad error message from zstdGiven position outside of the buffer bounds.", version_suffix);
            let expected_version = format!("5.0.2{}", version_suffix);
            assert!(versions(string.as_bytes())
                .contains(&("zstd-safe", Version::parse(&expected_version).unwrap())));
        }
    }

    #[test]
    fn windows_matching() {
        let data = br"C:\Users\runneradmin\.cargo\registry\src\github.com-1ecc6299db9ec823\rustc-demangle-0.1.21\src\legacy.rs";
        assert!(versions(data).contains(&("rustc-demangle", Version::parse("0.1.21").unwrap())))
    }
}
use std::collections::HashMap;

use super::common::{Instruction, Value};

/// Resolves an operand: raw numbers evaluate to themselves, registers to
/// their current value (0 if never written).
fn get_value(map: &HashMap<char, isize>, value: &Value) -> isize {
    match value {
        &Value::Raw(ref val) => *val,
        &Value::Register(register) => *map.get(&register).unwrap_or(&0),
    }
}

/// Extracts the register name from an operand that must name a register.
/// Collapses the `match source { Value::Register(reg) => ... }` boilerplate
/// that was previously repeated in every instruction arm.
fn register_of(value: Value) -> char {
    match value {
        Value::Register(reg) => reg,
        _ => unreachable!("operand must name a register"),
    }
}

/// Runs the duet program in `data` and returns the most recently played
/// frequency at the moment of the first `rcv` on a non-zero register
/// (AoC 2017 day 18, part 1).
///
/// Returns 0 if execution falls off either end of the program first.
///
/// # Panics
/// Panics if a line fails to parse, or if `rcv` triggers before any `snd`
/// has played a frequency (matching the original behaviour).
pub fn parse(data: &str) -> isize {
    let instructions: Vec<_> = data.lines()
        .map(|line| line.parse::<Instruction>().unwrap())
        .collect();
    let mut map: HashMap<char, isize> = "abcdefghijklmnopqrstuvwxyz".chars()
        .map(|chr| (chr, 0)).collect();
    let mut last_freq: Option<isize> = None;
    let mut idx: usize = 0;
    // BUG FIX: the original condition was `if idx > instructions.len() { break; }`,
    // which allowed `instructions[idx]` to panic when `idx == len`.
    while idx < instructions.len() {
        match instructions[idx] {
            Instruction::Add(source, ref value) => {
                let val = get_value(&map, value);
                *map.entry(register_of(source)).or_insert(0) += val;
                idx += 1;
            }
            Instruction::Mul(source, ref value) => {
                let val = get_value(&map, value);
                *map.entry(register_of(source)).or_insert(0) *= val;
                idx += 1;
            }
            Instruction::Mod(source, ref value) => {
                let val = get_value(&map, value);
                let entry = map.entry(register_of(source)).or_insert(0);
                // Double-mod keeps the result non-negative for positive `val`.
                *entry = (*entry % val + val) % val;
                idx += 1;
            }
            Instruction::Set(source, ref value) => {
                let val = get_value(&map, value);
                *map.entry(register_of(source)).or_insert(0) = val;
                idx += 1;
            }
            Instruction::Jgz(source, ref value) => {
                if *map.get(&register_of(source)).unwrap_or(&0) > 0 {
                    let target = idx as isize + get_value(&map, value);
                    if target < 0 {
                        // Jumping before instruction 0 terminates the program.
                        // (The original subtracted from a usize here, which
                        // panicked on underflow in debug builds.)
                        break;
                    }
                    idx = target as usize;
                } else {
                    idx += 1;
                }
            }
            Instruction::Snd(source) => {
                // Play (remember) the register's current frequency.
                last_freq = Some(*map.get(&register_of(source)).unwrap_or(&0));
                idx += 1;
            }
            Instruction::Rcv(source) => {
                if *map.get(&register_of(source)).unwrap_or(&0) != 0 {
                    // First recover on a non-zero register: report the last
                    // played frequency.
                    return last_freq.unwrap();
                }
                idx += 1;
            }
        }
    }
    0
}

#[cfg(test)]
mod tests {
    use super::parse;

    #[test]
    fn day18_part1_test1() {
        let input = "set a 1
add a 2
mul a a
mod a 5
snd a
set a 0
rcv a
jgz a -1
set a 1
jgz a -2";
        assert_eq!(4, parse(input));
    }
}
//! All new handlers should be declared in this module mod register; pub mod game; pub mod packet; pub use self::register::register;
use proc_macro::TokenStream;
use quote::{quote, ToTokens};
use std::collections::HashSet;
use std::hash::Hash;
use syn::export::TokenStream2;
use syn::parse::{Parse, ParseBuffer};
use syn::parse_macro_input;
use syn::Error;
use yew_router_route_parser::{CaptureVariant, MatcherToken};

/// Parsed input of the `route!` macro: a route string literal followed by
/// zero or more option keywords in any order.
struct S {
    /// The routing string
    s: String,
    case_insensitive: bool,
    incomplete: bool,
    strict: bool,
}

/// Custom keywords
mod kw {
    syn::custom_keyword!(CaseInsensitive);
    syn::custom_keyword!(Incomplete);
    syn::custom_keyword!(Strict);
}

/// Collects 0 or more results from the parse_options.
/// It prevents parsing the same token(s) (as specified in the vector of parsers) twice.
///
/// Each parser that matches is removed from `parse_options`, so a keyword can
/// be consumed at most once; the loop stops as soon as a full pass over the
/// remaining parsers matches nothing.
fn many_unordered<T: Eq + Hash>(
    input: &ParseBuffer,
    mut parse_options: Vec<&dyn Fn(&ParseBuffer) -> Option<T>>,
) -> HashSet<T> {
    let mut collected = HashSet::new();
    while !parse_options.is_empty() {
        let mut inserted = false;
        'x: for (index, f) in parse_options.iter().enumerate() {
            if let Some(keyword) = (f)(&input) {
                collected.insert(keyword);
                let _ = parse_options.remove(index);
                inserted = true;
                break 'x; // must break to make borrow checker approve the implicit mut borrow needed for remove.
            }
        }
        if !inserted {
            break;
        }
    }
    collected
}

impl Parse for S {
    // Parses: <string literal> [Strict] [Incomplete] [CaseInsensitive]
    // where the keywords may appear in any order.
    fn parse(input: &ParseBuffer) -> Result<Self, Error> {
        let s = input.parse::<syn::LitStr>()?;

        #[derive(Debug, Hash, PartialEq, Eq, Clone, Copy)]
        enum Keyword {
            CaseInsensitive,
            Incomplete,
            Strict,
        }
        let collected: HashSet<Keyword> = many_unordered(
            input,
            vec![
                &|input| input.parse::<kw::Strict>().ok().map(|_| Keyword::Strict),
                &|input| {
                    input
                        .parse::<kw::Incomplete>()
                        .ok()
                        .map(|_| Keyword::Incomplete)
                },
                &|input| {
                    input
                        .parse::<kw::CaseInsensitive>()
                        .ok()
                        .map(|_| Keyword::CaseInsensitive)
                },
            ],
        );
        let incomplete = collected.contains(&Keyword::Incomplete);
        let strict = collected.contains(&Keyword::Strict);
        let case_insensitive = collected.contains(&Keyword::CaseInsensitive);

        Ok(S {
            s: s.value(),
            case_insensitive,
            incomplete,
            strict,
        })
    }
}

/// Expected to be used like: route!("/route/to/thing" => Component)
///
/// Expands to a block expression that builds a
/// `::yew_router::matcher::Matcher` from the compile-time-parsed route
/// string and the collected option flags.
pub fn route_impl(input: TokenStream) -> TokenStream {
    let s: S = parse_macro_input!(input as S);
    let input: String = s.s;

    // Do the parsing at compile time so the user knows if their matcher is malformed.
    // It will still be their responsibility to know that the corresponding Props can be acquired from a path matcher.
    let t = yew_router_route_parser::parse_str_and_optimize_tokens(input.as_str(), !s.strict)
        .expect("Invalid Path Matcher")
        .into_iter()
        .map(ShadowMatcherToken::from);

    let complete = !s.incomplete; // by default, complete is on.
    let strict = s.strict;
    let case_insensitive = s.case_insensitive;

    let expanded = quote! {
        {
            let settings = ::yew_router::matcher::route_matcher::MatcherSettings {
                strict: #strict,
                /// A matcher must consume all of the input to succeed.
                complete: #complete,
                /// All literal matches do not care about case.
                case_insensitive: #case_insensitive
            };
            ::yew_router::matcher::Matcher::from(
                ::yew_router::matcher::route_matcher::RouteMatcher {
                    tokens : vec![#(#t),*],
                    settings
                }
            )
        }
    };
    TokenStream::from(expanded)
}

impl ToTokens for ShadowMatcherToken {
    // Emits the path-qualified runtime `MatcherToken` constructor for each
    // shadow variant, so the expansion works without imports at the call site.
    fn to_tokens(&self, ts: &mut TokenStream2) {
        use ShadowMatcherToken as SOT;
        let t: TokenStream2 = match self {
            SOT::Exact(s) => quote! {
                ::yew_router::matcher::MatcherToken::Exact(#s.to_string())
            },
            SOT::Capture(variant) => quote! {
                ::yew_router::matcher::MatcherToken::Capture(#variant)
            },
            SOT::Optional(optional) => quote! {
                ::yew_router::matcher::MatcherToken::Optional(vec![#(#optional),*])
            },
        };
        ts.extend(t)
    }
}

/// A shadow of the OptimizedToken type.
/// It should match it exactly so that this macro can expand to the original.
enum ShadowMatcherToken {
    Exact(String),
    Capture(ShadowCaptureVariant),
    Optional(Vec<ShadowMatcherToken>),
}

/// Shadow of the runtime `CaptureVariant`; see `ShadowMatcherToken`.
enum ShadowCaptureVariant {
    Unnamed,                             // {} - matches anything
    ManyUnnamed,                         // {*} - matches over multiple sections
    NumberedUnnamed { sections: usize }, // {4} - matches 4 sections
    Named(String), // {name} - captures a section and adds it to the map with a given name
    ManyNamed(String), // {*:name} - captures over many sections and adds it to the map with a given name.
    NumberedNamed { sections: usize, name: String }, // {2:name} - captures a fixed number of sections with a given name.
}

impl ToTokens for ShadowCaptureVariant {
    // Emits the path-qualified runtime `CaptureVariant` constructor for each
    // shadow variant.
    fn to_tokens(&self, ts: &mut TokenStream2) {
        let t = match self {
            ShadowCaptureVariant::Unnamed => {
                quote! {::yew_router::matcher::CaptureVariant::Unnamed}
            }
            ShadowCaptureVariant::ManyUnnamed => {
                quote! {::yew_router::matcher::CaptureVariant::ManyUnnamed}
            }
            ShadowCaptureVariant::NumberedUnnamed { sections } => {
                quote! {::yew_router::matcher::CaptureVariant::NumberedUnnamed{#sections}}
            }
            ShadowCaptureVariant::Named(name) => {
                quote! {::yew_router::matcher::CaptureVariant::Named(#name.to_string())}
            }
            ShadowCaptureVariant::ManyNamed(name) => {
                quote! {::yew_router::matcher::CaptureVariant::ManyNamed(#name.to_string())}
            }
            ShadowCaptureVariant::NumberedNamed { sections, name } => {
                quote! {::yew_router::matcher::CaptureVariant::NumberedNamed{#sections, #name.to_string()}}
            }
        };
        ts.extend(t)
    }
}

impl From<MatcherToken> for ShadowMatcherToken {
    // Structural, recursive conversion from the parser's token type into the
    // shadow type this macro can turn into tokens.
    fn from(ot: MatcherToken) -> Self {
        use MatcherToken as MT;
        use ShadowMatcherToken as SOT;
        match ot {
            MT::Exact(s) => SOT::Exact(s),
            MT::Capture(variant) => SOT::Capture(variant.into()),
            MT::Optional(optional) => SOT::Optional(optional.into_iter().map(SOT::from).collect()),
        }
    }
}

impl From<CaptureVariant> for ShadowCaptureVariant {
    // One-to-one variant mapping; no data is transformed.
    fn from(cv: CaptureVariant) -> Self {
        use CaptureVariant as CV;
        use ShadowCaptureVariant as SCV;
        match cv {
            CV::Unnamed => SCV::Unnamed,
            CaptureVariant::ManyUnnamed => SCV::ManyUnnamed,
            CaptureVariant::NumberedUnnamed { sections } => SCV::NumberedUnnamed { sections },
            CaptureVariant::Named(name) => SCV::Named(name),
            CaptureVariant::ManyNamed(name) => SCV::ManyNamed(name),
            CaptureVariant::NumberedNamed { sections, name } => {
                SCV::NumberedNamed { sections, name }
            }
        }
    }
}
extern crate ez_pixmap;
extern crate fltk;

use fltk::{enums::*, prelude::*, *};

/// 50x34 XPM-style pixmap: header line ("width height colors chars-per-pixel"),
/// 4 palette lines, then 34 rows of pixel data.
const PXM: &[&str] = &[
    "50 34 4 1",
    "  c black",
    "o c #ff9900",
    "@ c white",
    "# c None",
    "##################################################",
    "###      ##############################      ####",
    "### ooooo  ###########################  ooooo ####",
    "### oo  oo  #########################  oo  oo ####",
    "### oo   oo  #######################  oo   oo ####",
    "### oo    oo  #####################  oo    oo ####",
    "### oo     oo  ###################  oo     oo ####",
    "### oo      oo                      oo      oo ####",
    "### oo       oo  ooooooooooooooo  oo       oo ####",
    "### oo        ooooooooooooooooooooo        oo ####",
    "### oo     ooooooooooooooooooooooooooo    ooo ####",
    "#### oo   ooooooo ooooooooooooo ooooooo   oo #####",
    "#### oo  oooooooo ooooooooooooo oooooooo  oo #####",
    "##### oo oooooooo ooooooooooooo oooooooo oo ######",
    "#####  o ooooooooooooooooooooooooooooooo o  ######",
    "###### ooooooooooooooooooooooooooooooooooo #######",
    "#####  ooooooooo   ooooooooo   ooooooooo  ######",
    "#####  oooooooo  @  ooooooo  @  oooooooo  ######",
    "#####  oooooooo @@@ ooooooo @@@ oooooooo  ######",
    "#####  oooooooo @@@ ooooooo @@@ oooooooo  ######",
    "#####  oooooooo  @  ooooooo  @  oooooooo  ######",
    "#####  ooooooooo   ooooooooo   ooooooooo  ######",
    "######  oooooooooooooo oooooooooooooo  #######",
    "######  oooooooo@@@@@@@ @@@@@@@oooooooo  #######",
    "######  ooooooo@@@@@@@@@ @@@@@@@@@ooooooo  #######",
    "#######  ooooo@@@@@@@@@@@ @@@@@@@@@@@ooooo  ########",
    "#########  oo@@@@@@@@@@@@ @@@@@@@@@@@@oo  ##########",
    "##########  o@@@@@@ @@@@@ @@@@@ @@@@@@o  ###########",
    "###########  @@@@@@@ @ @@@@@@@  ############",
    "############  @@@@@@@@@@@@@@@@@@@@@  #############",
    "##############  @@@@@@@@@@@@@@@@@  ###############",
    "################  @@@@@@@@@  #################",
    "####################        #####################",
    "##################################################",
];

/// Decode `PXM` into an RGBA image and display it in a 50x34 overlay window.
///
/// Errors from pixmap decoding and the event loop propagate to the caller
/// instead of panicking.
fn main() -> Result<(), Box<dyn std::error::Error>> {
    let my_image = ez_pixmap::RgbaImage::from(PXM)?;

    let app = app::App::default();
    let mut win = window::OverlayWindow::default().with_size(50, 34);
    let mut frame = frame::Frame::default().size_of(&win);
    win.end();
    win.show();

    // The draw callback repaints the decoded RGBA buffer over the frame.
    frame.draw(move || {
        draw::draw_image(my_image.data(), 0, 0, 50, 34, ColorDepth::Rgba8).unwrap();
    });

    // IMPROVED: propagate event-loop errors with `?` rather than unwrapping;
    // `main` already returns `Result<(), Box<dyn Error>>`.
    app.run()?;
    Ok(())
}
This library provides field accessor traits, and emulation of structural types.
```rust use structural::{GetFieldExt,Structural,structural_alias,fp}; use std::borrow::Borrow; structural_alias!{ trait Person<H:House>{ name:String, house:H, } trait House{ dim:Dimension3D, } } fn print_name<T,H>(this:&T) where T:?Sized+Person<H>, H:House, { let (name,house_dim)=this.fields(fp!( name, house.dim )); println!("Hello, {}!", name); let (w,h,d)=house_dim.fields(fp!( width, height, depth )); if w*h*d >= 1_000_000 { println!("Your house is enormous."); }else{ println!("Your house is normal sized."); } } // most structural aliases are object safe fn print_name_dyn<H>(this:&dyn Person<H>) where H:House, { print_name(this) } #[derive(Structural)] #[struc(public)] struct Dimension3D{ width:u32, height:u32, depth:u32, } ////////////////////////////////////////////////////////////////////////// //// The stuff here could be defined in a separate crate fn main(){ let worker=Worker{ name:"John Doe".into(), salary:Cents(1_000_000_000_000_000), house:Mansion{ dim:Dimension3D{ width:300, height:300, depth:300, }, money_vault_location:"In the basement".into(), } }; let student=Student{ name:"Jake English".into(), birth_year:1995, house:SmallHouse{ dim:Dimension3D{ width:30, height:30, depth:30, }, residents:10, } }; print_name(&worker); print_name(&student); print_name_dyn(&worker); print_name_dyn(&student); } #[derive(Structural)] // Using the `#[struc(public)]` attribute tells the derive macro to // generate the accessor trait impls for non-`pub` fields. 
#[struc(public)] struct Worker{ name:String, salary:Cents, house:Mansion, } #[derive(Structural)] #[struc(public)] struct Student{ name:String, birth_year:u32, house:SmallHouse, } # #[derive(Debug,Copy,Clone,PartialEq,Eq)] # struct Cents(u64); #[derive(Structural)] #[struc(public)] struct Mansion{ dim:Dimension3D, money_vault_location:String, } #[derive(Structural)] #[struc(public)] struct SmallHouse{ dim:Dimension3D, residents:u32, } ``` ### Anonymous structs (`make_struct` macro) This demonstrates how you can construct an anonymous struct. For more details you can look at the docs for the [`make_struct`](./macro.make_struct.html) macro. ```rust use structural::{GetFieldExt,make_struct,structural_alias,fp}; structural_alias!{ trait Person<T>{ // We only have shared access (`&String`) to the field. ref name:String, // We have shared,mutable,and by value access to the field. // Not specifying any of `mut`/`ref`/`move` is equivalent to `mut move value:T,` value:T, } } fn make_person(name:String)->impl Person<()> { make_struct!{ name, value: (), } } fn print_name<T>(mut this:T) where T:Person<Vec<String>>, { println!("Hello, {}!",this.field_(fp!(name)) ); let list=vec!["what".into()]; *this.field_mut(fp!(value))=list.clone(); assert_eq!( this.field_(fp!(value)), &list ); assert_eq!( this.into_field(fp!(value)), list ); } // most structural aliases are object safe fn print_name_dyn(this:&mut dyn Person<Vec<String>>){ println!("Hello, {}!",this.field_(fp!(name)) ); let list=vec!["what".into()]; *this.field_mut(fp!(value))=list.clone(); assert_eq!( this.field_(fp!(value)), &list ); } ////////////////////////////////////////////////////////////////////////// //// The stuff here could be defined in a separate crate fn main(){ let worker=make_struct!{ // This derives clone for the anonymous struct #![derive(Clone)] name:"John Doe".into(), salary:Cents(1_000_000_000_000_000), value:vec![], }; let student=make_struct!{ // This derives clone for the anonymous struct 
#![derive(Clone)] name:"Jake English".into(), birth_year:1995, value:vec![], }; print_name(worker.clone()); print_name(student.clone()); print_name_dyn(&mut worker.clone()); print_name_dyn(&mut student.clone()); let person=make_person("Louis".into()); assert_eq!( person.field_(fp!(name)), "Louis" ); assert_eq!( person.field_(fp!(value)), &() ); } #[derive(Debug,Copy,Clone,PartialEq,Eq)] struct Cents(u64); ``` */ #![cfg_attr(feature="nightly_impl_fields",feature(associated_type_bounds))] #![cfg_attr(feature="nightly_specialization",feature(specialization))] #![cfg_attr(feature="nightly_better_macros",feature(proc_macro_hygiene))] #![no_std] #[cfg(any( all(feature="alloc",not(feature="rust_1_36")), feature="std", ))] pub extern crate std; #[doc(hidden)] pub extern crate core as std_; #[doc(hidden)] #[cfg(all(feature="alloc",feature="rust_1_36"))] pub extern crate alloc as alloc_; #[doc(hidden)] #[cfg(all(feature="alloc",feature="rust_1_36"))] pub use alloc_ as alloc; #[doc(hidden)] #[cfg(all(feature="alloc",not(feature="rust_1_36")))] pub use std as alloc; extern crate self as structural; pub use structural_derive::Structural; #[doc(hidden)] pub use structural_derive::{ old_fp_impl_, //new_fp_impl_, _field_path_aliases_impl, _FP_impl_, structural_alias_impl, }; #[macro_use] mod macros; pub mod docs; pub mod mut_ref; pub mod field_traits; pub mod structural_trait; pub mod utils; #[cfg(test)] pub mod test_utils; #[cfg(test)] pub mod tests{ mod multi_nested_fields; mod structural_derive; mod structural_alias; mod macro_tests; } pub mod type_level; #[doc(hidden)] pub mod chars; #[doc(inline)] pub use crate::field_traits::GetFieldExt; pub use crate::{ field_traits::{ GetField,GetFieldMut,IntoField,IntoFieldMut, GetFieldType,GetFieldType2,GetFieldType3,GetFieldType4, RevGetFieldType,RevGetFieldType_, }, structural_trait::{Structural,StructuralDyn}, }; /// Reexports from the `core_extensions` crate. 
pub mod reexports{
    // Items from `core_extensions` that this crate's public API and macro
    // expansions refer to; re-exported so downstream users don't need a
    // direct `core_extensions` dependency.
    pub use core_extensions::{
        collection_traits::{Cloned,IntoArray},
        type_asserts::AssertEq,
        MarkerType,
        SelfOps,
        TIdentity,
        TypeIdentity,
    };
}

// pmr(proc macro reexports):
// Reexports for the proc macros in structural_derive.
//
// Importing stuff from this module anywhere other than `structural_derive` is
// explicitly disallowed, and is likely to break.
#[doc(hidden)]
pub mod pmr{
    pub use crate::type_level::*;
    pub use crate::type_level::_private::*;
    pub use crate::type_level::collection_traits::*;
    pub use crate::chars::*;
    pub use core_extensions::{MarkerType,TIdentity,TypeIdentity};
    pub use crate::std_::marker::PhantomData;

    // `Box` is only available when an allocator is (crate is `no_std`).
    #[cfg(feature="alloc")]
    pub use crate::alloc::{
        boxed::Box,
    };
}

// Guard: the crate's own tests rely on helpers gated behind the "testing"
// feature, so running `cargo test` without it is a hard error up front.
#[cfg(all(test,not(feature="testing")))]
compile_error!{ "tests must be run with the \"testing\" feature" }
//! The VAPIX v3 parameters interface at `/axis-cgi/param.cgi`.

use crate::*;
use serde::{Deserialize, Serialize};
use std::collections::BTreeMap;
use std::convert::TryFrom;
use std::fmt;
use std::str::FromStr;

/// A device's legacy parameters API.
//
// Field 0 is the transport client; field 1 is the negotiated API version
// (stored by `new` but not read anywhere in this file).
pub struct Parameters<'a, T: Transport>(&'a Client<T>, String);

impl<'a, T: Transport> Parameters<'a, T> {
    pub(crate) fn new(device: &'a Client<T>, api_version: String) -> Self {
        Self(device, api_version)
    }

    /// List parameters, including their definitions and current values.
    ///
    /// If `groups` is provided, return a subset of the parameter tree.
    ///
    /// Issues `GET ?action=listdefinitions&listformat=xmlschema` and
    /// deserializes the XML response body into [`ParameterDefinitions`].
    pub async fn list_definitions(&self, groups: Option<&[&str]>) -> Result<ParameterDefinitions> {
        let req = http::Request::builder()
            .method(http::Method::GET)
            .uri(
                self.0
                    .uri_for_args(
                        "/axis-cgi/param.cgi",
                        ListParams {
                            action: "listdefinitions",
                            list_format: Some("xmlschema"),
                            groups,
                        },
                    )
                    .unwrap(),
            )
            .body(Vec::new())
            .unwrap();

        let (_resp, resp_body) = self.0.roundtrip(req, "text/xml").await?;
        let resp_body =
            std::str::from_utf8(resp_body.as_slice()).map_err(|_| Error::Other("invalid UTF-8"))?;
        let params: ParameterDefinitions = quick_xml::de::from_str(resp_body)?;
        Ok(params)
    }

    /// List parameters, including their current values.
    ///
    /// If `groups` is provided, return a subset of the parameter tree.
    ///
    /// Issues `GET ?action=list` and parses the plain-text `key=value` lines
    /// into a map; lines that are not valid UTF-8 or have no `=` are skipped.
    pub async fn list(&self, groups: Option<&[&str]>) -> Result<BTreeMap<String, String>> {
        let req = http::request::Builder::new()
            .method(http::Method::GET)
            .uri(
                self.0
                    .uri_for_args(
                        "/axis-cgi/param.cgi",
                        ListParams {
                            action: "list",
                            list_format: None,
                            groups,
                        },
                    )
                    .unwrap(),
            )
            .body(Vec::new())
            .unwrap();

        let (_, body) = self.0.roundtrip(req, "text/plain").await?;

        Ok(body
            .as_slice()
            .split(|byte| *byte == b'\n')
            .filter_map(|line| {
                let line = std::str::from_utf8(line).unwrap_or("");
                // splitn(2, '=') keeps any further '=' inside the value.
                let mut parts = line.splitn(2, '=');
                match (parts.next(), parts.next()) {
                    (Some(key), Some(value)) => Some((key.to_string(), value.to_string())),
                    _ => None,
                }
            })
            .collect())
    }

    // todo: ?action=add, optional force=yes
    // The force parameter can be used to exceed limits set for adding dynamic parameter groups.
    // Example: Axis products can be configured for up to 10 event types. The force parameter can be used to exceed this maximum number of events.

    // todo action=remove

    /// Attempt to update one or more parameters.
    ///
    /// Sends `GET ?action=update&<name>=<value>…`. Succeeds when the device
    /// replies with the literal body `OK`.
    ///
    /// TODO: what happens with partial failure?
    pub async fn update<I: IntoIterator<Item = (K, V)>, K: AsRef<str>, V: AsRef<str>>(
        &self,
        parameters: I,
    ) -> Result<()> {
        let mut query_params: BTreeMap<String, String> = parameters
            .into_iter()
            .map(move |(k, v)| (k.as_ref().to_string(), v.as_ref().to_string()))
            .collect();
        // NOTE(review): a caller-supplied key named "action" would be
        // silently overwritten here.
        query_params.insert("action".into(), "update".into());
        assert!(!query_params.is_empty());

        let req = http::request::Builder::new()
            .method(http::Method::GET)
            .uri(
                self.0
                    .uri_for_args("/axis-cgi/param.cgi", query_params)
                    .unwrap(),
            )
            .body(Vec::new())
            .unwrap();

        let (_, body) = self.0.roundtrip(req, "text/plain").await?;
        if body.as_slice() == b"OK" {
            Ok(())
        } else if body.as_slice().starts_with(b"# ") {
            // xxx: body contains error message
            Err(Error::Other("call failed for specific reason"))
        } else {
            Err(Error::Other("call failed for unknown reason"))
        }
    }
}

/// Query-string parameters for `action=list`/`action=listdefinitions`.
#[derive(Serialize)]
struct ListParams<'a> {
    action: &'a str,
    #[serde(skip_serializing_if = "Option::is_none", rename = "listformat")]
    list_format: Option<&'a str>,
    #[serde(
        rename = "group",
        skip_serializing_if = "Option::is_none",
        serialize_with = "serialize_list_params_groups"
    )]
    groups: Option<&'a [&'a str]>,
}

// Serializes the group list as a single comma-separated string, which is the
// encoding param.cgi expects. `None` is unreachable because of
// `skip_serializing_if` on the field.
fn serialize_list_params_groups<S>(groups: &Option<&[&str]>, ser: S) -> Result<S::Ok, S::Error>
where
    S: serde::Serializer,
{
    match groups {
        Some(groups) => {
            let groups = groups.join(",");
            ser.serialize_str(&groups)
        }
        None => unreachable!(),
    }
}

/// A set of parameter definitions.
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct ParameterDefinitions {
    /// The version of the data structures used to describe the parameter definitions.
    ///
    /// In practice, always `"1.0"`.
    #[serde(rename = "version")]
    pub schema_version: String,
    /// The name of the device model.
    pub model: Option<String>,
    /// The version of firmware running on the device.
    pub firmware_version: Option<String>,
    /// Parameter groups provided by this device.
    #[serde(rename = "group")]
    pub groups: Vec<ParameterGroupDefinition>,
}

/// A group of parameter definitions.
///
/// May contain parameters or additional groups.
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct ParameterGroupDefinition {
    /// The name of the parameter group.
    pub name: String,
    /// Purpose unknown.
    pub max_groups: Option<u32>,
    /// The parameter groups nested within this parameter group.
    #[serde(rename = "group", default)]
    pub groups: Vec<ParameterGroupDefinition>,
    /// The parameters nested within this parameter group.
    #[serde(rename = "parameter", default)]
    pub parameters: Vec<ParameterDefinition>,
}

impl ParameterGroupDefinition {
    /// Find a nested parameter group by name.
    pub fn group(&self, name: &str) -> Option<&ParameterGroupDefinition> {
        self.groups.iter().find(|g| g.name == name)
    }

    /// Find a nested parameter by name.
    pub fn parameter(&self, name: &str) -> Option<&ParameterDefinition> {
        self.parameters.iter().find(|g| g.name == name)
    }
}

/// A parameter definition.
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct ParameterDefinition {
    /// The name of the parameter.
    pub name: String,
    /// The current value of the parameter, if any, expressed as a string.
    #[serde(rename = "value")]
    pub current_value: Option<String>,
    /// The security level of the parameter.
    pub security_level: Option<u32>, // FIXME: this is a 4-digit octal string. What does it mean?
    /// The name to display to the user, if different from `name`.
    pub nice_name: Option<String>,
    /// The type of this parameter, if provided.
    #[serde(rename = "type")]
    pub parameter_type: Option<ParameterTypeDefinition>,
}

impl ParameterDefinition {
    /// Return this parameter as a `bool`. Returns `None` if this parameter has no `current_value`,
    /// has no `parameter_type`, has a `parameter_type` with a `type_definition` other than
    /// `TypeDefinition::Bool`, or if `current_value` is neither `true_value` nor `false_value`.
    pub fn as_bool(&self) -> Option<bool> {
        match (self.current_value.as_ref(), self.parameter_type.as_ref()) {
            (
                Some(value),
                Some(ParameterTypeDefinition {
                    type_definition: TypeDefinition::Bool(td),
                    ..
                }),
            ) => {
                // Compare against the type's own true/false spellings rather
                // than hard-coding "true"/"false".
                if value == &td.true_value {
                    Some(true)
                } else if value == &td.false_value {
                    Some(false)
                } else {
                    None
                }
            }
            _ => None,
        }
    }
}

/// The four access levels of a parameter, one per operation, parsed from the
/// device's 4-digit string (see [`FromStr`] below). Digit order is
/// create, delete, read, write.
#[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd)]
pub struct SecurityLevel {
    pub create: AccessLevel,
    pub delete: AccessLevel,
    pub read: AccessLevel,
    pub write: AccessLevel,
}

/// Error produced when parsing a [`SecurityLevel`] string.
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd)]
pub enum BadSecurityLevelError {
    BadAccessLevel(BadAccessLevelError),
    WrongLength(String),
}

impl fmt::Display for BadSecurityLevelError {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            BadSecurityLevelError::BadAccessLevel(BadAccessLevelError(c)) => {
                write!(f, "expected access level digit, got '{}'", c)
            }
            BadSecurityLevelError::WrongLength(str) => {
                write!(f, "expected 4 digits, got {:?}", str)
            }
        }
    }
}

impl From<BadAccessLevelError> for BadSecurityLevelError {
    fn from(l: BadAccessLevelError) -> Self {
        BadSecurityLevelError::BadAccessLevel(l)
    }
}

impl FromStr for SecurityLevel {
    type Err = BadSecurityLevelError;

    // Parses exactly 4 access-level digits; anything shorter or longer is
    // `WrongLength`, an invalid digit is `BadAccessLevel`.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let mut chars = s.chars();
        // Pulls the next digit or reports the original string as too short.
        fn next(
            s: &str,
            chars: &mut std::str::Chars,
        ) -> Result<AccessLevel, BadSecurityLevelError> {
            chars
                .next()
                .ok_or_else(|| BadSecurityLevelError::WrongLength(s.into()))
                .and_then(|c| AccessLevel::try_from(c).map_err(|e| e.into()))
        }

        let security_level = Self {
            create: next(s, &mut chars)?,
            delete: next(s, &mut chars)?,
            read: next(s, &mut chars)?,
            write: next(s, &mut chars)?,
        };
        // Reject trailing characters beyond the 4 digits.
        if chars.next().is_none() {
            Ok(security_level)
        } else {
            Err(BadSecurityLevelError::WrongLength(s.into()))
        }
    }
}

impl fmt::Display for SecurityLevel {
    // Round-trips back to the 4-digit form accepted by `from_str`.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        use std::fmt::Write;
        f.write_char(self.create.into())?;
        f.write_char(self.delete.into())?;
        f.write_char(self.read.into())?;
        f.write_char(self.write.into())
    }
}

impl<'de> serde::de::Deserialize<'de> for SecurityLevel {
    // Deserializes via the string form, delegating validation to `FromStr`.
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::de::Deserializer<'de>,
    {
        let s = String::deserialize(deserializer)?;
        FromStr::from_str(&s).map_err(serde::de::Error::custom)
    }
}

#[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd)]
pub enum AccessLevel {
    /// Not subject to access control.
    Unprotected,
    /// Accessible to viewers, operators, or administrators.
    ViewerAccess,
    /// Accessible to operators or administrators.
    OperatorAccess,
    /// Accessible to administrators.
    AdministratorAccess,
    /// Root access. Internal parameters that can be changed by firmware applications or by root
    /// editing the configuration files directly.
    RootAccess,
}

impl From<AccessLevel> for char {
    // Note the device's digit scheme is sparse: 0, 1, 4, 6, 7.
    fn from(al: AccessLevel) -> Self {
        match al {
            AccessLevel::Unprotected => '0',
            AccessLevel::ViewerAccess => '1',
            AccessLevel::OperatorAccess => '4',
            AccessLevel::AdministratorAccess => '6',
            AccessLevel::RootAccess => '7',
        }
    }
}

/// Error produced when a character is not one of the five access-level digits.
#[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd)]
pub struct BadAccessLevelError(char);

impl TryFrom<char> for AccessLevel {
    type Error = BadAccessLevelError;

    fn try_from(value: char) -> Result<Self, Self::Error> {
        match value {
            '0' => Ok(AccessLevel::Unprotected),
            '1' => Ok(AccessLevel::ViewerAccess),
            '4' => Ok(AccessLevel::OperatorAccess),
            '6' => Ok(AccessLevel::AdministratorAccess),
            '7' => Ok(AccessLevel::RootAccess),
            other => Err(BadAccessLevelError(other)),
        }
    }
}

/// A parameter type definition, describing flags and type information.
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct ParameterTypeDefinition {
    /// Is this parameter read-only?
    #[serde(rename = "readonly")]
    pub read_only: Option<bool>,
    /// Is this parameter write-only?
    #[serde(rename = "writeonly")]
    pub write_only: Option<bool>,
    /// Should this parameter be displayed?
    pub hidden: Option<bool>,
    /// Is this parameter constant?
    ///
    /// (FIXME: How does this differ from `read_only`?)
    #[serde(rename = "const")]
    pub constant: Option<bool>,
    /// Purpose unknown.
    #[serde(rename = "nosync")]
    pub no_sync: Option<bool>,
    /// Purpose unknown.
    pub internal: Option<bool>,
    /// The type definition of this parameter, describing its domain and encoding.
    #[serde(rename = "$value")]
    pub type_definition: TypeDefinition,
}

/// A type definition, describing a parameter's domain and encoding.
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
pub enum TypeDefinition {
    /// A string, to be displayed as a text box.
    String(StringParameterDefinition),
    /// A string, to be displayed as a password box.
    Password(PasswordParameterDefinition),
    /// An integer, to be displayed as a text box.
    Int(IntParameterDefinition),
    /// An enumeration, to be displayed as a select box.
    Enum(EnumParameterDefinition),
    /// A boolean, to be displayed as a select box.
    Bool(BoolParameterDefinition),
    /// An IP address.
    ///
    /// FIXME: IPv4 or IPv6?
    Ip,
    /// A list of IP addresses.
    ///
    /// FIXME: encoding?
    IpList,
    /// A hostname.
    ///
    /// FIXME: details?
    Hostname,
    /// A string, to be displayed as a multiline text box.
    TextArea,
}

/// String parameter definition details.
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct StringParameterDefinition {
    /// The maximum length of the string.
    #[serde(rename = "maxlen")]
    pub max_len: Option<u32>,
}

/// Password parameter definition details.
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct PasswordParameterDefinition {
    /// The maximum length of the string.
    #[serde(rename = "maxlen")]
    pub max_len: Option<u32>,
}

/// Integer parameter definition details.
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct IntParameterDefinition {
    /// The minimum value of the integer.
    pub min: Option<i64>,
    /// The maximum value of the integer.
    pub max: Option<i64>,
    /// The maximum length of the integer as a string.
    #[serde(rename = "maxlen")]
    pub max_len: Option<u8>,
    /// Range(s) in which the integer must be contained.
    pub range_entries: Option<Vec<IntParameterRangeDefinition>>,
}

/// Integer parameter range definiton details.
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct IntParameterRangeDefinition {
    /// TODO: parse "0" and "1024-65534" into something more appropriate
    pub value: String,
}

/// Enumeration parameter definition details.
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct EnumParameterDefinition {
    /// A list of entries from which the parameter value must be selected.
    #[serde(rename = "entry")]
    pub values: Vec<EnumEntryDefinition>,
}

/// An enumeration entry.
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct EnumEntryDefinition {
    /// The value of the parameter.
    pub value: String,
    /// The value to display to the user, if different from `value`.
    pub nice_value: Option<String>,
}

/// Boolean parameter definition details.
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct BoolParameterDefinition {
    /// The string value used to represent `true`.
    #[serde(rename = "true")]
    pub true_value: String,
    /// The string value used to represent `false`.
    #[serde(rename = "false")]
    pub false_value: String,
}

#[cfg(test)]
mod tests {
    // Exercises `list` against real devices: narrower group filters must
    // yield strictly fewer parameters.
    #[test]
    fn list() {
        crate::test_with_devices(|test_device| async move {
            let parameters = test_device.client.parameters();

            let all_params = parameters.list(None).await?;
            let brand_params = parameters.list(Some(&["root.Brand"])).await?;
            let brand_and_firmware_params = parameters
                .list(Some(&["root.Brand", "root.Properties.Firmware"]))
                .await?;

            assert!(all_params.len() > 0);
            assert!(
                all_params.len() > brand_params.len(),
                "all_params.len() = {} is not greater than brand_params.len() = {}",
                all_params.len(),
                brand_params.len()
            );
            assert!(
                all_params.len() > brand_and_firmware_params.len(),
                "all_params.len() = {} is not greater than brand_and_firmware_params.len() = {}",
                all_params.len(),
                brand_and_firmware_params.len()
            );
            assert!(
                brand_and_firmware_params.len() > brand_params.len(),
                "brand_and_firmware_params.len() = {} is not greater than brand_params.len() = {}",
                brand_and_firmware_params.len(),
                brand_params.len()
            );

            Ok(())
        });
    }

    // Exercises `list_definitions` against real devices: the root group is
    // always singular and filters restrict its children.
    #[test]
    fn list_definitions() {
        crate::test_with_devices(|test_device| async move {
            let parameters = test_device.client.parameters();

            let all_params = parameters.list_definitions(None).await?;
            let brand_params = parameters.list_definitions(Some(&["root.Brand"])).await?;
            let brand_and_firmware_params = parameters
                .list_definitions(Some(&["root.Brand", "root.Properties.Firmware"]))
                .await?;

            assert_eq!(all_params.groups.len(), 1);
            assert_eq!(brand_params.groups.len(), 1);
            assert_eq!(brand_and_firmware_params.groups.len(), 1);

            assert_eq!(all_params.model, brand_params.model);
            assert_eq!(all_params.model, brand_and_firmware_params.model);
            assert_eq!(all_params.firmware_version, brand_params.firmware_version);
            assert_eq!(
                all_params.firmware_version,
                brand_and_firmware_params.firmware_version
            );

            assert!(all_params.groups[0].groups.len() > 2);
            assert_eq!(brand_params.groups[0].groups.len(), 1);
            assert_eq!(brand_and_firmware_params.groups[0].groups.len(), 2);

            Ok(())
        });
    }

    // Exercises `update` against a mocked transport: an `OK` body succeeds,
    // a `# Error: …` body maps to `Error::Other`.
    #[tokio::test]
    async fn update() {
        let device = crate::mock_client(|req| {
            assert_eq!(req.method(), http::Method::GET);
            assert_eq!(
                req.uri().path_and_query().map(|pq| pq.as_str()),
                Some("/axis-cgi/param.cgi?action=update&foo.bar=baz+quxx")
            );

            http::Response::builder()
                .status(http::StatusCode::OK)
                .header(http::header::CONTENT_TYPE, "text/plain")
                .body(vec![b"OK".to_vec()])
        });

        let response = device
            .parameters()
            .update(vec![("foo.bar", "baz quxx")])
            .await;

        match response {
            Ok(()) => {}
            Err(e) => panic!("update should succeed: {}", e),
        };

        let device = crate::mock_client(|req| {
            assert_eq!(req.method(), http::Method::GET);
            assert_eq!(
                req.uri().path_and_query().map(|pq| pq.as_str()),
                Some("/axis-cgi/param.cgi?action=update&foo.bar=baz+quxx")
            );

            http::Response::builder()
                .status(http::StatusCode::OK)
                .header(http::header::CONTENT_TYPE, "text/plain")
                .body(vec![
                    b"# Error: Error setting 'foo.bar' to 'baz quxx'!".to_vec()
                ])
        });

        let response = device
            .parameters()
            .update(vec![("foo.bar", "baz quxx")])
            .await;

        match response {
            Err(crate::Error::Other(_)) => {}
            Ok(()) => panic!("update should fail"),
            Err(e) => panic!("update should fail with a different error: {}", e),
        };
    }
}