text stringlengths 8 4.13M |
|---|
use std::io::{stdin, stdout, Read, Write};
use termion::event::Key;
use termion::input::TermRead;
/// Prompts for key presses and echoes each one; `q` exits.
fn main() {
    let mut out = stdout();
    write!(out, "Pressione uma tecla: ").unwrap();
    out.flush().unwrap();
    for key in stdin().keys() {
        match key.unwrap() {
            // Quit on 'q'.
            Key::Char('q') => {
                write!(out, "\n\rSaindo...\n\r").unwrap();
                break;
            }
            // Echo any other printable key and re-prompt.
            Key::Char(ch) => {
                write!(out, "\n\rVocê pressionou a tecla '{}'.\n\r", ch).unwrap();
                write!(out, "Pressione outra tecla: ").unwrap();
                out.flush().unwrap();
            }
            // Non-character keys (arrows, function keys, ...) are ignored.
            _ => {}
        }
    }
}
|
/*!
```rudra-poc
[target]
crate = "acc_reader"
version = "2.0.0"
[report]
issue_url = "https://github.com/netvl/acc_reader/issues/1"
issue_date = 2020-12-27
rustsec_url = "https://github.com/RustSec/advisory-db/pull/664"
rustsec_id = "RUSTSEC-2020-0155"
[[bugs]]
analyzer = "UnsafeDataflow"
bug_class = "UninitExposure"
bug_count = 2
rudra_report_locations = ["src/lib.rs:245:5: 266:6", "src/lib.rs:194:5: 219:6"]
```
!*/
#![forbid(unsafe_code)]
// Placeholder binary: the advisory described in the doc comment above was
// filed from static analysis (Rudra) without a runnable proof-of-concept,
// so this crate intentionally just aborts.
fn main() {
    panic!("This issue was reported without PoC");
}
|
/// Reader of register DDRPHYC_DCUAR
pub type R = crate::R<u16, super::DDRPHYC_DCUAR>;
/// Writer for register DDRPHYC_DCUAR
pub type W = crate::W<u16, super::DDRPHYC_DCUAR>;
/// Register DDRPHYC_DCUAR `reset()`'s with value 0
impl crate::ResetValue for super::DDRPHYC_DCUAR {
    type Type = u16;
    /// All bits are clear after reset.
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0
    }
}
/// Reader of field `CWADDR`
pub type CWADDR_R = crate::R<u8, u8>;
/// Write proxy for field `CWADDR`
pub struct CWADDR_W<'a> {
    w: &'a mut W,
}
impl<'a> CWADDR_W<'a> {
    /// Writes raw bits to the field (bits 0:3 of the register).
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        let field = u16::from(value) & 0x0f;
        self.w.bits = (self.w.bits & !0x0f) | field;
        self.w
    }
}
/// Reader of field `CSADDR`
pub type CSADDR_R = crate::R<u8, u8>;
/// Write proxy for field `CSADDR`
pub struct CSADDR_W<'a> {
    w: &'a mut W,
}
impl<'a> CSADDR_W<'a> {
    /// Writes raw bits to the field (bits 4:7 of the register).
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        let field = (u16::from(value) & 0x0f) << 4;
        self.w.bits = (self.w.bits & !(0x0f << 4)) | field;
        self.w
    }
}
/// CSEL
///
/// Value on reset: 0
#[derive(Clone, Copy, Debug, PartialEq)]
#[repr(u8)]
pub enum CSEL_A {
    /// 0: Command cache
    B_0X0 = 0,
    /// 1: Expected data cache
    B_0X1 = 1,
    /// 2: Read data cache
    B_0X2 = 2,
}
impl From<CSEL_A> for u8 {
    #[inline(always)]
    fn from(variant: CSEL_A) -> Self {
        variant as u8
    }
}
/// Reader of field `CSEL`
pub type CSEL_R = crate::R<u8, CSEL_A>;
impl CSEL_R {
    /// Get enumerated values variant; reserved raw values are wrapped in `Res`.
    #[inline(always)]
    pub fn variant(&self) -> crate::Variant<u8, CSEL_A> {
        use crate::Variant::*;
        match self.bits {
            0 => Val(CSEL_A::B_0X0),
            1 => Val(CSEL_A::B_0X1),
            2 => Val(CSEL_A::B_0X2),
            other => Res(other),
        }
    }
    /// Checks if the value of the field is `B_0X0`
    #[inline(always)]
    pub fn is_b_0x0(&self) -> bool {
        *self == CSEL_A::B_0X0
    }
    /// Checks if the value of the field is `B_0X1`
    #[inline(always)]
    pub fn is_b_0x1(&self) -> bool {
        *self == CSEL_A::B_0X1
    }
    /// Checks if the value of the field is `B_0X2`
    #[inline(always)]
    pub fn is_b_0x2(&self) -> bool {
        *self == CSEL_A::B_0X2
    }
}
/// Write proxy for field `CSEL`
pub struct CSEL_W<'a> {
    w: &'a mut W,
}
impl<'a> CSEL_W<'a> {
    /// Writes `variant` to the field
    #[inline(always)]
    pub fn variant(self, variant: CSEL_A) -> &'a mut W {
        // Safe: every CSEL_A discriminant fits the 2-bit field.
        unsafe { self.bits(variant.into()) }
    }
    /// Command cache
    #[inline(always)]
    pub fn b_0x0(self) -> &'a mut W {
        self.variant(CSEL_A::B_0X0)
    }
    /// Expected data cache
    #[inline(always)]
    pub fn b_0x1(self) -> &'a mut W {
        self.variant(CSEL_A::B_0X1)
    }
    /// Read data cache
    #[inline(always)]
    pub fn b_0x2(self) -> &'a mut W {
        self.variant(CSEL_A::B_0X2)
    }
    /// Writes raw bits to the field (bits 8:9 of the register).
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        let field = (u16::from(value) & 0x03) << 8;
        self.w.bits = (self.w.bits & !(0x03 << 8)) | field;
        self.w
    }
}
/// Reader of field `INCA`
pub type INCA_R = crate::R<bool, bool>;
/// Write proxy for field `INCA`
pub struct INCA_W<'a> {
    w: &'a mut W,
}
impl<'a> INCA_W<'a> {
    /// Sets the field bit
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes raw bits to the field (bit 10 of the register).
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        let field = (u16::from(value) & 0x01) << 10;
        self.w.bits = (self.w.bits & !(0x01 << 10)) | field;
        self.w
    }
}
/// ATYPE
///
/// Value on reset: 0
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum ATYPE_A {
    /// 0: Write access
    B_0X0 = 0,
    /// 1: Read access
    B_0X1 = 1,
}
impl From<ATYPE_A> for bool {
    #[inline(always)]
    fn from(variant: ATYPE_A) -> Self {
        match variant {
            ATYPE_A::B_0X0 => false,
            ATYPE_A::B_0X1 => true,
        }
    }
}
/// Reader of field `ATYPE`
pub type ATYPE_R = crate::R<bool, ATYPE_A>;
impl ATYPE_R {
    /// Get enumerated values variant
    #[inline(always)]
    pub fn variant(&self) -> ATYPE_A {
        if self.bits {
            ATYPE_A::B_0X1
        } else {
            ATYPE_A::B_0X0
        }
    }
    /// Checks if the value of the field is `B_0X0`
    #[inline(always)]
    pub fn is_b_0x0(&self) -> bool {
        *self == ATYPE_A::B_0X0
    }
    /// Checks if the value of the field is `B_0X1`
    #[inline(always)]
    pub fn is_b_0x1(&self) -> bool {
        *self == ATYPE_A::B_0X1
    }
}
/// Write proxy for field `ATYPE`
pub struct ATYPE_W<'a> {
    w: &'a mut W,
}
impl<'a> ATYPE_W<'a> {
    /// Writes `variant` to the field
    #[inline(always)]
    pub fn variant(self, variant: ATYPE_A) -> &'a mut W {
        self.bit(variant.into())
    }
    /// Write access
    #[inline(always)]
    pub fn b_0x0(self) -> &'a mut W {
        self.variant(ATYPE_A::B_0X0)
    }
    /// Read access
    #[inline(always)]
    pub fn b_0x1(self) -> &'a mut W {
        self.variant(ATYPE_A::B_0X1)
    }
    /// Sets the field bit
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes raw bits to the field (bit 11 of the register).
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        let field = (u16::from(value) & 0x01) << 11;
        self.w.bits = (self.w.bits & !(0x01 << 11)) | field;
        self.w
    }
}
impl R {
    /// Bits 0:3 - CWADDR
    #[inline(always)]
    pub fn cwaddr(&self) -> CWADDR_R {
        CWADDR_R::new((self.bits & 0x0f) as u8)
    }
    /// Bits 4:7 - CSADDR
    #[inline(always)]
    pub fn csaddr(&self) -> CSADDR_R {
        CSADDR_R::new(((self.bits >> 4) & 0x0f) as u8)
    }
    /// Bits 8:9 - CSEL
    #[inline(always)]
    pub fn csel(&self) -> CSEL_R {
        CSEL_R::new(((self.bits >> 8) & 0x03) as u8)
    }
    /// Bit 10 - INCA
    #[inline(always)]
    pub fn inca(&self) -> INCA_R {
        INCA_R::new(self.bits & (0x01 << 10) != 0)
    }
    /// Bit 11 - ATYPE
    #[inline(always)]
    pub fn atype(&self) -> ATYPE_R {
        ATYPE_R::new(self.bits & (0x01 << 11) != 0)
    }
}
impl W {
    /// Bits 0:3 - CWADDR
    #[inline(always)]
    pub fn cwaddr(&mut self) -> CWADDR_W {
        CWADDR_W { w: self }
    }
    /// Bits 4:7 - CSADDR
    #[inline(always)]
    pub fn csaddr(&mut self) -> CSADDR_W {
        CSADDR_W { w: self }
    }
    /// Bits 8:9 - CSEL
    #[inline(always)]
    pub fn csel(&mut self) -> CSEL_W {
        CSEL_W { w: self }
    }
    /// Bit 10 - INCA
    #[inline(always)]
    pub fn inca(&mut self) -> INCA_W {
        INCA_W { w: self }
    }
    /// Bit 11 - ATYPE
    #[inline(always)]
    pub fn atype(&mut self) -> ATYPE_W {
        ATYPE_W { w: self }
    }
}
|
#![allow(dead_code)]
#![allow(unused_imports)]
#![allow(unused_variables)]
use std::mem;
/// Demonstrates fixed-size arrays and nested (matrix) arrays.
pub fn arrays() {
    // Ten bytes, all initialised to 1.
    let arr = [1u8; 10];
    println!("{:?}", arr);
    println!("Size of arr is {}", mem::size_of_val(&arr));
    // A 2x3 matrix as nested fixed-size arrays.
    let mtx_arr: [[u8; 3]; 2] = [[2, 3, 4], [4, 5, 9]];
    println!("{:?}", mtx_arr);
}
/// Prints the slice, then overwrites its first element (if any) with 98.
fn use_slice(slice: &mut [i8]) {
    println!("{:?}", slice);
    // Mutate only when the slice is non-empty; empty slices are a no-op.
    if let Some(head) = slice.first_mut() {
        *head = 98;
    }
}
/// Demonstrates mutable sub-slicing: only elements 1..4 are exposed,
/// so `use_slice` mutates index 1 of the backing array.
pub fn slice() {
    let mut data: [i8; 5] = [1, 2, 3, 4, 5];
    let window = &mut data[1..4];
    use_slice(window);
    println!("{:?}", data);
}
/// Demonstrates growable vectors: push, index assignment, checked get, pop.
pub fn vectors() {
    let mut nums = Vec::new();
    nums.extend_from_slice(&[6, 9, 12]);
    println!("{:?}", nums);
    nums.push(44);
    println!("{:?}", nums);
    let idx: usize = 3;
    nums[idx] = 982;
    // Checked access: prints only when the index exists.
    if let Some(val) = nums.get(2) {
        println!("{}", val);
    }
    // for x in &nums { println!("{}", x); }
    // Drain from the back, printing in reverse insertion order.
    while let Some(x) = nums.pop() {
        println!("{}", x);
    }
}
// svd2rust register/field type aliases for PDCRE. Each writer alias carries
// the register spec and the field's bit offset as const generics.
#[doc = "Register `PDCRE` reader"]
pub type R = crate::R<PDCRE_SPEC>;
#[doc = "Register `PDCRE` writer"]
pub type W = crate::W<PDCRE_SPEC>;
#[doc = "Field `PD0` reader - Port E pull-down bit y (y=0..15)"]
pub type PD0_R = crate::BitReader;
#[doc = "Field `PD0` writer - Port E pull-down bit y (y=0..15)"]
pub type PD0_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `PD1` reader - Port E pull-down bit y (y=0..15)"]
pub type PD1_R = crate::BitReader;
#[doc = "Field `PD1` writer - Port E pull-down bit y (y=0..15)"]
pub type PD1_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `PD2` reader - Port E pull-down bit y (y=0..15)"]
pub type PD2_R = crate::BitReader;
#[doc = "Field `PD2` writer - Port E pull-down bit y (y=0..15)"]
pub type PD2_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `PD3` reader - Port E pull-down bit y (y=0..15)"]
pub type PD3_R = crate::BitReader;
#[doc = "Field `PD3` writer - Port E pull-down bit y (y=0..15)"]
pub type PD3_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `PD4` reader - Port E pull-down bit y (y=0..15)"]
pub type PD4_R = crate::BitReader;
#[doc = "Field `PD4` writer - Port E pull-down bit y (y=0..15)"]
pub type PD4_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
impl R {
    /// Bit 0 - Port E pull-down bit y (y=0..15)
    #[inline(always)]
    pub fn pd0(&self) -> PD0_R {
        PD0_R::new(self.bits & 1 == 1)
    }
    /// Bit 1 - Port E pull-down bit y (y=0..15)
    #[inline(always)]
    pub fn pd1(&self) -> PD1_R {
        PD1_R::new((self.bits >> 1) & 1 == 1)
    }
    /// Bit 2 - Port E pull-down bit y (y=0..15)
    #[inline(always)]
    pub fn pd2(&self) -> PD2_R {
        PD2_R::new((self.bits >> 2) & 1 == 1)
    }
    /// Bit 3 - Port E pull-down bit y (y=0..15)
    #[inline(always)]
    pub fn pd3(&self) -> PD3_R {
        PD3_R::new((self.bits >> 3) & 1 == 1)
    }
    /// Bit 4 - Port E pull-down bit y (y=0..15)
    #[inline(always)]
    pub fn pd4(&self) -> PD4_R {
        PD4_R::new((self.bits >> 4) & 1 == 1)
    }
}
impl W {
    /// Bit 0 - Port E pull-down bit y (y=0..15)
    #[inline(always)]
    #[must_use]
    pub fn pd0(&mut self) -> PD0_W<PDCRE_SPEC, 0> {
        PD0_W::new(self)
    }
    /// Bit 1 - Port E pull-down bit y (y=0..15)
    #[inline(always)]
    #[must_use]
    pub fn pd1(&mut self) -> PD1_W<PDCRE_SPEC, 1> {
        PD1_W::new(self)
    }
    /// Bit 2 - Port E pull-down bit y (y=0..15)
    #[inline(always)]
    #[must_use]
    pub fn pd2(&mut self) -> PD2_W<PDCRE_SPEC, 2> {
        PD2_W::new(self)
    }
    /// Bit 3 - Port E pull-down bit y (y=0..15)
    #[inline(always)]
    #[must_use]
    pub fn pd3(&mut self) -> PD3_W<PDCRE_SPEC, 3> {
        PD3_W::new(self)
    }
    /// Bit 4 - Port E pull-down bit y (y=0..15)
    #[inline(always)]
    #[must_use]
    pub fn pd4(&mut self) -> PD4_W<PDCRE_SPEC, 4> {
        PD4_W::new(self)
    }
    /// Writes raw bits to the register.
    ///
    /// # Safety
    /// The caller must ensure `bits` is a valid value for this register.
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
/// Power Port E pull-down control register
///
/// You can [`read`](crate::generic::Reg::read) this register and get [`pdcre::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`pdcre::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).
pub struct PDCRE_SPEC;
impl crate::RegisterSpec for PDCRE_SPEC {
    /// PDCRE is accessed as a 32-bit register.
    type Ux = u32;
}
/// `read()` method returns [`pdcre::R`](R) reader structure
impl crate::Readable for PDCRE_SPEC {}
/// `write(|w| ..)` method takes [`pdcre::W`](W) writer structure
impl crate::Writable for PDCRE_SPEC {
    // No fields require writing a fixed 0 or 1 during modify.
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
/// `reset()` method sets PDCRE to value 0
impl crate::Resettable for PDCRE_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
#![no_std]
#![no_main]
use cortex_m::asm::nop;
use cortex_m_rt::entry;
use panic_halt as _;
use rtt_target::{rprintln, rtt_init_print};
use rp2040_pac::{Peripherals, XOSC};
mod usb;
// Second-stage bootloader blob. It must live in the dedicated `.boot_loader`
// section, and `#[used]` keeps the linker from discarding it even though
// nothing in the program references it.
#[link_section = ".boot_loader"]
#[used]
pub static BOOT_LOADER: [u8; 256] = rp2040_boot2::BOOT_LOADER;
/// Handle peripheral resets so the chip is usable.
///
/// Puts most peripherals into reset, then releases everything except the
/// clock-dependent ones (ADC, RTC, SPI, UART, USB), and spins until the
/// hardware reports the released peripherals as out of reset.
///
/// # Safety
/// Performs raw peripheral register writes; intended to be called once
/// during early boot before any other peripheral use.
unsafe fn setup_chip(p: &mut rp2040_pac::Peripherals) {
    // Now reset all the peripherals, except QSPI and XIP (we're using those
    // to execute from external flash!)
    // Note: bits written as 1 put the peripheral INTO reset; cleared bits
    // (io_qspi, pads_qspi, pll_sys, pll_usb) are left out of reset.
    p.RESETS.reset.write(|w| {
        w.adc().set_bit();
        w.busctrl().set_bit();
        w.dma().set_bit();
        w.i2c0().set_bit();
        w.i2c1().set_bit();
        w.io_bank0().set_bit();
        w.io_qspi().clear_bit();
        w.jtag().set_bit();
        w.pads_bank0().set_bit();
        w.pads_qspi().clear_bit();
        w.pio0().set_bit();
        w.pio1().set_bit();
        w.pll_sys().clear_bit();
        w.pll_usb().clear_bit();
        w.pwm().set_bit();
        w.rtc().set_bit();
        w.spi0().set_bit();
        w.spi1().set_bit();
        w.syscfg().set_bit();
        w.sysinfo().set_bit();
        w.tbman().set_bit();
        w.timer().set_bit();
        w.uart0().set_bit();
        w.uart1().set_bit();
        w.usbctrl().set_bit();
        w
    });
    // Raw bit masks mirroring the RESETS.RESET register layout.
    const RESETS_RESET_BITS: u32 = 0x01ffffff;
    const RESETS_RESET_USBCTRL_BITS: u32 = 0x01000000;
    const RESETS_RESET_UART1_BITS: u32 = 0x00800000;
    const RESETS_RESET_UART0_BITS: u32 = 0x00400000;
    const RESETS_RESET_SPI1_BITS: u32 = 0x00020000;
    const RESETS_RESET_SPI0_BITS: u32 = 0x00010000;
    const RESETS_RESET_RTC_BITS: u32 = 0x00008000;
    const RESETS_RESET_ADC_BITS: u32 = 0x00000001;
    // We want to take everything out of reset, except these peripherals:
    //
    // * ADC
    // * RTC
    // * SPI0
    // * SPI1
    // * UART0
    // * UART1
    // * USBCTRL
    //
    // These must stay in reset until the clocks are sorted out.
    const PERIPHERALS_TO_UNRESET: u32 = RESETS_RESET_BITS
        & !(RESETS_RESET_ADC_BITS
            | RESETS_RESET_RTC_BITS
            | RESETS_RESET_SPI0_BITS
            | RESETS_RESET_SPI1_BITS
            | RESETS_RESET_UART0_BITS
            | RESETS_RESET_UART1_BITS
            | RESETS_RESET_USBCTRL_BITS);
    // Write 0 to the reset field to take it out of reset
    // TODO: Figure out which should be taken out of reset here
    p.RESETS.reset.modify(|_r, w| {
        w.busctrl().clear_bit();
        w.dma().clear_bit();
        w.i2c0().clear_bit();
        w.i2c1().clear_bit();
        w.io_bank0().clear_bit();
        w.io_qspi().clear_bit();
        w.jtag().clear_bit();
        w.pads_bank0().clear_bit();
        w.pads_qspi().clear_bit();
        w.pio0().clear_bit();
        w.pio1().clear_bit();
        w.pll_sys().clear_bit();
        w.pll_usb().clear_bit();
        w.pwm().clear_bit();
        w.syscfg().clear_bit();
        w.sysinfo().clear_bit();
        w.tbman().clear_bit();
        w.timer().clear_bit();
        w
    });
    // Spin until every released peripheral reports reset-done.
    while ((!p.RESETS.reset_done.read().bits()) & PERIPHERALS_TO_UNRESET) != 0 {
        cortex_m::asm::nop();
    }
}
// Crystal oscillator frequency of the board, in MHz.
const XOSC_MHZ: u16 = 12;
// Hertz per megahertz.
const MHZ: u32 = 1_000_000;
/// Starts the external crystal oscillator (XOSC) with the startup delay
/// computed for `freq_mhz`, and blocks until it reports stable.
fn enable_xosc(osc: &mut XOSC, freq_mhz: u16) {
    // Clear BADWRITE bit in status register
    osc.status.write(|w| w.badwrite().set_bit());
    // Enable external oscillator XOSC
    osc.ctrl
        .modify(|_r, w| w.freq_range()._1_15mhz().enable().enable());
    // Calculate startup delay according to section 2.16.3 of the datasheet
    //
    // Round up in case there is no exact value found.
    let startup_delay = osc_startup_delay(freq_mhz as u32);
    // Configure startup delay
    unsafe {
        osc.startup.write(|w| w.delay().bits(startup_delay as u16));
    }
    // Wait until clock is started
    loop {
        if osc.status.read().stable().bit_is_set() {
            break;
        }
    }
    rprintln!("XOSC Status: {:#x}", osc.status.read().bits());
}
/// Startup delay for the crystal oscillator, in units of 256 clock cycles,
/// per section 2.16.3 of the RP2040 datasheet. The `+ 128` rounds the
/// division to the nearest multiple of 256 instead of truncating.
const fn osc_startup_delay(freq_mhz: u32) -> u32 {
    // `freq_mhz` is already u32; the original redundant `as u32` cast is removed.
    (((freq_mhz * MHZ) / 1000) + 128) / 256
}
/// Port of the clocks_init function from the Pico SDK
///
/// Brings up XOSC and both PLLs, then configures the ref, sys, usb, adc,
/// rtc and peri clocks. The register-write ordering below is
/// hardware-mandated (glitchless-mux switching, divisor-before-source);
/// do not reorder.
unsafe fn clocks_init(p: &mut Peripherals) {
    // Enable tick generation in Watchdog
    //
    // This is necessary to use the timer
    p.WATCHDOG
        .tick
        .write(|w| w.cycles().bits(XOSC_MHZ).enable().set_bit());
    // Disable resus, if it's active for some reason
    p.CLOCKS
        .clk_sys_resus_ctrl
        .modify(|_r, w| w.enable().clear_bit());
    // Enable external oscillator XOSC
    enable_xosc(&mut p.XOSC, XOSC_MHZ);
    // `clk_sys` and `clk_ref` must be switched from the auxiliary multiplexer (aux mux),
    // to the glitchless mux, before changing them. (See section 2.15.3.2 in the datasheet)
    // TODO: Use bitbanded register to do this atomically
    // Use reference clock, not the aux mux.
    p.CLOCKS.clk_sys_ctrl.modify(|_r, w| w.src().clk_ref());
    // Wait until clock source is changed
    while p.CLOCKS.clk_sys_selected.read().bits() != 1 {
        nop()
    }
    // TODO: Use bitbanded register to do this atomically
    // Use ring oscillator (ROSC) as the clock source, not XOSC or aux
    p.CLOCKS
        .clk_ref_ctrl
        .modify(|_r, w| w.src().rosc_clksrc_ph());
    // Wait until clock source is changed
    while p.CLOCKS.clk_ref_selected.read().bits() != 1 {
        nop()
    }
    // Setup PLLs: pulse both PLLs through reset, then wait for reset-done.
    p.RESETS
        .reset
        .modify(|_r, w| w.pll_sys().set_bit().pll_usb().set_bit());
    p.RESETS
        .reset
        .modify(|_r, w| w.pll_sys().clear_bit().pll_usb().clear_bit());
    loop {
        let reset_done = p.RESETS.reset_done.read();
        if reset_done.pll_sys().bit_is_set() && reset_done.pll_usb().bit_is_set() {
            break;
        }
    }
    // REF FBDIV VCO POSTDIV
    // PLL SYS: 12 / 1 = 12MHz * 125 = 1500MHZ / 6 / 2 = 125MHz
    // PLL USB: 12 / 1 = 12MHz * 40 = 480 MHz / 5 / 2 = 48MHz
    pll_init(&p.PLL_SYS, XOSC_MHZ, 1, 1500 * MHZ, 6, 2);
    pll_init(&p.PLL_USB, XOSC_MHZ, 1, 480 * MHZ, 5, 2);
    // configure reference clock
    //
    // src: 12 MHz (XOSC)
    // dst: 12 MHz
    let src_freq = 12 * MHZ;
    let dst_freq = 12 * MHZ;
    // NOTE(review): here the `<< 8` happens in u32 *before* the u64 cast
    // (fine at 12 MHz, but it would overflow above ~16.7 MHz); the sys-clock
    // path below casts to u64 first — confirm and unify via `clock_divider`.
    let div = (((src_freq << 8) as u64) / dst_freq as u64) as u32;
    rprintln!("clock_ref: {} -> {} (div={})", src_freq, dst_freq, div);
    // Set the divisor first if we increase it, to avoid overspeed.
    if div > p.CLOCKS.clk_ref_div.read().bits() {
        p.CLOCKS.clk_ref_div.write(|w| w.bits(div))
    }
    p.CLOCKS.clk_ref_ctrl.modify(|_r, w| w.src().xosc_clksrc());
    // Wait for the glitchless mux to report XOSC (selected bit 2) as active.
    while (p.CLOCKS.clk_ref_selected.read().bits() & (1 << 2)) != (1 << 2) {}
    // Set the divisor again, now it's safe to set
    p.CLOCKS.clk_ref_div.write(|w| w.bits(div));
    // configure system clock
    //
    // -> should run from aux source (PLL)
    //
    // src: 125 MHz (pll)
    // dst: 125 MHz
    let src_freq = 125 * MHZ;
    let dst_freq = 125 * MHZ;
    let div = (((src_freq as u64) << 8) / dst_freq as u64) as u32;
    // Set the divisor first if we increase it, to avoid overspeed.
    if div > p.CLOCKS.clk_sys_div.read().bits() {
        p.CLOCKS.clk_sys_div.write(|w| w.bits(div))
    }
    // We would have to switch away from the aux clock source, but we know that we did that already
    // above.
    // Select PLL in aux mux
    p.CLOCKS
        .clk_sys_ctrl
        .modify(|_r, w| w.auxsrc().clksrc_pll_sys());
    // Select aux mux in glitchless mux
    p.CLOCKS
        .clk_sys_ctrl
        .modify(|_r, w| w.src().clksrc_clk_sys_aux());
    // Wait until aux mux selected
    // Aux src has offset 1 -> bit 1
    while (p.CLOCKS.clk_sys_selected.read().bits() & (1 << 1)) != (1 << 1) {}
    // Set the divisor again, now it's safe to set
    p.CLOCKS.clk_sys_div.write(|w| w.bits(div));
    // configure USB clock
    //
    // -> should run from aux source (PLL USB)
    //
    // src: 48 MHz (pll)
    // dst: 48 MHz
    let src_freq = 48 * MHZ;
    let dst_freq = 48 * MHZ;
    let div = clock_divider(src_freq, dst_freq);
    rprintln!("clock_ref: {} -> {} (div={})", src_freq, dst_freq, div);
    // Set the divisor first if we increase it, to avoid overspeed.
    if div > p.CLOCKS.clk_usb_div.read().bits() {
        p.CLOCKS.clk_usb_div.write(|w| w.bits(div))
    }
    // We would have to switch away from the aux clock source, but we know that we did that already
    // above.
    // disable the clock before switching
    p.CLOCKS.clk_usb_ctrl.modify(|_r, w| w.enable().clear_bit());
    // We have to wait 3 cycles of the target clock
    //
    // TODO: Make this generic
    //
    // For now, we know that the sysclock is 125 MHz, so waiting two clock cycles is enough
    nop();
    nop();
    // Select PLL in aux mux
    p.CLOCKS
        .clk_usb_ctrl
        .modify(|_r, w| w.auxsrc().clksrc_pll_usb());
    // Enable clock again
    p.CLOCKS.clk_usb_ctrl.modify(|_r, w| w.enable().set_bit());
    // Set the divisor again, now it's safe to set
    p.CLOCKS.clk_usb_div.write(|w| w.bits(div));
    // configure ADC clock
    //
    // -> should run from aux source (PLL USB)
    //
    // src: 48 MHz (pll)
    // dst: 48 MHz
    let src_freq = 48 * MHZ;
    let dst_freq = 48 * MHZ;
    let div = clock_divider(src_freq, dst_freq);
    // Set the divisor first if we increase it, to avoid overspeed.
    if div > p.CLOCKS.clk_adc_div.read().bits() {
        p.CLOCKS.clk_adc_div.write(|w| w.bits(div))
    }
    // We would have to switch away from the aux clock source, but we know that we did that already
    // above.
    // disable the clock before switching
    p.CLOCKS.clk_adc_ctrl.modify(|_r, w| w.enable().clear_bit());
    // We have to wait 3 cycles of the target clock
    //
    // TODO: Make this generic
    //
    // For now, we know that the sysclock is 125 MHz, so waiting two clock cycles is enough
    nop();
    nop();
    // Select PLL in aux mux
    p.CLOCKS
        .clk_adc_ctrl
        .modify(|_r, w| w.auxsrc().clksrc_pll_usb());
    // Enable clock again
    p.CLOCKS.clk_adc_ctrl.modify(|_r, w| w.enable().set_bit());
    // Set the divisor again, now it's safe to set
    p.CLOCKS.clk_adc_div.write(|w| w.bits(div));
    // configure RTC clock
    //
    // -> should run from aux source (PLL USB)
    //
    // src: 48 MHz (pll)
    // dst: 46875 Hz
    let src_freq = 48 * MHZ;
    let dst_freq = 46875;
    let div = clock_divider(src_freq, dst_freq);
    // Set the divisor first if we increase it, to avoid overspeed.
    if div > p.CLOCKS.clk_rtc_div.read().bits() {
        p.CLOCKS.clk_rtc_div.write(|w| w.bits(div))
    }
    // We would have to switch away from the aux clock source, but we know that we did that already
    // above.
    // disable the clock before switching
    p.CLOCKS.clk_rtc_ctrl.modify(|_r, w| w.enable().clear_bit());
    // We have to wait 3 cycles of the target clock
    //
    // TODO: Make this generic
    //
    // For now, we know that the sysclock is 125 MHz, so waiting two clock cycles is enough
    nop();
    nop();
    // Select PLL in aux mux
    p.CLOCKS
        .clk_rtc_ctrl
        .modify(|_r, w| w.auxsrc().clksrc_pll_usb());
    // Enable clock again
    p.CLOCKS.clk_rtc_ctrl.modify(|_r, w| w.enable().set_bit());
    // Set the divisor again, now it's safe to set
    p.CLOCKS.clk_rtc_div.write(|w| w.bits(div));
    // configure PERI clock
    //
    // -> should run from sys clk
    //
    // src: 125 MHz (pll)
    // dst: 125 MHz
    // No divisor for peri clk!
    // We would have to switch away from the aux clock source, but we know that we did that already
    // above.
    // disable the clock before switching
    p.CLOCKS
        .clk_peri_ctrl
        .modify(|_r, w| w.enable().clear_bit());
    // We have to wait 3 cycles of the target clock
    //
    // TODO: Make this generic
    //
    // For now, we know that the sysclock is 125 MHz, so waiting a few clock cycles is enough
    nop();
    nop();
    nop();
    nop();
    // Select sys clock in aux mux
    p.CLOCKS.clk_peri_ctrl.modify(|_r, w| w.auxsrc().clk_sys());
    // Enable clock again
    p.CLOCKS.clk_peri_ctrl.modify(|_r, w| w.enable().set_bit());
}
/// Computes the 24.8 fixed-point clock divider `src_freq / dst_freq`
/// (integer part in the upper bits, fraction in the low 8 bits).
const fn clock_divider(src_freq: u32, dst_freq: u32) -> u32 {
    // Widen before shifting so `src_freq << 8` cannot overflow 32 bits.
    let scaled = (src_freq as u64) << 8;
    (scaled / dst_freq as u64) as u32
}
// Both PLLs share the same register layout, so the sys block type works for either.
type Pll = rp2040_pac::pll_sys::RegisterBlock;
/// Brings up a PLL: programs REFDIV and FBDIV, powers the VCO, waits for
/// lock, then programs and enables the post dividers.
fn pll_init(
    pll: &Pll,
    osc_freq_mhz: u16,
    ref_div: u8,
    vco_freq: u32,
    post_div1: u32,
    post_div2: u8,
) {
    // Turn off PLL, in case it is already running
    unsafe {
        pll.pwr.write(|w| w.bits(0xffffffff));
        pll.fbdiv_int.write(|w| w.bits(0));
    }
    // Ref div divides the reference frequency
    let ref_mhz = osc_freq_mhz as u32 / ref_div as u32;
    unsafe {
        pll.cs.write(|w| w.refdiv().bits(ref_div));
    }
    // Feedback Divide
    //
    let fbdiv = vco_freq / (ref_mhz * MHZ);
    rprintln!("PLL REF_MHZ: {}", ref_mhz);
    rprintln!("PLL rev_div: {}", ref_div);
    rprintln!("PLL fbdiv: {}", fbdiv);
    rprintln!(
        "PLL Freq: {}",
        (osc_freq_mhz as u32 / ref_div as u32) * fbdiv / (post_div1 * post_div2 as u32)
    );
    // TODO: additional checks for PLL params
    // Feedback divider must stay in the supported 16..=320 range.
    assert!((16..=320).contains(&fbdiv));
    unsafe { pll.fbdiv_int.write(|w| w.fbdiv_int().bits(fbdiv as u16)) }
    // Power up the PLL and its VCO.
    pll.pwr
        .modify(|_r, w| w.pd().clear_bit().vcopd().clear_bit());
    // Wait for PLL to lock
    while pll.cs.read().lock().bit_is_clear() {}
    // Set up post dividers
    unsafe {
        pll.prim.write(|w| {
            w.postdiv1()
                .bits(post_div1 as u8)
                .postdiv2()
                .bits(post_div2)
        });
    }
    // Turn on post divider
    pll.pwr.modify(|_r, w| w.postdivpd().clear_bit());
}
// Every reset bit in RESETS.RESET; used to wait for all peripherals below.
const ALL_PERIPHERALS_UNRESET: u32 = 0x01ffffff;
// Firmware entry point: chip/clock bring-up, LED GPIO setup, then a USB
// device poll loop.
#[entry]
fn main() -> ! {
    rtt_init_print!(NoBlockSkip, 4096);
    let mut p = rp2040_pac::Peripherals::take().unwrap();
    // Take peripherals out of reset before touching anything else.
    unsafe {
        setup_chip(&mut p);
    }
    // Setup clocks?
    unsafe {
        clocks_init(&mut p);
    }
    // Enable all peripherals
    unsafe {
        p.RESETS.reset.write_with_zero(|w| w.bits(0));
        while ((!p.RESETS.reset_done.read().bits()) & ALL_PERIPHERALS_UNRESET) != 0 {
            cortex_m::asm::nop();
        }
    }
    rprintln!("- Reset done");
    rprintln!(
        "- PLL SYS: {} kHz",
        frequency_count_khz(&p.CLOCKS, Clock::PllSys, 12 * 1000)
    );
    rprintln!(
        "- PLL USB: {} kHz",
        frequency_count_khz(&p.CLOCKS, Clock::PllUsb, 12 * 1000)
    );
    rprintln!("CLK_USB_DIV: {:#08x}", p.CLOCKS.clk_usb_div.read().bits());
    rprintln!("CLK_USB_CTRL: {:#08x}", p.CLOCKS.clk_usb_ctrl.read().bits());
    // Prepare LED
    // Code from https://github.com/rp-rs/pico-blink-rs, by @thejpster
    //
    // Set GPIO25 to be an input (output enable is cleared)
    p.SIO.gpio_oe_clr.write(|w| unsafe {
        w.bits(1 << 25);
        w
    });
    // Set GPIO25 to be an output low (output is cleared)
    p.SIO.gpio_out_clr.write(|w| unsafe {
        w.bits(1 << 25);
        w
    });
    // Configure pin 25 for GPIO
    p.PADS_BANK0.gpio25.write(|w| {
        // Output Disable off
        w.od().clear_bit();
        // Input Enable on
        w.ie().set_bit();
        w
    });
    p.IO_BANK0.gpio25_ctrl.write(|w| {
        // Map pin 25 to SIO
        w.funcsel().sio_25();
        w
    });
    // Set GPIO25 to be an output (output enable is set)
    p.SIO.gpio_oe_set.write(|w| unsafe {
        w.bits(1 << 25);
        w
    });
    // -- END -- Code from https://github.com/rp-rs/pico-blink-rs, by @thejpster
    let resets = p.RESETS;
    let usb_ctrl = p.USBCTRL_REGS;
    let mut usb_device = usb::usb_device_init(&resets, usb_ctrl);
    // Wait for USB configuration
    while !usb_device.configured() {
        usb_device.poll();
    }
    /* Enable LED to verify we get here */
    // Set GPIO25 to be high
    p.SIO.gpio_out_set.write(|w| unsafe {
        w.bits(1 << 25);
        w
    });
    // Start to receive data from the Host
    usb_device.start_transfer(usb::EP1_OUT_ADDR, 64, None);
    // Main loop: service USB events forever (entry fn must never return).
    loop {
        usb_device.poll();
    }
}
/// Clock source for frequency counter
///
/// NOTE(review): the discriminants appear to be the FC0_SRC selector
/// encoding of the RP2040 clocks block (they are written raw into
/// `fc0_src` below) — confirm against the datasheet.
enum Clock {
    PllSys = 1,
    PllUsb = 2,
    Sys = 0x9,
    Peri = 0xa,
    Usb = 0xb,
    Adc = 0xc,
    Rtc = 0xd,
}
/// Measures the frequency of `src` in kHz using the clocks block's built-in
/// frequency counter, driven by a reference clock of `reference_freq_khz`.
/// Blocks until the measurement completes.
fn frequency_count_khz(clocks: &rp2040_pac::CLOCKS, src: Clock, reference_freq_khz: u32) -> u32 {
    // Wait until the frequency counter is idle before reprogramming it.
    while clocks.fc0_status.read().running().bit_is_set() {}
    unsafe {
        // `|_r, w|` (underscored, unused read half) for consistency with every
        // other `modify` closure in this file; avoids an unused-variable warning.
        clocks
            .fc0_ref_khz
            .modify(|_r, w| w.fc0_ref_khz().bits(reference_freq_khz));
        clocks.fc0_interval.write(|w| w.fc0_interval().bits(10));
        // Accept any measured value: min 0, max u32::MAX.
        clocks.fc0_min_khz.write(|w| w.fc0_min_khz().bits(0));
        clocks.fc0_max_khz.write(|w| w.fc0_max_khz().bits(u32::MAX));
        // Writing the source selector starts the measurement.
        clocks.fc0_src.write(|w| w.fc0_src().bits(src as u8));
    }
    // Busy-wait for completion, then read the result in kHz.
    while clocks.fc0_status.read().done().bit_is_clear() {}
    clocks.fc0_result.read().khz().bits()
}
|
mod applet;
mod seeds;
/// Loads seeds from disk (falling back to a fresh set) and runs the applet.
fn main() {
    let initial = seeds::Seeds::from_file().unwrap_or(seeds::Seeds::new());
    applet::Applet::new(initial).run()
}
|
/// Something that can be summarised for display.
pub trait Summary {
    /// Default summary: defers to `summarize_author` for attribution.
    fn summarize(&self) -> String {
        format!("Read more by {}...", self.summarize_author())
    }
    /// Required method: a short attribution string for the item's author.
    fn summarize_author(&self) -> String;
}
/// A news article with full byline metadata.
pub struct NewsArticle {
    pub headline: String,
    pub location: String,
    pub author: String,
    pub content: String,
}
impl Summary for NewsArticle {
    /// Articles use the trait's default `summarize`, supplying only the
    /// author's name.
    fn summarize_author(&self) -> String {
        self.author.to_owned()
    }
}
/// A tweet; overrides both trait methods.
pub struct Tweet {
    pub username: String,
    pub content: String,
    pub metadata: String,
}
impl Summary for Tweet {
    /// Tweet-specific summary replacing the trait default.
    fn summarize(&self) -> String {
        let author = self.summarize_author();
        format!("{} tweeted: {}", author, self.content)
    }
    /// Tweet authors are rendered as @handle.
    fn summarize_author(&self) -> String {
        format!("@{}", self.username)
    }
}
// a function which takes in a type that implements Summary trait
pub fn notify(item: &impl Summary) {
    let summary = item.summarize();
    println!("Breaking news! {}", summary);
}
// a function which also takes in a type that implements Summary trait
// the original notify function is actually syntactic sugar for this
pub fn notify_v2<T: Summary>(item: &T) {
    let summary = item.summarize();
    println!("Breaking news v2! {}", summary);
}
// if we wanted two parameters to be of the same type, we would have to use the above Trait bound
// syntax
pub fn notify_double<T: Summary>(item1: &T, item2: &T) {
    let first = item1.summarize();
    let second = item2.summarize();
    println!("First piece of breaking news! {}", first);
    println!("Second piece of breaking news! {}", second);
}
/// Unlike `notify_double`, each `impl Summary` is an independent type
/// parameter, so the two arguments may be different concrete types.
pub fn notify_double_nonbound(item1: &impl Summary, item2: &impl Summary) {
    let first = item1.summarize();
    let second = item2.summarize();
    println!("First piece of breaking news! {}", first);
    println!("Second piece of breaking news! {}", second);
}
// we can also specify that we want a type that implements multiple traits with the + syntax
// NOTE(review): both bodies are intentionally empty — these functions only
// demonstrate the bound syntax; `item` is unused.
pub fn notify_with_display(item: &(impl Summary + Display)) {}
pub fn notify_with_bound_display<T: Summary + Display>(item: &T) {}
/// Demonstrates trait bounds written with a `where` clause.
///
/// Prints both arguments (exercising the `Display` and `Debug` bounds) and
/// returns the character length of `t`'s `Display` rendering.
/// The original body was empty despite the declared `-> i32`, which does
/// not compile.
pub fn trait_bound_with_where<T, U>(t: &T, u: &U) -> i32
where
    T: Display + Clone,
    U: Clone + Debug,
{
    println!("t = {}, u = {:?}", t, u);
    t.to_string().len() as i32
}
use std::fmt::{Debug, Display};
/// A homogeneous pair of values.
struct Pair<T> {
    x: T,
    y: T,
}
impl<T> Pair<T> {
    /// Builds a pair from its two components.
    fn new(x: T, y: T) -> Self {
        Pair { x, y }
    }
}
// Conditional method: only pairs of orderable, printable types get this.
impl<T: Display + PartialOrd> Pair<T> {
    /// Prints whichever member compares largest (ties report `x`).
    fn cmp_display(&self) {
        if self.x >= self.y {
            println!("The largest member is x = {}", self.x);
        } else {
            println!("The largest member is y = {}", self.y);
        }
    }
}
// use impl Trait syntax for return type: callers only know they get
// *some* type implementing Summary, not that it is a Tweet.
fn returns_summarizable() -> impl Summary {
    let username = String::from("horse_ebooks");
    let content = String::from("of course, as you probably already know, people");
    let metadata = String::from("los angeles, ca");
    Tweet {
        username,
        content,
        metadata,
    }
}
/// Exercises the Summary trait, default methods, and the various bound styles.
fn main() {
    let tweet = Tweet {
        metadata: String::from("los angeles, ca"),
        content: String::from("hello mutuals"),
        username: String::from("the_fresh_prince"),
    };
    let news = NewsArticle {
        headline: String::from("Nice"),
        author: String::from("Collin Prince"),
        location: String::from("Los Angeles"),
        content: String::from("top of the morning"),
    };
    println!("1 new tweet: {}", tweet.summarize());
    println!("1 new article: {}", news.summarize());
    notify(&tweet);
    notify_v2(&tweet);
    // works: news and tweet both implement Summary even though they are
    // different concrete types
    notify_double_nonbound(&tweet, &news);
    // will not work, has to both be same type
    // notify_double(&tweet, &news);
    let tweet2 = Tweet {
        username: String::from("the_fresh_prince2"),
        content: String::from("goodbye mutuals"),
        metadata: String::from("los angeles, ca"),
    };
    // this will work as both args of type Tweet
    notify_double(&tweet, &tweet2);
}
|
/// Generates a contract binding module from an ABI file.
///
/// `$module` names the generated module; `$path` is forwarded to the
/// `ethabi_derive` proc-macro, which expands the dummy struct's derive into
/// the actual contract API inside the module.
#[macro_export]
macro_rules! use_contract {
    ($module: ident, $path: expr) => {
        // Generated code may legitimately trip these lints; silence them.
        #[allow(dead_code)]
        #[allow(missing_docs)]
        #[allow(unused_imports)]
        #[allow(unused_mut)]
        #[allow(unused_variables)]
        pub mod $module {
            // The derive on this placeholder type produces the real items.
            #[derive(ethabi_derive::EthabiContract)]
            #[ethabi_contract_options(path = $path)]
            struct _Dummy;
        }
    }
}
|
extern crate pest;
#[macro_use]
extern crate pest_derive;
pub mod ast;
use crate::ast::*;
use pest::iterators::Pairs;
use pest::Parser;
use std::fs;
use std::path::Path;
// Pest-generated parser driven by the grammar in `program.pest`; the derive
// also generates the `Rule` enum used throughout this module.
#[derive(Parser)]
#[grammar = "program.pest"]
struct ProgramParser;
// Parsing either yields a full AST or a human-readable error string.
type AstResult = Result<ast::Program, String>;
/// Reads the file at `path` and parses its contents into an AST.
pub fn parse_file<P: AsRef<Path>>(path: P) -> AstResult {
    let source = fs::read_to_string(path).map_err(|e| e.to_string())?;
    parse_ast(&source)
}
/// Parses program source text into an `ast::Program`.
pub fn parse_ast(text: &str) -> AstResult {
    let mut program: Pairs<Rule> =
        ProgramParser::parse(Rule::program, text).map_err(|e| e.to_string())?;
    // Descend two levels: outer `program` pair -> its first child -> that
    // child's inner pairs, which hold the declarations/commands sequence.
    program = program
        .next()
        .unwrap()
        .into_inner()
        .next()
        .unwrap()
        .into_inner();
    // The declarations section is optional: if the first pair is already the
    // command list, there are no declarations.
    let optional_declarations = program.next().unwrap();
    let (declarations, commands) = match optional_declarations.as_rule() {
        Rule::declarations => {
            let pairs = optional_declarations.into_inner();
            (Some(parse_declarations(pairs)), program.next().unwrap())
        }
        Rule::commands => (None, optional_declarations),
        _ => unreachable!(),
    };
    let commands = parse_commands(commands.into_inner());
    Ok(ast::Program {
        declarations,
        commands,
    })
}
/// Parses a single declaration: either an array with its bounds or a
/// scalar variable.
fn parse_declaration(mut pairs: Pairs<Rule>) -> Declaration {
    let decl = pairs.next().unwrap();
    match decl.as_rule() {
        Rule::arr_decl => {
            let mut parts = decl.into_inner();
            let name = parts.next().unwrap().as_str().to_owned();
            let start = parts.next().unwrap().as_str().parse().unwrap();
            let end = parts.next().unwrap().as_str().parse().unwrap();
            Declaration::Array { name, start, end }
        }
        Rule::var_decl => {
            let name = decl.into_inner().next().unwrap().as_str().to_owned();
            Declaration::Var { name }
        }
        _ => unreachable!(),
    }
}
/// Parses every child of a `declarations` rule into a declaration list.
fn parse_declarations(pairs: Pairs<Rule>) -> Declarations {
    pairs
        .map(|decl_pair| parse_declaration(decl_pair.into_inner()))
        .collect()
}
/// Parses an identifier: a plain variable, an array indexed by another
/// variable, or an array indexed by a numeric constant.
fn parse_identifier(mut pairs: Pairs<Rule>) -> Identifier {
    let name = pairs.next().unwrap().as_str().to_owned();
    match pairs.next() {
        // No index pair follows, so this is a plain variable access.
        None => Identifier::VarAccess { name },
        Some(index_pair) => match index_pair.as_rule() {
            Rule::pidentifier => Identifier::ArrAccess {
                name,
                index: index_pair.as_str().to_owned(),
            },
            Rule::num => Identifier::ArrConstAccess {
                name,
                index: index_pair.as_str().parse().unwrap(),
            },
            _ => unreachable!(),
        },
    }
}
/// Parses a value: a numeric literal or an identifier reference.
fn parse_value(mut pairs: Pairs<Rule>) -> Value {
    let token = pairs.next().unwrap();
    match token.as_rule() {
        Rule::identifier => Value::Identifier(parse_identifier(token.into_inner())),
        Rule::num => Value::Num(token.as_str().parse().unwrap()),
        _ => unreachable!(),
    }
}
/// Parses a `value RELOP value` comparison.
fn parse_condition(mut pairs: Pairs<Rule>) -> Condition {
    let left = parse_value(pairs.next().unwrap().into_inner());
    // Note: the source tokens `LE`/`GE` denote the *strict* comparisons and
    // map to `RelOp::LT`/`RelOp::GT`; `LEQ`/`GEQ` are the inclusive ones.
    // This is intentional (see the `program0` test: `a GE 0` -> RelOp::GT).
    let op = match pairs.next().unwrap().as_str() {
        "EQ" => RelOp::EQ,
        "NEQ" => RelOp::NEQ,
        "LEQ" => RelOp::LEQ,
        "LE" => RelOp::LT,
        "GEQ" => RelOp::GEQ,
        "GE" => RelOp::GT,
        _ => unreachable!(),
    };
    let right = parse_value(pairs.next().unwrap().into_inner());
    Condition { left, op, right }
}
/// Parses an expression: a single value, or `value OP value` where the
/// operator keyword selects the arithmetic operation.
fn parse_expression(mut pairs: Pairs<Rule>) -> Expression {
    let lhs = parse_value(pairs.next().unwrap().into_inner());
    match pairs.next() {
        // No operator follows: a bare value.
        None => Expression::Simple { value: lhs },
        Some(op_pair) => {
            let op = match op_pair.as_str() {
                "PLUS" => ExprOp::Plus,
                "MINUS" => ExprOp::Minus,
                "TIMES" => ExprOp::Times,
                "DIV" => ExprOp::Div,
                "MOD" => ExprOp::Mod,
                _ => unreachable!(),
            };
            let rhs = parse_value(pairs.next().unwrap().into_inner());
            Expression::Compound {
                left: lhs,
                op,
                right: rhs,
            }
        }
    }
}
/// Parses `IF <cond> THEN <cmds> ELSE <cmds> ENDIF`.
fn parse_ifelse(mut pairs: Pairs<Rule>) -> Command {
    let cond = parse_condition(pairs.next().unwrap().into_inner());
    let then_branch = parse_commands(pairs.next().unwrap().into_inner());
    let else_branch = parse_commands(pairs.next().unwrap().into_inner());
    Command::IfElse {
        condition: cond,
        positive: then_branch,
        negative: else_branch,
    }
}
/// Shared helper for constructs made of one condition and one command body
/// (IF, WHILE, DO).
fn parse_conditional_command(mut pairs: Pairs<Rule>) -> (Condition, Commands) {
    let cond = parse_condition(pairs.next().unwrap().into_inner());
    let body = parse_commands(pairs.next().unwrap().into_inner());
    (cond, body)
}
/// Parses `IF <cond> THEN <cmds> ENDIF` (no ELSE branch).
fn parse_if(pairs: Pairs<Rule>) -> Command {
    let (cond, body) = parse_conditional_command(pairs);
    Command::If {
        condition: cond,
        positive: body,
    }
}
/// Parses `WHILE <cond> DO <cmds> ENDWHILE`.
fn parse_while(pairs: Pairs<Rule>) -> Command {
    let (cond, body) = parse_conditional_command(pairs);
    Command::While {
        condition: cond,
        commands: body,
    }
}
/// Parses a DO loop (condition checked after the body).
fn parse_do(pairs: Pairs<Rule>) -> Command {
    let (cond, body) = parse_conditional_command(pairs);
    Command::Do {
        condition: cond,
        commands: body,
    }
}
/// Parses `FOR <counter> FROM <value> TO|DOWNTO <value> DO <cmds> ENDFOR`.
fn parse_for(mut pairs: Pairs<Rule>) -> Command {
    let loop_var = pairs.next().unwrap().as_str().to_owned();
    let lower = parse_value(pairs.next().unwrap().into_inner());
    // The direction keyword decides whether the counter counts up or down.
    let ascending = match pairs.next().unwrap().as_str() {
        "TO" => true,
        "DOWNTO" => false,
        _ => unreachable!(),
    };
    let upper = parse_value(pairs.next().unwrap().into_inner());
    let body = parse_commands(pairs.next().unwrap().into_inner());
    Command::For {
        counter: loop_var,
        from: lower,
        ascending,
        to: upper,
        commands: body,
    }
}
/// Parses `READ <identifier>`.
fn parse_read(mut pairs: Pairs<Rule>) -> Command {
    Command::Read {
        target: parse_identifier(pairs.next().unwrap().into_inner()),
    }
}
/// Parses `WRITE <value>`.
fn parse_write(mut pairs: Pairs<Rule>) -> Command {
    Command::Write {
        value: parse_value(pairs.next().unwrap().into_inner()),
    }
}
/// Parses `<identifier> ASSIGN <expression>`.
fn parse_assign(mut pairs: Pairs<Rule>) -> Command {
    let lhs = parse_identifier(pairs.next().unwrap().into_inner());
    let rhs = parse_expression(pairs.next().unwrap().into_inner());
    Command::Assign {
        target: lhs,
        expr: rhs,
    }
}
/// Dispatches one command pair to the parser for its concrete rule.
fn parse_command(mut pairs: Pairs<Rule>) -> Command {
    let cmd = pairs.next().unwrap();
    let rule = cmd.as_rule();
    let inner = cmd.into_inner();
    match rule {
        Rule::cmd_assign => parse_assign(inner),
        Rule::cmd_if => parse_if(inner),
        Rule::cmd_ifelse => parse_ifelse(inner),
        Rule::cmd_while => parse_while(inner),
        Rule::cmd_do => parse_do(inner),
        Rule::cmd_for => parse_for(inner),
        Rule::cmd_read => parse_read(inner),
        Rule::cmd_write => parse_write(inner),
        _ => unreachable!(),
    }
}
/// Parses every child of a `commands` rule into a command list.
fn parse_commands(pairs: Pairs<Rule>) -> Commands {
    pairs
        .map(|cmd_pair| parse_command(cmd_pair.into_inner()))
        .collect()
}
#[cfg(test)]
mod tests {
use super::*;
/// Smallest valid program: no DECLARE section, one WRITE command.
#[test]
fn simplest() {
    let text = "BEGIN WRITE 0; END";
    let parsed = parse_ast(text);
    let expected = ast::Program {
        declarations: None,
        commands: vec![Command::Write {
            value: Value::Num(0),
        }],
    };
    assert_eq!(parsed.unwrap(), expected);
}
/// A DECLARE section mixing scalar variables with an array declaration
/// (inclusive `1:10` index range), followed by one command.
#[test]
fn simple_declarations() {
    let text = r#"
DECLARE a, b, c(1:10)
BEGIN
WRITE 0;
END
"#;
    let parsed = parse_ast(text);
    let expected = ast::Program {
        declarations: Some(vec![
            Declaration::Var {
                name: String::from("a"),
            },
            Declaration::Var {
                name: String::from("b"),
            },
            Declaration::Array {
                name: String::from("c"),
                start: 1,
                end: 10,
            },
        ]),
        commands: vec![Command::Write {
            value: Value::Num(0),
        }],
    };
    assert_eq!(parsed.unwrap(), expected);
}
/// End-to-end fixture: a program printing a number's binary representation.
/// Exercises comments (`[...]`), nested IF/WHILE/IFELSE, READ/WRITE, and
/// DIV/TIMES expressions. Also documents that source `GE` parses to the
/// strict `RelOp::GT`.
#[test]
fn program0() {
    let text = r#"
[ binary representation ]
DECLARE
a, b
BEGIN
READ a;
IF a GEQ 0 THEN
WHILE a GE 0 DO
b ASSIGN a DIV 2;
b ASSIGN 2 TIMES b; [ b := a & ~1 ]
IF a GE b THEN
WRITE 1;
ELSE
WRITE 0;
ENDIF
a ASSIGN a DIV 2;
ENDWHILE
ENDIF
END
"#;
    let parsed = parse_ast(text);
    // Shared identifier nodes for the two declared variables.
    let var_a = Identifier::VarAccess {
        name: String::from("a"),
    };
    let var_b = Identifier::VarAccess {
        name: String::from("b"),
    };
    let expected = ast::Program {
        declarations: Some(vec![
            Declaration::Var {
                name: String::from("a"),
            },
            Declaration::Var {
                name: String::from("b"),
            },
        ]),
        commands: vec![
            Command::Read {
                target: var_a.clone(),
            },
            Command::If {
                condition: Condition {
                    left: Value::Identifier(var_a.clone()),
                    op: RelOp::GEQ,
                    right: Value::Num(0),
                },
                positive: vec![Command::While {
                    condition: Condition {
                        left: Value::Identifier(var_a.clone()),
                        // `GE` in the source is the strict comparison.
                        op: RelOp::GT,
                        right: Value::Num(0),
                    },
                    commands: vec![
                        Command::Assign {
                            target: var_b.clone(),
                            expr: Expression::Compound {
                                left: Value::Identifier(var_a.clone()),
                                op: ExprOp::Div,
                                right: Value::Num(2),
                            },
                        },
                        Command::Assign {
                            target: var_b.clone(),
                            expr: Expression::Compound {
                                left: Value::Num(2),
                                op: ExprOp::Times,
                                right: Value::Identifier(var_b.clone()),
                            },
                        },
                        Command::IfElse {
                            condition: Condition {
                                left: Value::Identifier(var_a.clone()),
                                op: RelOp::GT,
                                right: Value::Identifier(var_b.clone()),
                            },
                            positive: vec![Command::Write {
                                value: Value::Num(1),
                            }],
                            negative: vec![Command::Write {
                                value: Value::Num(0),
                            }],
                        },
                        Command::Assign {
                            target: var_a.clone(),
                            expr: Expression::Compound {
                                left: Value::Identifier(var_a.clone()),
                                op: ExprOp::Div,
                                right: Value::Num(2),
                            },
                        },
                    ],
                }],
            },
        ],
    };
    assert_eq!(parsed.unwrap(), expected);
}
/// End-to-end fixture: sieve of Eratosthenes. Exercises FOR loops in both
/// directions (TO / DOWNTO), array accesses indexed by a variable, and a
/// nested WHILE. Note the loop counter `i` is not declared: FOR counters
/// are parsed as plain identifiers without a declaration.
#[test]
fn program1() {
    let text = r#"
[ Eratostenes' sieve ]
DECLARE
n, j, sieve(2:100)
BEGIN
n ASSIGN 100;
FOR i FROM n DOWNTO 2 DO
sieve(i) ASSIGN 1;
ENDFOR
FOR i FROM 2 TO n DO
IF sieve(i) NEQ 0 THEN
j ASSIGN i PLUS i;
WHILE j LEQ n DO
sieve(j) ASSIGN 0;
j ASSIGN j PLUS i;
ENDWHILE
WRITE i;
ENDIF
ENDFOR
END
"#;
    let parsed = parse_ast(text);
    // Shared identifier nodes reused throughout the expected AST.
    let var_n = Identifier::VarAccess {
        name: String::from("n"),
    };
    let var_j = Identifier::VarAccess {
        name: String::from("j"),
    };
    let temp_i = Identifier::VarAccess {
        name: String::from("i"),
    };
    let var_sieve = String::from("sieve");
    let expected = ast::Program {
        declarations: Some(vec![
            Declaration::Var {
                name: String::from("n"),
            },
            Declaration::Var {
                name: String::from("j"),
            },
            Declaration::Array {
                name: String::from("sieve"),
                start: 2,
                end: 100,
            },
        ]),
        commands: vec![
            Command::Assign {
                target: var_n.clone(),
                expr: Expression::Simple {
                    value: Value::Num(100),
                },
            },
            // First pass: initialize the sieve to 1, counting down.
            Command::For {
                counter: "i".to_string(),
                ascending: false,
                from: Value::Identifier(var_n.clone()),
                to: Value::Num(2),
                commands: vec![Command::Assign {
                    target: Identifier::ArrAccess {
                        name: var_sieve.clone(),
                        index: String::from("i"),
                    },
                    expr: Expression::Simple {
                        value: Value::Num(1),
                    },
                }],
            },
            // Second pass: cross out multiples and print the primes.
            Command::For {
                counter: "i".to_string(),
                ascending: true,
                from: Value::Num(2),
                to: Value::Identifier(var_n.clone()),
                commands: vec![Command::If {
                    condition: Condition {
                        left: Value::Identifier(Identifier::ArrAccess {
                            name: var_sieve.clone(),
                            index: String::from("i"),
                        }),
                        op: RelOp::NEQ,
                        right: Value::Num(0),
                    },
                    positive: vec![
                        Command::Assign {
                            target: var_j.clone(),
                            expr: Expression::Compound {
                                left: Value::Identifier(temp_i.clone()),
                                op: ExprOp::Plus,
                                right: Value::Identifier(temp_i.clone()),
                            },
                        },
                        Command::While {
                            condition: Condition {
                                left: Value::Identifier(var_j.clone()),
                                op: RelOp::LEQ,
                                right: Value::Identifier(var_n.clone()),
                            },
                            commands: vec![
                                Command::Assign {
                                    target: Identifier::ArrAccess {
                                        name: var_sieve.clone(),
                                        index: String::from("j"),
                                    },
                                    expr: Expression::Simple {
                                        value: Value::Num(0),
                                    },
                                },
                                Command::Assign {
                                    target: var_j.clone(),
                                    expr: Expression::Compound {
                                        left: Value::Identifier(var_j.clone()),
                                        op: ExprOp::Plus,
                                        right: Value::Identifier(temp_i.clone()),
                                    },
                                },
                            ],
                        },
                        Command::Write {
                            value: Value::Identifier(temp_i.clone()),
                        },
                    ],
                }],
            },
        ],
    };
    assert_eq!(parsed.unwrap(), expected);
}
}
|
use std::usize;
use super::keyboard::{Key, KeyStates};
use super::mouse::{EditorMouseState, MouseKeys, MouseState, ViewportBounds};
use crate::message_prelude::*;
use bitflags::bitflags;
#[doc(inline)]
pub use graphene::DocumentResponse;
/// Raw input events delivered by the frontend before key/mouse mapping.
/// Every event carries the modifier-key state observed at that moment so
/// the preprocessor can re-sync modifiers on each event.
#[impl_message(Message, InputPreprocessor)]
#[derive(PartialEq, Clone, Debug)]
pub enum InputPreprocessorMessage {
    MouseDown(EditorMouseState, ModifierKeys),
    MouseUp(EditorMouseState, ModifierKeys),
    MouseMove(EditorMouseState, ModifierKeys),
    MouseScroll(EditorMouseState, ModifierKeys),
    KeyUp(Key, ModifierKeys),
    KeyDown(Key, ModifierKeys),
    // New viewport bounds; the handler currently asserts exactly one viewport.
    BoundsOfViewports(Vec<ViewportBounds>),
}
bitflags! {
    /// Bit set of keyboard modifiers attached to every input event.
    #[derive(Default)]
    #[repr(transparent)]
    pub struct ModifierKeys: u8 {
        const CONTROL = 0b0000_0001;
        const SHIFT = 0b0000_0010;
        const ALT = 0b0000_0100;
    }
}
/// Caches the latest known input state so each incoming event can be
/// diffed against it (e.g. detecting modifier-key or button transitions).
#[derive(Debug, Default)]
pub struct InputPreprocessor {
    // Bit set of currently pressed keys.
    pub keyboard: KeyStates,
    // Last observed mouse position, button state and scroll delta.
    pub mouse: MouseState,
    // Bounds of the (single) viewport, used to map editor coordinates.
    pub viewport_bounds: ViewportBounds,
}
/// Direction of a key/button transition derived from an input event.
enum KeyPosition {
    Pressed,
    Released,
}
impl MessageHandler<InputPreprocessorMessage, ()> for InputPreprocessor {
    /// Translates a raw frontend event into input-mapper messages while
    /// keeping the cached keyboard/mouse/viewport state in sync.
    fn process_action(&mut self, message: InputPreprocessorMessage, _data: (), responses: &mut VecDeque<Message>) {
        match message {
            InputPreprocessorMessage::MouseMove(editor_mouse_state, modifier_keys) => {
                // Modifier state is re-synced on every event in case a key
                // transition happened while focus was outside the viewport.
                self.handle_modifier_keys(modifier_keys, responses);
                let mouse_state = editor_mouse_state.to_mouse_state(&self.viewport_bounds);
                self.mouse.position = mouse_state.position;
                responses.push_back(InputMapperMessage::PointerMove.into());
            }
            InputPreprocessorMessage::MouseDown(editor_mouse_state, modifier_keys) => {
                self.handle_modifier_keys(modifier_keys, responses);
                let mouse_state = editor_mouse_state.to_mouse_state(&self.viewport_bounds);
                self.mouse.position = mouse_state.position;
                // Emits a virtual key-down for the button that changed, if any.
                if let Some(message) = self.translate_mouse_event(mouse_state, KeyPosition::Pressed) {
                    responses.push_back(message);
                }
            }
            InputPreprocessorMessage::MouseUp(editor_mouse_state, modifier_keys) => {
                self.handle_modifier_keys(modifier_keys, responses);
                let mouse_state = editor_mouse_state.to_mouse_state(&self.viewport_bounds);
                self.mouse.position = mouse_state.position;
                if let Some(message) = self.translate_mouse_event(mouse_state, KeyPosition::Released) {
                    responses.push_back(message);
                }
            }
            InputPreprocessorMessage::MouseScroll(editor_mouse_state, modifier_keys) => {
                self.handle_modifier_keys(modifier_keys, responses);
                let mouse_state = editor_mouse_state.to_mouse_state(&self.viewport_bounds);
                self.mouse.position = mouse_state.position;
                // Scroll also records the wheel delta before notifying the mapper.
                self.mouse.scroll_delta = mouse_state.scroll_delta;
                responses.push_back(InputMapperMessage::MouseScroll.into());
            }
            InputPreprocessorMessage::KeyDown(key, modifier_keys) => {
                self.handle_modifier_keys(modifier_keys, responses);
                self.keyboard.set(key as usize);
                responses.push_back(InputMapperMessage::KeyDown(key).into());
            }
            InputPreprocessorMessage::KeyUp(key, modifier_keys) => {
                self.handle_modifier_keys(modifier_keys, responses);
                self.keyboard.unset(key as usize);
                responses.push_back(InputMapperMessage::KeyUp(key).into());
            }
            InputPreprocessorMessage::BoundsOfViewports(bounds_of_viewports) => {
                assert_eq!(bounds_of_viewports.len(), 1, "Only one viewport is currently supported");
                for bounds in bounds_of_viewports {
                    let new_size = bounds.size();
                    let existing_size = self.viewport_bounds.size();
                    // NOTE(review): translating by half the size delta looks
                    // intended to keep the document visually centered when the
                    // viewport is resized — confirm with the frontend behavior.
                    let translation = (new_size - existing_size) / 2.;
                    // TODO: Extend this to multiple viewports instead of setting it to the value of this last loop iteration
                    self.viewport_bounds = bounds;
                    responses.push_back(
                        graphene::Operation::TransformLayer {
                            path: vec![],
                            transform: glam::DAffine2::from_translation(translation).to_cols_array(),
                        }
                        .into(),
                    );
                }
            }
        };
    }
    // clean user input and if possible reconstruct it
    // store the changes in the keyboard if it is a key event
    // transform canvas coordinates to document coordinates
    advertise_actions!();
}
impl InputPreprocessor {
    /// Converts a change in pressed mouse buttons into a virtual key event.
    ///
    /// Returns `None` when no button bit changed (e.g. when a drag began
    /// outside the client area, so no transition is observable here).
    fn translate_mouse_event(&mut self, new_state: MouseState, position: KeyPosition) -> Option<Message> {
        // Calculate the difference between the two key states (binary xor)
        let diff = self.mouse.mouse_keys ^ new_state.mouse_keys;
        self.mouse = new_state;
        // Exactly one changed button maps to its virtual key; more than one
        // simultaneous change is unexpected and mapped to UnknownKey.
        let key = match diff {
            MouseKeys::LEFT => Key::Lmb,
            MouseKeys::RIGHT => Key::Rmb,
            MouseKeys::MIDDLE => Key::Mmb,
            MouseKeys::NONE => return None, // self.mouse.mouse_keys was invalid, e.g. when a drag began outside the client
            _ => {
                log::warn!("The number of buttons modified at the same time was greater than 1. Modification: {:#010b}", diff);
                Key::UnknownKey
            }
        };
        Some(match position {
            KeyPosition::Pressed => InputMapperMessage::KeyDown(key).into(),
            KeyPosition::Released => InputMapperMessage::KeyUp(key).into(),
        })
    }
    /// Re-syncs all three modifier keys from the event's modifier bit set.
    fn handle_modifier_keys(&mut self, modifier_keys: ModifierKeys, responses: &mut VecDeque<Message>) {
        self.handle_modifier_key(Key::KeyControl, modifier_keys.contains(ModifierKeys::CONTROL), responses);
        self.handle_modifier_key(Key::KeyShift, modifier_keys.contains(ModifierKeys::SHIFT), responses);
        self.handle_modifier_key(Key::KeyAlt, modifier_keys.contains(ModifierKeys::ALT), responses);
    }
    /// Emits a KeyUp/KeyDown only when the cached state disagrees with the
    /// reported state, so repeated events don't spam transitions.
    fn handle_modifier_key(&mut self, key: Key, key_is_down: bool, responses: &mut VecDeque<Message>) {
        let key_was_down = self.keyboard.get(key as usize);
        if key_was_down && !key_is_down {
            self.keyboard.unset(key as usize);
            responses.push_back(InputMapperMessage::KeyUp(key).into());
        } else if !key_was_down && key_is_down {
            self.keyboard.set(key as usize);
            responses.push_back(InputMapperMessage::KeyDown(key).into());
        }
    }
}
#[cfg(test)]
mod test {
    use crate::input::mouse::ViewportPosition;
    use super::*;

    /// A mouse-move must first sync modifier keys (emitting KeyDown for the
    /// newly pressed ALT) before the pointer-move message.
    #[test]
    fn process_action_mouse_move_handle_modifier_keys() {
        let mut input_preprocessor = InputPreprocessor::default();
        let mut editor_mouse_state = EditorMouseState::new();
        editor_mouse_state.editor_position = ViewportPosition::new(4., 809.);
        let message = InputPreprocessorMessage::MouseMove(editor_mouse_state, ModifierKeys::ALT);
        let mut responses = VecDeque::new();
        input_preprocessor.process_action(message, (), &mut responses);
        assert!(input_preprocessor.keyboard.get(Key::KeyAlt as usize));
        assert_eq!(responses.pop_front(), Some(InputMapperMessage::KeyDown(Key::KeyAlt).into()));
    }

    /// A mouse-down event carrying CONTROL records the modifier and emits
    /// its KeyDown first.
    #[test]
    fn process_action_mouse_down_handle_modifier_keys() {
        let mut input_preprocessor = InputPreprocessor::default();
        let message = InputPreprocessorMessage::MouseDown(EditorMouseState::new(), ModifierKeys::CONTROL);
        let mut responses = VecDeque::new();
        input_preprocessor.process_action(message, (), &mut responses);
        assert!(input_preprocessor.keyboard.get(Key::KeyControl as usize));
        assert_eq!(responses.pop_front(), Some(InputMapperMessage::KeyDown(Key::KeyControl).into()));
    }

    /// A mouse-up event carrying SHIFT records the modifier and emits its
    /// KeyDown first.
    #[test]
    fn process_action_mouse_up_handle_modifier_keys() {
        let mut input_preprocessor = InputPreprocessor::default();
        let message = InputPreprocessorMessage::MouseUp(EditorMouseState::new(), ModifierKeys::SHIFT);
        let mut responses = VecDeque::new();
        input_preprocessor.process_action(message, (), &mut responses);
        assert!(input_preprocessor.keyboard.get(Key::KeyShift as usize));
        assert_eq!(responses.pop_front(), Some(InputMapperMessage::KeyDown(Key::KeyShift).into()));
    }

    /// A key-down whose modifier set no longer contains CONTROL must release
    /// the previously held CONTROL key (KeyUp) before handling the key.
    #[test]
    fn process_action_key_down_handle_modifier_keys() {
        let mut input_preprocessor = InputPreprocessor::default();
        input_preprocessor.keyboard.set(Key::KeyControl as usize);
        let message = InputPreprocessorMessage::KeyDown(Key::KeyA, ModifierKeys::empty());
        let mut responses = VecDeque::new();
        input_preprocessor.process_action(message, (), &mut responses);
        assert!(!input_preprocessor.keyboard.get(Key::KeyControl as usize));
        assert_eq!(responses.pop_front(), Some(InputMapperMessage::KeyUp(Key::KeyControl).into()));
    }

    /// A key-up carrying CONTROL|SHIFT must press both modifiers.
    #[test]
    fn process_action_key_up_handle_modifier_keys() {
        let mut input_preprocessor = InputPreprocessor::default();
        let message = InputPreprocessorMessage::KeyUp(Key::KeyS, ModifierKeys::CONTROL | ModifierKeys::SHIFT);
        let mut responses = VecDeque::new();
        input_preprocessor.process_action(message, (), &mut responses);
        assert!(input_preprocessor.keyboard.get(Key::KeyControl as usize));
        assert!(input_preprocessor.keyboard.get(Key::KeyShift as usize));
        assert!(responses.contains(&InputMapperMessage::KeyDown(Key::KeyControl).into()));
        // Bug fix: this assertion previously duplicated the CONTROL check,
        // leaving the SHIFT key-down response unverified.
        assert!(responses.contains(&InputMapperMessage::KeyDown(Key::KeyShift).into()));
    }
}
|
#[doc = "Register `OPTR` reader"]
pub type R = crate::R<OPTR_SPEC>;
#[doc = "Register `OPTR` writer"]
pub type W = crate::W<OPTR_SPEC>;
#[doc = "Field `RDP` reader - Read protection level"]
pub type RDP_R = crate::FieldReader;
#[doc = "Field `RDP` writer - Read protection level"]
pub type RDP_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 8, O>;
#[doc = "Field `BOR_LEV` reader - BOR reset Level"]
pub type BOR_LEV_R = crate::FieldReader;
#[doc = "Field `BOR_LEV` writer - BOR reset Level"]
pub type BOR_LEV_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 3, O>;
#[doc = "Field `nRST_STOP` reader - nRST_STOP"]
pub type N_RST_STOP_R = crate::BitReader;
#[doc = "Field `nRST_STOP` writer - nRST_STOP"]
pub type N_RST_STOP_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `nRST_STDBY` reader - nRST_STDBY"]
pub type N_RST_STDBY_R = crate::BitReader;
#[doc = "Field `nRST_STDBY` writer - nRST_STDBY"]
pub type N_RST_STDBY_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `IDWG_SW` reader - Independent watchdog selection"]
pub type IDWG_SW_R = crate::BitReader<IDWG_SW_A>;
#[doc = "Independent watchdog selection\n\nValue on reset: 1"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum IDWG_SW_A {
#[doc = "0: Hardware independent watchdog"]
Hardware = 0,
#[doc = "1: Software independent watchdog"]
Software = 1,
}
impl From<IDWG_SW_A> for bool {
#[inline(always)]
fn from(variant: IDWG_SW_A) -> Self {
variant as u8 != 0
}
}
impl IDWG_SW_R {
#[doc = "Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> IDWG_SW_A {
match self.bits {
false => IDWG_SW_A::Hardware,
true => IDWG_SW_A::Software,
}
}
#[doc = "Hardware independent watchdog"]
#[inline(always)]
pub fn is_hardware(&self) -> bool {
*self == IDWG_SW_A::Hardware
}
#[doc = "Software independent watchdog"]
#[inline(always)]
pub fn is_software(&self) -> bool {
*self == IDWG_SW_A::Software
}
}
#[doc = "Field `IDWG_SW` writer - Independent watchdog selection"]
pub type IDWG_SW_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, IDWG_SW_A>;
impl<'a, REG, const O: u8> IDWG_SW_W<'a, REG, O>
where
REG: crate::Writable + crate::RegisterSpec,
{
#[doc = "Hardware independent watchdog"]
#[inline(always)]
pub fn hardware(self) -> &'a mut crate::W<REG> {
self.variant(IDWG_SW_A::Hardware)
}
#[doc = "Software independent watchdog"]
#[inline(always)]
pub fn software(self) -> &'a mut crate::W<REG> {
self.variant(IDWG_SW_A::Software)
}
}
#[doc = "Field `IWDG_STOP` reader - Independent watchdog counter freeze in Stop mode"]
pub type IWDG_STOP_R = crate::BitReader<IWDG_STOP_A>;
#[doc = "Independent watchdog counter freeze in Stop mode\n\nValue on reset: 1"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum IWDG_STOP_A {
#[doc = "0: Independent watchdog counter is frozen in Stop mode"]
Frozen = 0,
#[doc = "1: Independent watchdog counter is running in Stop mode"]
Running = 1,
}
impl From<IWDG_STOP_A> for bool {
#[inline(always)]
fn from(variant: IWDG_STOP_A) -> Self {
variant as u8 != 0
}
}
impl IWDG_STOP_R {
#[doc = "Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> IWDG_STOP_A {
match self.bits {
false => IWDG_STOP_A::Frozen,
true => IWDG_STOP_A::Running,
}
}
#[doc = "Independent watchdog counter is frozen in Stop mode"]
#[inline(always)]
pub fn is_frozen(&self) -> bool {
*self == IWDG_STOP_A::Frozen
}
#[doc = "Independent watchdog counter is running in Stop mode"]
#[inline(always)]
pub fn is_running(&self) -> bool {
*self == IWDG_STOP_A::Running
}
}
#[doc = "Field `IWDG_STOP` writer - Independent watchdog counter freeze in Stop mode"]
pub type IWDG_STOP_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, IWDG_STOP_A>;
impl<'a, REG, const O: u8> IWDG_STOP_W<'a, REG, O>
where
REG: crate::Writable + crate::RegisterSpec,
{
#[doc = "Independent watchdog counter is frozen in Stop mode"]
#[inline(always)]
pub fn frozen(self) -> &'a mut crate::W<REG> {
self.variant(IWDG_STOP_A::Frozen)
}
#[doc = "Independent watchdog counter is running in Stop mode"]
#[inline(always)]
pub fn running(self) -> &'a mut crate::W<REG> {
self.variant(IWDG_STOP_A::Running)
}
}
#[doc = "Field `IWDG_STDBY` reader - Independent watchdog counter freeze in Standby mode"]
pub type IWDG_STDBY_R = crate::BitReader<IWDG_STDBY_A>;
#[doc = "Independent watchdog counter freeze in Standby mode\n\nValue on reset: 1"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum IWDG_STDBY_A {
#[doc = "0: Independent watchdog counter is frozen in Standby mode"]
Frozen = 0,
#[doc = "1: Independent watchdog counter is running in Standby mode"]
Running = 1,
}
impl From<IWDG_STDBY_A> for bool {
#[inline(always)]
fn from(variant: IWDG_STDBY_A) -> Self {
variant as u8 != 0
}
}
impl IWDG_STDBY_R {
#[doc = "Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> IWDG_STDBY_A {
match self.bits {
false => IWDG_STDBY_A::Frozen,
true => IWDG_STDBY_A::Running,
}
}
#[doc = "Independent watchdog counter is frozen in Standby mode"]
#[inline(always)]
pub fn is_frozen(&self) -> bool {
*self == IWDG_STDBY_A::Frozen
}
#[doc = "Independent watchdog counter is running in Standby mode"]
#[inline(always)]
pub fn is_running(&self) -> bool {
*self == IWDG_STDBY_A::Running
}
}
#[doc = "Field `IWDG_STDBY` writer - Independent watchdog counter freeze in Standby mode"]
pub type IWDG_STDBY_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, IWDG_STDBY_A>;
impl<'a, REG, const O: u8> IWDG_STDBY_W<'a, REG, O>
where
REG: crate::Writable + crate::RegisterSpec,
{
#[doc = "Independent watchdog counter is frozen in Standby mode"]
#[inline(always)]
pub fn frozen(self) -> &'a mut crate::W<REG> {
self.variant(IWDG_STDBY_A::Frozen)
}
#[doc = "Independent watchdog counter is running in Standby mode"]
#[inline(always)]
pub fn running(self) -> &'a mut crate::W<REG> {
self.variant(IWDG_STDBY_A::Running)
}
}
#[doc = "Field `WWDG_SW` reader - Window watchdog selection"]
pub type WWDG_SW_R = crate::BitReader<WWDG_SW_A>;
#[doc = "Window watchdog selection\n\nValue on reset: 1"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum WWDG_SW_A {
#[doc = "0: Hardware window watchdog"]
Hardware = 0,
#[doc = "1: Software window watchdog"]
Software = 1,
}
impl From<WWDG_SW_A> for bool {
#[inline(always)]
fn from(variant: WWDG_SW_A) -> Self {
variant as u8 != 0
}
}
impl WWDG_SW_R {
#[doc = "Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> WWDG_SW_A {
match self.bits {
false => WWDG_SW_A::Hardware,
true => WWDG_SW_A::Software,
}
}
#[doc = "Hardware window watchdog"]
#[inline(always)]
pub fn is_hardware(&self) -> bool {
*self == WWDG_SW_A::Hardware
}
#[doc = "Software window watchdog"]
#[inline(always)]
pub fn is_software(&self) -> bool {
*self == WWDG_SW_A::Software
}
}
#[doc = "Field `WWDG_SW` writer - Window watchdog selection"]
pub type WWDG_SW_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, WWDG_SW_A>;
impl<'a, REG, const O: u8> WWDG_SW_W<'a, REG, O>
where
REG: crate::Writable + crate::RegisterSpec,
{
#[doc = "Hardware window watchdog"]
#[inline(always)]
pub fn hardware(self) -> &'a mut crate::W<REG> {
self.variant(WWDG_SW_A::Hardware)
}
#[doc = "Software window watchdog"]
#[inline(always)]
pub fn software(self) -> &'a mut crate::W<REG> {
self.variant(WWDG_SW_A::Software)
}
}
#[doc = "Field `BFB2` reader - Dual-bank boot"]
pub type BFB2_R = crate::BitReader<BFB2_A>;
#[doc = "Dual-bank boot\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum BFB2_A {
#[doc = "0: Dual-bank boot disabled"]
Disabled = 0,
#[doc = "1: Dual-bank boot enabled"]
Enabled = 1,
}
impl From<BFB2_A> for bool {
#[inline(always)]
fn from(variant: BFB2_A) -> Self {
variant as u8 != 0
}
}
impl BFB2_R {
#[doc = "Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> BFB2_A {
match self.bits {
false => BFB2_A::Disabled,
true => BFB2_A::Enabled,
}
}
#[doc = "Dual-bank boot disabled"]
#[inline(always)]
pub fn is_disabled(&self) -> bool {
*self == BFB2_A::Disabled
}
#[doc = "Dual-bank boot enabled"]
#[inline(always)]
pub fn is_enabled(&self) -> bool {
*self == BFB2_A::Enabled
}
}
#[doc = "Field `BFB2` writer - Dual-bank boot"]
pub type BFB2_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, BFB2_A>;
impl<'a, REG, const O: u8> BFB2_W<'a, REG, O>
where
REG: crate::Writable + crate::RegisterSpec,
{
#[doc = "Dual-bank boot disabled"]
#[inline(always)]
pub fn disabled(self) -> &'a mut crate::W<REG> {
self.variant(BFB2_A::Disabled)
}
#[doc = "Dual-bank boot enabled"]
#[inline(always)]
pub fn enabled(self) -> &'a mut crate::W<REG> {
self.variant(BFB2_A::Enabled)
}
}
#[doc = "Field `DB1M` reader - "]
pub type DB1M_R = crate::BitReader<DB1M_A>;
#[doc = "\n\nValue on reset: 1"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum DB1M_A {
#[doc = "0: Single Flash contiguous address in Bank 1"]
SingleBank = 0,
#[doc = "1: Dual-bank Flash with contiguous addresses"]
DualBank = 1,
}
impl From<DB1M_A> for bool {
#[inline(always)]
fn from(variant: DB1M_A) -> Self {
variant as u8 != 0
}
}
impl DB1M_R {
#[doc = "Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> DB1M_A {
match self.bits {
false => DB1M_A::SingleBank,
true => DB1M_A::DualBank,
}
}
#[doc = "Single Flash contiguous address in Bank 1"]
#[inline(always)]
pub fn is_single_bank(&self) -> bool {
*self == DB1M_A::SingleBank
}
#[doc = "Dual-bank Flash with contiguous addresses"]
#[inline(always)]
pub fn is_dual_bank(&self) -> bool {
*self == DB1M_A::DualBank
}
}
#[doc = "Field `DB1M` writer - "]
pub type DB1M_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, DB1M_A>;
impl<'a, REG, const O: u8> DB1M_W<'a, REG, O>
where
REG: crate::Writable + crate::RegisterSpec,
{
#[doc = "Single Flash contiguous address in Bank 1"]
#[inline(always)]
pub fn single_bank(self) -> &'a mut crate::W<REG> {
self.variant(DB1M_A::SingleBank)
}
#[doc = "Dual-bank Flash with contiguous addresses"]
#[inline(always)]
pub fn dual_bank(self) -> &'a mut crate::W<REG> {
self.variant(DB1M_A::DualBank)
}
}
#[doc = "Field `DBANK` reader - "]
pub type DBANK_R = crate::BitReader<DBANK_A>;
#[doc = "\n\nValue on reset: 1"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum DBANK_A {
#[doc = "0: Single-bank mode with 128 bits data read width"]
SingleBankMode = 0,
#[doc = "1: Dual-bank mode with 64 bits data"]
DualBankMode = 1,
}
impl From<DBANK_A> for bool {
#[inline(always)]
fn from(variant: DBANK_A) -> Self {
variant as u8 != 0
}
}
impl DBANK_R {
#[doc = "Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> DBANK_A {
match self.bits {
false => DBANK_A::SingleBankMode,
true => DBANK_A::DualBankMode,
}
}
#[doc = "Single-bank mode with 128 bits data read width"]
#[inline(always)]
pub fn is_single_bank_mode(&self) -> bool {
*self == DBANK_A::SingleBankMode
}
#[doc = "Dual-bank mode with 64 bits data"]
#[inline(always)]
pub fn is_dual_bank_mode(&self) -> bool {
*self == DBANK_A::DualBankMode
}
}
#[doc = "Field `DBANK` writer - "]
pub type DBANK_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, DBANK_A>;
impl<'a, REG, const O: u8> DBANK_W<'a, REG, O>
where
REG: crate::Writable + crate::RegisterSpec,
{
#[doc = "Single-bank mode with 128 bits data read width"]
#[inline(always)]
pub fn single_bank_mode(self) -> &'a mut crate::W<REG> {
self.variant(DBANK_A::SingleBankMode)
}
#[doc = "Dual-bank mode with 64 bits data"]
#[inline(always)]
pub fn dual_bank_mode(self) -> &'a mut crate::W<REG> {
self.variant(DBANK_A::DualBankMode)
}
}
#[doc = "Field `nBOOT1` reader - Boot configuration"]
pub type N_BOOT1_R = crate::BitReader;
#[doc = "Field `nBOOT1` writer - Boot configuration"]
pub type N_BOOT1_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `SRAM2_PE` reader - SRAM2 parity check enable"]
pub type SRAM2_PE_R = crate::BitReader<SRAM2_PE_A>;
#[doc = "SRAM2 parity check enable\n\nValue on reset: 1"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum SRAM2_PE_A {
#[doc = "0: SRAM2 parity check enabled"]
Enabled = 0,
#[doc = "1: SRAM2 parity check disabled"]
Disabled = 1,
}
impl From<SRAM2_PE_A> for bool {
#[inline(always)]
fn from(variant: SRAM2_PE_A) -> Self {
variant as u8 != 0
}
}
impl SRAM2_PE_R {
#[doc = "Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> SRAM2_PE_A {
match self.bits {
false => SRAM2_PE_A::Enabled,
true => SRAM2_PE_A::Disabled,
}
}
#[doc = "SRAM2 parity check enabled"]
#[inline(always)]
pub fn is_enabled(&self) -> bool {
*self == SRAM2_PE_A::Enabled
}
#[doc = "SRAM2 parity check disabled"]
#[inline(always)]
pub fn is_disabled(&self) -> bool {
*self == SRAM2_PE_A::Disabled
}
}
#[doc = "Field `SRAM2_PE` writer - SRAM2 parity check enable"]
pub type SRAM2_PE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, SRAM2_PE_A>;
impl<'a, REG, const O: u8> SRAM2_PE_W<'a, REG, O>
where
REG: crate::Writable + crate::RegisterSpec,
{
#[doc = "SRAM2 parity check enabled"]
#[inline(always)]
pub fn enabled(self) -> &'a mut crate::W<REG> {
self.variant(SRAM2_PE_A::Enabled)
}
#[doc = "SRAM2 parity check disabled"]
#[inline(always)]
pub fn disabled(self) -> &'a mut crate::W<REG> {
self.variant(SRAM2_PE_A::Disabled)
}
}
#[doc = "Field `SRAM2_RST` reader - SRAM2 Erase when system reset"]
pub type SRAM2_RST_R = crate::BitReader<SRAM2_RST_A>;
#[doc = "SRAM2 Erase when system reset\n\nValue on reset: 1"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum SRAM2_RST_A {
#[doc = "0: SRAM2 erased when a system reset occurs"]
Enabled = 0,
#[doc = "1: SRAM2 is not erased when a system reset occurs"]
Disabled = 1,
}
impl From<SRAM2_RST_A> for bool {
#[inline(always)]
fn from(variant: SRAM2_RST_A) -> Self {
variant as u8 != 0
}
}
// Decoding helpers for the SRAM2_RST bit read out of OPTR.
impl SRAM2_RST_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> SRAM2_RST_A {
        // Bit clear => SRAM2 is erased on system reset; bit set => kept.
        match self.bits {
            false => SRAM2_RST_A::Enabled,
            true => SRAM2_RST_A::Disabled,
        }
    }
    #[doc = "SRAM2 erased when a system reset occurs"]
    #[inline(always)]
    pub fn is_enabled(&self) -> bool {
        *self == SRAM2_RST_A::Enabled
    }
    #[doc = "SRAM2 is not erased when a system reset occurs"]
    #[inline(always)]
    pub fn is_disabled(&self) -> bool {
        *self == SRAM2_RST_A::Disabled
    }
}
#[doc = "Field `SRAM2_RST` writer - SRAM2 Erase when system reset"]
pub type SRAM2_RST_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, SRAM2_RST_A>;
// Named setters so callers can write the field without raw bit values.
impl<'a, REG, const O: u8> SRAM2_RST_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "SRAM2 erased when a system reset occurs"]
    #[inline(always)]
    pub fn enabled(self) -> &'a mut crate::W<REG> {
        self.variant(SRAM2_RST_A::Enabled)
    }
    #[doc = "SRAM2 is not erased when a system reset occurs"]
    #[inline(always)]
    pub fn disabled(self) -> &'a mut crate::W<REG> {
        self.variant(SRAM2_RST_A::Disabled)
    }
}
#[doc = "Field `nSWBOOT0` reader - nSWBOOT0 option bit"]
// Reader for the single nSWBOOT0 bit, decoded into N_SWBOOT0_A below.
pub type N_SWBOOT0_R = crate::BitReader<N_SWBOOT0_A>;
#[doc = "nSWBOOT0 option bit\n\nValue on reset: 1"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum N_SWBOOT0_A {
    #[doc = "0: BOOT0 taken from the option bit nBOOT0"]
    OptionBit = 0,
    #[doc = "1: BOOT0 taken from PH3/BOOT0 pin"]
    Pin = 1,
}
impl From<N_SWBOOT0_A> for bool {
    #[inline(always)]
    fn from(variant: N_SWBOOT0_A) -> Self {
        // The discriminants (0/1) map directly onto the register bit value.
        match variant {
            N_SWBOOT0_A::OptionBit => false,
            N_SWBOOT0_A::Pin => true,
        }
    }
}
// Decoding helpers for the nSWBOOT0 bit read out of OPTR.
impl N_SWBOOT0_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> N_SWBOOT0_A {
        // Bit clear => BOOT0 comes from the nBOOT0 option bit;
        // bit set => BOOT0 comes from the PH3/BOOT0 pin.
        match self.bits {
            false => N_SWBOOT0_A::OptionBit,
            true => N_SWBOOT0_A::Pin,
        }
    }
    #[doc = "BOOT0 taken from the option bit nBOOT0"]
    #[inline(always)]
    pub fn is_option_bit(&self) -> bool {
        *self == N_SWBOOT0_A::OptionBit
    }
    #[doc = "BOOT0 taken from PH3/BOOT0 pin"]
    #[inline(always)]
    pub fn is_pin(&self) -> bool {
        *self == N_SWBOOT0_A::Pin
    }
}
#[doc = "Field `nSWBOOT0` writer - nSWBOOT0 option bit"]
pub type N_SWBOOT0_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, N_SWBOOT0_A>;
// Named setters so callers can write the field without raw bit values.
impl<'a, REG, const O: u8> N_SWBOOT0_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "BOOT0 taken from the option bit nBOOT0"]
    #[inline(always)]
    pub fn option_bit(self) -> &'a mut crate::W<REG> {
        self.variant(N_SWBOOT0_A::OptionBit)
    }
    #[doc = "BOOT0 taken from PH3/BOOT0 pin"]
    #[inline(always)]
    pub fn pin(self) -> &'a mut crate::W<REG> {
        self.variant(N_SWBOOT0_A::Pin)
    }
}
#[doc = "Field `nBOOT0` reader - nBOOT0 option bit"]
// Reader for the single nBOOT0 bit, decoded into N_BOOT0_A below.
pub type N_BOOT0_R = crate::BitReader<N_BOOT0_A>;
#[doc = "nBOOT0 option bit\n\nValue on reset: 1"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum N_BOOT0_A {
    #[doc = "0: nBOOT0 = 0"]
    Disabled = 0,
    #[doc = "1: nBOOT0 = 1"]
    Enabled = 1,
}
impl From<N_BOOT0_A> for bool {
    #[inline(always)]
    fn from(variant: N_BOOT0_A) -> Self {
        // The discriminants (0/1) map directly onto the register bit value.
        match variant {
            N_BOOT0_A::Disabled => false,
            N_BOOT0_A::Enabled => true,
        }
    }
}
// Decoding helpers for the nBOOT0 bit read out of OPTR.
impl N_BOOT0_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> N_BOOT0_A {
        // Unlike the fields above, this bit's polarity is direct:
        // clear => Disabled (nBOOT0 = 0), set => Enabled (nBOOT0 = 1).
        match self.bits {
            false => N_BOOT0_A::Disabled,
            true => N_BOOT0_A::Enabled,
        }
    }
    #[doc = "nBOOT0 = 0"]
    #[inline(always)]
    pub fn is_disabled(&self) -> bool {
        *self == N_BOOT0_A::Disabled
    }
    #[doc = "nBOOT0 = 1"]
    #[inline(always)]
    pub fn is_enabled(&self) -> bool {
        *self == N_BOOT0_A::Enabled
    }
}
#[doc = "Field `nBOOT0` writer - nBOOT0 option bit"]
pub type N_BOOT0_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, N_BOOT0_A>;
// Named setters so callers can write the field without raw bit values.
impl<'a, REG, const O: u8> N_BOOT0_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "nBOOT0 = 0"]
    #[inline(always)]
    pub fn disabled(self) -> &'a mut crate::W<REG> {
        self.variant(N_BOOT0_A::Disabled)
    }
    #[doc = "nBOOT0 = 1"]
    #[inline(always)]
    pub fn enabled(self) -> &'a mut crate::W<REG> {
        self.variant(N_BOOT0_A::Enabled)
    }
}
// Read accessors for the OPTR (flash option) register: each getter
// shift-and-masks one field out of the cached 32-bit register value.
impl R {
    #[doc = "Bits 0:7 - Read protection level"]
    #[inline(always)]
    pub fn rdp(&self) -> RDP_R {
        RDP_R::new((self.bits & 0xff) as u8)
    }
    #[doc = "Bits 8:10 - BOR reset Level"]
    #[inline(always)]
    pub fn bor_lev(&self) -> BOR_LEV_R {
        BOR_LEV_R::new(((self.bits >> 8) & 7) as u8)
    }
    #[doc = "Bit 12 - nRST_STOP"]
    #[inline(always)]
    pub fn n_rst_stop(&self) -> N_RST_STOP_R {
        N_RST_STOP_R::new(((self.bits >> 12) & 1) != 0)
    }
    #[doc = "Bit 13 - nRST_STDBY"]
    #[inline(always)]
    pub fn n_rst_stdby(&self) -> N_RST_STDBY_R {
        N_RST_STDBY_R::new(((self.bits >> 13) & 1) != 0)
    }
    #[doc = "Bit 16 - Independent watchdog selection"]
    #[inline(always)]
    // NOTE(review): `idwg_sw` looks like a typo for `iwdg_sw`, carried over
    // from the generated field name; renaming would break existing callers.
    pub fn idwg_sw(&self) -> IDWG_SW_R {
        IDWG_SW_R::new(((self.bits >> 16) & 1) != 0)
    }
    #[doc = "Bit 17 - Independent watchdog counter freeze in Stop mode"]
    #[inline(always)]
    pub fn iwdg_stop(&self) -> IWDG_STOP_R {
        IWDG_STOP_R::new(((self.bits >> 17) & 1) != 0)
    }
    #[doc = "Bit 18 - Independent watchdog counter freeze in Standby mode"]
    #[inline(always)]
    pub fn iwdg_stdby(&self) -> IWDG_STDBY_R {
        IWDG_STDBY_R::new(((self.bits >> 18) & 1) != 0)
    }
    #[doc = "Bit 19 - Window watchdog selection"]
    #[inline(always)]
    pub fn wwdg_sw(&self) -> WWDG_SW_R {
        WWDG_SW_R::new(((self.bits >> 19) & 1) != 0)
    }
    #[doc = "Bit 20 - Dual-bank boot"]
    #[inline(always)]
    pub fn bfb2(&self) -> BFB2_R {
        BFB2_R::new(((self.bits >> 20) & 1) != 0)
    }
    #[doc = "Bit 21"]
    #[inline(always)]
    pub fn db1m(&self) -> DB1M_R {
        DB1M_R::new(((self.bits >> 21) & 1) != 0)
    }
    #[doc = "Bit 22"]
    #[inline(always)]
    pub fn dbank(&self) -> DBANK_R {
        DBANK_R::new(((self.bits >> 22) & 1) != 0)
    }
    #[doc = "Bit 23 - Boot configuration"]
    #[inline(always)]
    pub fn n_boot1(&self) -> N_BOOT1_R {
        N_BOOT1_R::new(((self.bits >> 23) & 1) != 0)
    }
    #[doc = "Bit 24 - SRAM2 parity check enable"]
    #[inline(always)]
    pub fn sram2_pe(&self) -> SRAM2_PE_R {
        SRAM2_PE_R::new(((self.bits >> 24) & 1) != 0)
    }
    #[doc = "Bit 25 - SRAM2 Erase when system reset"]
    #[inline(always)]
    pub fn sram2_rst(&self) -> SRAM2_RST_R {
        SRAM2_RST_R::new(((self.bits >> 25) & 1) != 0)
    }
    #[doc = "Bit 26 - nSWBOOT0 option bit"]
    #[inline(always)]
    pub fn n_swboot0(&self) -> N_SWBOOT0_R {
        N_SWBOOT0_R::new(((self.bits >> 26) & 1) != 0)
    }
    #[doc = "Bit 27 - nBOOT0 option bit"]
    #[inline(always)]
    pub fn n_boot0(&self) -> N_BOOT0_R {
        N_BOOT0_R::new(((self.bits >> 27) & 1) != 0)
    }
}
// Write proxies for the OPTR register: each method returns a typed field
// writer positioned at the field's bit offset (the const generic argument).
impl W {
    #[doc = "Bits 0:7 - Read protection level"]
    #[inline(always)]
    #[must_use]
    pub fn rdp(&mut self) -> RDP_W<OPTR_SPEC, 0> {
        RDP_W::new(self)
    }
    #[doc = "Bits 8:10 - BOR reset Level"]
    #[inline(always)]
    #[must_use]
    pub fn bor_lev(&mut self) -> BOR_LEV_W<OPTR_SPEC, 8> {
        BOR_LEV_W::new(self)
    }
    #[doc = "Bit 12 - nRST_STOP"]
    #[inline(always)]
    #[must_use]
    pub fn n_rst_stop(&mut self) -> N_RST_STOP_W<OPTR_SPEC, 12> {
        N_RST_STOP_W::new(self)
    }
    #[doc = "Bit 13 - nRST_STDBY"]
    #[inline(always)]
    #[must_use]
    pub fn n_rst_stdby(&mut self) -> N_RST_STDBY_W<OPTR_SPEC, 13> {
        N_RST_STDBY_W::new(self)
    }
    #[doc = "Bit 16 - Independent watchdog selection"]
    #[inline(always)]
    #[must_use]
    // NOTE(review): `idwg_sw` mirrors the typo'd reader name above; kept
    // for interface stability.
    pub fn idwg_sw(&mut self) -> IDWG_SW_W<OPTR_SPEC, 16> {
        IDWG_SW_W::new(self)
    }
    #[doc = "Bit 17 - Independent watchdog counter freeze in Stop mode"]
    #[inline(always)]
    #[must_use]
    pub fn iwdg_stop(&mut self) -> IWDG_STOP_W<OPTR_SPEC, 17> {
        IWDG_STOP_W::new(self)
    }
    #[doc = "Bit 18 - Independent watchdog counter freeze in Standby mode"]
    #[inline(always)]
    #[must_use]
    pub fn iwdg_stdby(&mut self) -> IWDG_STDBY_W<OPTR_SPEC, 18> {
        IWDG_STDBY_W::new(self)
    }
    #[doc = "Bit 19 - Window watchdog selection"]
    #[inline(always)]
    #[must_use]
    pub fn wwdg_sw(&mut self) -> WWDG_SW_W<OPTR_SPEC, 19> {
        WWDG_SW_W::new(self)
    }
    #[doc = "Bit 20 - Dual-bank boot"]
    #[inline(always)]
    #[must_use]
    pub fn bfb2(&mut self) -> BFB2_W<OPTR_SPEC, 20> {
        BFB2_W::new(self)
    }
    #[doc = "Bit 21"]
    #[inline(always)]
    #[must_use]
    pub fn db1m(&mut self) -> DB1M_W<OPTR_SPEC, 21> {
        DB1M_W::new(self)
    }
    #[doc = "Bit 22"]
    #[inline(always)]
    #[must_use]
    pub fn dbank(&mut self) -> DBANK_W<OPTR_SPEC, 22> {
        DBANK_W::new(self)
    }
    #[doc = "Bit 23 - Boot configuration"]
    #[inline(always)]
    #[must_use]
    pub fn n_boot1(&mut self) -> N_BOOT1_W<OPTR_SPEC, 23> {
        N_BOOT1_W::new(self)
    }
    #[doc = "Bit 24 - SRAM2 parity check enable"]
    #[inline(always)]
    #[must_use]
    pub fn sram2_pe(&mut self) -> SRAM2_PE_W<OPTR_SPEC, 24> {
        SRAM2_PE_W::new(self)
    }
    #[doc = "Bit 25 - SRAM2 Erase when system reset"]
    #[inline(always)]
    #[must_use]
    pub fn sram2_rst(&mut self) -> SRAM2_RST_W<OPTR_SPEC, 25> {
        SRAM2_RST_W::new(self)
    }
    #[doc = "Bit 26 - nSWBOOT0 option bit"]
    #[inline(always)]
    #[must_use]
    pub fn n_swboot0(&mut self) -> N_SWBOOT0_W<OPTR_SPEC, 26> {
        N_SWBOOT0_W::new(self)
    }
    #[doc = "Bit 27 - nBOOT0 option bit"]
    #[inline(always)]
    #[must_use]
    pub fn n_boot0(&mut self) -> N_BOOT0_W<OPTR_SPEC, 27> {
        N_BOOT0_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    // Marked `unsafe`: raw writes bypass the typed field setters above.
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "Flash option register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`optr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`optr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct OPTR_SPEC;
// OPTR is accessed as a 32-bit register.
impl crate::RegisterSpec for OPTR_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`optr::R`](R) reader structure"]
impl crate::Readable for OPTR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`optr::W`](W) writer structure"]
// No bits need to be forced to 0 or 1 on a modify for this register.
impl crate::Writable for OPTR_SPEC {
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets OPTR to value 0xffef_f8aa"]
impl crate::Resettable for OPTR_SPEC {
    const RESET_VALUE: Self::Ux = 0xffef_f8aa;
}
|
use mongodb::oid::ObjectId;
use primitive_types::U256;
use rulinalg::matrix::Matrix;
use rust_hope::schemes::she::hope::hopeCiphertext;
use serde::{Serialize, Deserialize};
/// A structured encryption key.
///
/// Three independent 256-bit sub-keys; their exact roles are not documented
/// here — presumably the PRP / hash keys referenced by `SECiphertext::_gamma`.
/// TODO(review): confirm against the key-generation code.
#[derive(Serialize, Deserialize, PartialEq, Clone, Debug)]
pub struct SEKey {
    pub _k1: U256,
    pub _k2: U256,
    pub _k3: U256,
}
/// A structured encryption ciphertext: a lookup matrix plus the encrypted
/// vertices and edges of the graph.
#[derive(Serialize, Deserialize, PartialEq, Clone)]
pub struct SECiphertext {
    // Each entry: first 12 byte for (ObjectId xor PRP:F_k(x,y)) then
    // 20 byte for Hash_k(w).
    pub _gamma: Matrix<U256>,
    pub _vertex: Vec<SECTVertex>,
    pub _edge: Vec<SECTEdge>,
}
/// Either kind of encrypted graph object: an edge or a vertex.
#[derive(Serialize, Deserialize, PartialEq, Clone)]
pub enum SECTObject {
    Edge(SECTEdge),
    Vertex(SECTVertex),
}
/// An encrypted graph vertex: database id, encrypted coordinate and
/// AES-encrypted metadata.
// NOTE(review): the previous doc comment ("An AC17 Public Key (PK)") was a
// copy-paste error shared by several types in this file.
#[derive(Serialize, Deserialize, PartialEq, Clone)]
pub struct SECTVertex {
    pub _id: ObjectId,
    pub _coord: SECTCoord,
    pub _meta: AESCiphertext,
}
/// An encrypted graph edge: HOPE-encrypted length, a one-way flag and
/// AES-encrypted metadata.
#[derive(Serialize, Deserialize, PartialEq, Clone)]
pub struct SECTEdge {
    pub _id: ObjectId,
    pub _length: hopeCiphertext,
    // presumably a direction flag — TODO confirm the encoding with callers
    pub _oneway: i8,
    pub _meta: AESCiphertext,
}
/// An encrypted coordinate pair; the field names suggest longitude and
/// latitude, each stored as a HOPE ciphertext.
#[derive(Serialize, Deserialize, PartialEq, Clone)]
pub struct SECTCoord {
    pub _long: hopeCiphertext,
    pub _lat: hopeCiphertext,
}
/// An AES ciphertext: initialisation vector plus the encrypted bytes.
#[derive(Serialize, Deserialize, PartialEq, Clone)]
pub struct AESCiphertext {
    //pub _length: hopeCiphertext,
    pub _iv: U256,
    pub _ct: Vec<u8>,
}
impl SECTEdge {
    /// Builds an edge with a freshly generated `ObjectId`.
    ///
    /// Panics if `ObjectId` generation fails (same as the original code).
    pub fn new(_length: hopeCiphertext, _oneway: i8, _meta: AESCiphertext) -> SECTEdge {
        SECTEdge {
            _id: ObjectId::new().unwrap(),
            _length,
            _oneway,
            _meta,
        }
    }
}
impl SECTVertex {
    /// Builds a vertex with a freshly generated `ObjectId`, packing the two
    /// coordinate ciphertexts into a `SECTCoord`.
    ///
    /// Panics if `ObjectId` generation fails (same as the original code).
    pub fn new(_long: hopeCiphertext, _lat: hopeCiphertext, _meta: AESCiphertext) -> SECTVertex {
        SECTVertex {
            _id: ObjectId::new().unwrap(),
            _coord: SECTCoord::new(_long, _lat),
            _meta,
        }
    }
}
impl SECTCoord {
    /// Pairs the two coordinate ciphertexts into a `SECTCoord`.
    pub fn new(_long: hopeCiphertext, _lat: hopeCiphertext) -> SECTCoord {
        SECTCoord { _long, _lat }
    }
}
|
//
// Copyright (c) 2017, 2020 ADLINK Technology Inc.
//
// This program and the accompanying materials are made available under the
// terms of the Eclipse Public License 2.0 which is available at
// http://www.eclipse.org/legal/epl-2.0, or the Apache License, Version 2.0
// which is available at https://www.apache.org/licenses/LICENSE-2.0.
//
// SPDX-License-Identifier: EPL-2.0 OR Apache-2.0
//
// Contributors:
// ADLINK zenoh team, <zenoh@adlink-labs.tech>
//
use async_std::prelude::*;
use zenoh::net::*;
#[async_std::main]
async fn main() {
    // Establish a session using the default zenoh configuration.
    let session = open(Config::default(), None).await.unwrap();
    // Subscribe to every resource under the region01 prefix.
    let key_expr = "/region01/**";
    let mut sub = session
        .declare_subscriber(&key_expr.into(), &SubInfo::default())
        .await
        .unwrap();
    // Print each incoming sample, decoding its payload as (lossy) UTF-8.
    while let Some(sample) = sub.stream().next().await {
        let payload = sample.payload.to_vec();
        println!(
            ">> Received ('{}': '{}')",
            sample.res_name,
            String::from_utf8_lossy(&payload)
        );
    }
}
|
// Copyright 2020 Parity Technologies (UK) Ltd.
// This file is part of Substrate.
// Substrate is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Substrate is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Substrate. If not, see <http://www.gnu.org/licenses/>.
//! A RPC handler to create sync states for light clients.
//! Currently only usable with BABE + GRANDPA.
use sp_runtime::traits::{Block as BlockT, NumberFor};
use sp_blockchain::HeaderBackend;
use std::sync::Arc;
use sp_runtime::generic::BlockId;
use jsonrpc_derive::rpc;
// Aliases tying the GRANDPA authority set and the BABE epoch changes to a
// concrete block type.
type SharedAuthoritySet<TBl> =
	sc_finality_grandpa::SharedAuthoritySet<<TBl as BlockT>::Hash, NumberFor<TBl>>;
type SharedEpochChanges<TBl> = sc_consensus_epochs::SharedEpochChanges<TBl, sc_consensus_babe::Epoch>;
// Newtype so blockchain errors can be converted into JSON-RPC errors.
struct Error(sp_blockchain::Error);
impl From<Error> for jsonrpc_core::Error {
	fn from(error: Error) -> Self {
		jsonrpc_core::Error {
			message: error.0.to_string(),
			// Generic server-error code; no structured data is attached.
			code: jsonrpc_core::ErrorCode::ServerError(1),
			data: None,
		}
	}
}
/// An api for sync state RPC calls.
#[rpc]
pub trait SyncStateRpcApi {
	/// Returns the json-serialized chainspec running the node, with a sync state.
	// `raw` is forwarded to `ChainSpec::as_json` by the implementation —
	// presumably it selects the raw storage representation; see sc-chain-spec.
	#[rpc(name = "sync_state_genSyncSpec", returns = "jsonrpc_core::Value")]
	fn system_gen_sync_spec(&self, raw: bool)
		-> jsonrpc_core::Result<jsonrpc_core::Value>;
}
/// The handler for sync state RPC calls.
pub struct SyncStateRpcHandler<TBl: BlockT, TCl> {
	// The running node's chain spec; cloned and re-serialized per request.
	chain_spec: Box<dyn sc_chain_spec::ChainSpec>,
	// Backend used to resolve the finalized header and BABE block weight.
	client: Arc<TCl>,
	shared_authority_set: SharedAuthoritySet<TBl>,
	shared_epoch_changes: SharedEpochChanges<TBl>,
	// Policy guard: the RPC is rejected when unsafe calls are denied.
	deny_unsafe: sc_rpc_api::DenyUnsafe,
}
impl<TBl, TCl> SyncStateRpcHandler<TBl, TCl>
	where
		TBl: BlockT,
		TCl: HeaderBackend<TBl> + sc_client_api::AuxStore + 'static,
{
	/// Create a new handler.
	pub fn new(
		chain_spec: Box<dyn sc_chain_spec::ChainSpec>,
		client: Arc<TCl>,
		shared_authority_set: SharedAuthoritySet<TBl>,
		shared_epoch_changes: SharedEpochChanges<TBl>,
		deny_unsafe: sc_rpc_api::DenyUnsafe,
	) -> Self {
		Self {
			chain_spec, client, shared_authority_set, shared_epoch_changes, deny_unsafe,
		}
	}
	// Assembles the light-client sync state, anchored at the latest
	// finalized block known to the client. Errors if the finalized header
	// or its BABE block weight cannot be found.
	fn build_sync_state(&self) -> Result<sc_chain_spec::LightSyncState<TBl>, sp_blockchain::Error> {
		let finalized_hash = self.client.info().finalized_hash;
		let finalized_header = self.client.header(BlockId::Hash(finalized_hash))?
			.ok_or_else(|| sp_blockchain::Error::Msg(
				format!("Failed to get the header for block {:?}", finalized_hash)
			))?;
		// The block weight is persisted by BABE in the client's aux store.
		let finalized_block_weight = sc_consensus_babe::aux_schema::load_block_weight(
			&*self.client,
			finalized_hash,
		)?
			.ok_or_else(|| sp_blockchain::Error::Msg(
				format!("Failed to load the block weight for block {:?}", finalized_hash)
			))?;
		Ok(sc_chain_spec::LightSyncState {
			finalized_block_header: finalized_header,
			babe_epoch_changes: self.shared_epoch_changes.lock().clone(),
			babe_finalized_block_weight: finalized_block_weight,
			grandpa_authority_set: self.shared_authority_set.clone_inner(),
		})
	}
}
impl<TBl, TCl> SyncStateRpcApi for SyncStateRpcHandler<TBl, TCl>
	where
		TBl: BlockT,
		TCl: HeaderBackend<TBl> + sc_client_api::AuxStore + 'static,
{
	fn system_gen_sync_spec(&self, raw: bool)
		-> jsonrpc_core::Result<jsonrpc_core::Value>
	{
		// Gated behind the node's unsafe-RPC policy.
		if let Err(err) = self.deny_unsafe.check_if_safe() {
			return Err(err.into());
		}
		// Clone the spec, embed the freshly built sync state, and
		// re-serialize the whole spec back into a JSON value.
		let mut chain_spec = self.chain_spec.cloned_box();
		let sync_state = self.build_sync_state().map_err(Error)?;
		chain_spec.set_light_sync_state(sync_state.to_serializable());
		let string = chain_spec.as_json(raw).map_err(map_error)?;
		serde_json::from_str(&string).map_err(|err| map_error(err.to_string()))
	}
}
fn map_error(error: String) -> jsonrpc_core::Error {
Error(sp_blockchain::Error::Msg(error)).into()
}
|
use anyhow::Result;
use rustcoalescence_algorithms::Algorithm;
#[cfg(feature = "rustcoalescence-algorithms-cuda")]
use rustcoalescence_algorithms_cuda::CudaAlgorithm;
#[cfg(feature = "rustcoalescence-algorithms-independent")]
use rustcoalescence_algorithms_independent::IndependentAlgorithm;
#[cfg(feature = "rustcoalescence-algorithms-monolithic")]
use rustcoalescence_algorithms_monolithic::{
classical::ClassicalAlgorithm, gillespie::GillespieAlgorithm,
skipping_gillespie::SkippingGillespieAlgorithm,
};
use necsim_core::reporter::Reporter;
use necsim_core_bond::NonNegativeF64;
use necsim_impls_no_std::cogs::origin_sampler::pre_sampler::OriginPreSampler;
use necsim_partitioning_core::LocalPartition;
use rustcoalescence_scenarios::{
almost_infinite::AlmostInfiniteScenario, non_spatial::NonSpatialScenario,
spatially_explicit::SpatiallyExplicitScenario, spatially_implicit::SpatiallyImplicitScenario,
Scenario,
};
use crate::args::{Algorithm as AlgorithmArgs, CommonArgs, Scenario as ScenarioArgs};
/// Runs the selected (algorithm x scenario) combination on the given local
/// partition and reports the simulation result.
// NOTE(review): `local_partition` is deliberately boxed, hence the
// `clippy::boxed_local` allow.
#[allow(clippy::too_many_lines, clippy::boxed_local)]
pub fn simulate_with_logger<R: Reporter, P: LocalPartition<R>>(
    mut local_partition: Box<P>,
    common_args: CommonArgs,
    scenario: ScenarioArgs,
) -> Result<()> {
    // Announce whether this run is monolithic or distributed.
    if local_partition.get_number_of_partitions().get() <= 1 {
        info!("The simulation will be run in monolithic mode.");
    } else {
        info!(
            "The simulation will be distributed across {} partitions.",
            local_partition.get_number_of_partitions().get()
        );
    }
    // Sub-sample the origin lineages according to the requested percentage.
    let pre_sampler = OriginPreSampler::all().percentage(common_args.sample_percentage.get());
    // The macro expands to a nested dispatch over both argument enums;
    // `<=>` separates the algorithm arms from the scenario arms.
    let (time, steps): (NonNegativeF64, u64) = crate::match_scenario_algorithm!(
        (common_args.algorithm, scenario => scenario)
    {
        #[cfg(feature = "rustcoalescence-algorithms-monolithic")]
        AlgorithmArgs::Classical(algorithm_args) => {
            ClassicalAlgorithm::initialise_and_simulate(
                algorithm_args,
                common_args.seed,
                scenario,
                pre_sampler,
                &mut *local_partition,
            )
            .into_ok()
        },
        #[cfg(feature = "rustcoalescence-algorithms-monolithic")]
        AlgorithmArgs::Gillespie(algorithm_args) => {
            GillespieAlgorithm::initialise_and_simulate(
                algorithm_args,
                common_args.seed,
                scenario,
                pre_sampler,
                &mut *local_partition,
            )
            .into_ok()
        },
        #[cfg(feature = "rustcoalescence-algorithms-monolithic")]
        AlgorithmArgs::SkippingGillespie(algorithm_args) => {
            SkippingGillespieAlgorithm::initialise_and_simulate(
                algorithm_args,
                common_args.seed,
                scenario,
                pre_sampler,
                &mut *local_partition,
            )
            .into_ok()
        },
        #[cfg(feature = "rustcoalescence-algorithms-independent")]
        AlgorithmArgs::Independent(algorithm_args) => {
            IndependentAlgorithm::initialise_and_simulate(
                algorithm_args,
                common_args.seed,
                scenario,
                pre_sampler,
                &mut *local_partition,
            )
            .into_ok()
        },
        #[cfg(feature = "rustcoalescence-algorithms-cuda")]
        AlgorithmArgs::Cuda(algorithm_args) => {
            // Unlike the other algorithms, the CUDA path is fallible.
            CudaAlgorithm::initialise_and_simulate(
                algorithm_args,
                common_args.seed,
                scenario,
                pre_sampler,
                &mut *local_partition,
            )?
        }
        <=>
        ScenarioArgs::SpatiallyExplicit(scenario_args) => {
            SpatiallyExplicitScenario::initialise(
                scenario_args,
                common_args.speciation_probability_per_generation,
            )?
        },
        ScenarioArgs::NonSpatial(scenario_args) => {
            NonSpatialScenario::initialise(
                scenario_args,
                common_args.speciation_probability_per_generation,
            )
            .into_ok()
        },
        ScenarioArgs::AlmostInfinite(scenario_args) => {
            AlmostInfiniteScenario::initialise(
                scenario_args,
                common_args.speciation_probability_per_generation,
            )
            .into_ok()
        },
        ScenarioArgs::SpatiallyImplicit(scenario_args) => {
            SpatiallyImplicitScenario::initialise(
                scenario_args,
                common_args.speciation_probability_per_generation,
            )
            .into_ok()
        }
    });
    // NOTE(review): the same " Reporter Summary " banner is printed both
    // before and after `finalise_reporting` — confirm this open/close framing
    // is intentional rather than a copy-paste slip.
    if log::log_enabled!(log::Level::Info) {
        println!("\n");
        println!("{:=^80}", " Reporter Summary ");
        println!();
    }
    local_partition.finalise_reporting();
    if log::log_enabled!(log::Level::Info) {
        println!();
        println!("{:=^80}", " Reporter Summary ");
        println!();
    }
    info!(
        "The simulation finished at time {} after {} steps.\n",
        time.get(),
        steps
    );
    Ok(())
}
|
use std::collections::HashSet;
/// Formats `x` in the given `radix`, using lowercase digits, without a sign.
///
/// # Panics
/// Panics if `radix` is 0 (division by zero) or greater than 36
/// (`char::from_digit` only supports radices up to 36).
fn format_radix(mut x: u64, radix: u64) -> String {
    let mut digits = vec![];
    // do-while shape: run at least once so that x == 0 still yields "0".
    loop {
        let digit = x % radix;
        x /= radix;
        digits.push(
            std::char::from_digit(digit as u32, radix as u32)
                .expect("radix must be in 2..=36"),
        );
        if x == 0 {
            break;
        }
    }
    // Digits were produced least-significant first, so reverse them.
    digits.into_iter().rev().collect()
}
/// Parses a base-36 string (digits plus letters, case-insensitive) into a
/// `u64`.
///
/// Takes `&str` instead of `&String` (idiomatic; existing `&String` call
/// sites still work via deref coercion).
///
/// # Panics
/// Panics if `s` is empty, overflows `u64`, or contains a character outside
/// base 36.
fn parse36(s: &str) -> u64 {
    u64::from_str_radix(s, 36).unwrap()
}
/// Returns the password that follows `pw`, treating it as a base-36 number.
fn inc_password(pw: &String) -> String {
    let next = parse36(pw) + 1;
    format_radix(next, 36)
}
/// Checks the password rules: no decimal digits, at least one run of three
/// base-36-consecutive letters, none of 'i'/'o'/'l', and at least two
/// distinct doubled-character pairs.
///
/// Takes `&str` instead of `&String` (existing `&String` call sites still
/// work via deref coercion).
fn is_valid_password(pw: &str) -> bool {
    let mut contains_digit = false;
    let mut contains_three_increasing = false;
    let mut contains_illegal = false;
    // Distinct pair characters seen so far ("aa", "bb", ...).
    let mut pairs = HashSet::new();
    // Sliding window of the previous one and two characters.
    let mut prev1: Option<char> = None;
    let mut prev2: Option<char> = None;
    for c in pw.chars() {
        if c.is_numeric() {
            contains_digit = true;
        }
        let c36 = parse36(&c.to_string());
        if let Some(p1) = prev1 {
            if let Some(p2) = prev2 {
                // `c >= 'c'` guards the `c36 - 2` subtraction against
                // underflow for the smallest base-36 values.
                if c >= 'c' && parse36(&p2.to_string()) == c36 - 2 && parse36(&p1.to_string()) == c36 - 1 {
                    contains_three_increasing = true;
                }
            }
            if p1 == c {
                pairs.insert(c);
            }
        }
        if matches!(c, 'i' | 'o' | 'l') {
            contains_illegal = true;
        }
        prev2 = prev1;
        prev1 = Some(c);
    }
    !contains_digit && contains_three_increasing && !contains_illegal && pairs.len() >= 2
}
/// Keeps incrementing from `pw` until a password satisfying
/// `is_valid_password` is reached, and returns it.
fn next_valid_password(pw: &String) -> String {
    let mut candidate = inc_password(pw);
    while !is_valid_password(&candidate) {
        candidate = inc_password(&candidate);
    }
    candidate
}
fn main() {
    // Part A: the first valid password after the puzzle input;
    // Part B: the first valid password after that.
    let seed = String::from("hepxcrrq");
    let part_a = next_valid_password(&seed);
    let part_b = next_valid_password(&part_a);
    println!("A: {}", part_a);
    println!("B: {}", part_b);
}
|
use crate::back::spv::{helpers, Instruction};
use spirv::{Op, Word};
// Signedness operand used by OpTypeInt (see `instruction_type_int` below):
// 0 = unsigned, 1 = signed.
pub(super) enum Signedness {
    Unsigned = 0,
    Signed = 1,
}
//
// Debug Instructions
//
pub(super) fn instruction_source(
    source_language: spirv::SourceLanguage,
    version: u32,
) -> Instruction {
    // OpSource: records the high-level source language and its version.
    let mut inst = Instruction::new(Op::Source);
    inst.add_operand(source_language as u32);
    // The version is appended as little-endian words.
    inst.add_operands(helpers::bytes_to_words(&version.to_le_bytes()));
    inst
}
pub(super) fn instruction_name(target_id: Word, name: &str) -> Instruction {
    // OpName: attaches a debug name string to an id.
    let mut inst = Instruction::new(Op::Name);
    inst.add_operand(target_id);
    inst.add_operands(helpers::string_to_words(name));
    inst
}
//
// Annotation Instructions
//
pub(super) fn instruction_decorate(
    target_id: Word,
    decoration: spirv::Decoration,
    operands: &[Word],
) -> Instruction {
    // OpDecorate: target id, decoration, then decoration-specific literals.
    let mut inst = Instruction::new(Op::Decorate);
    inst.add_operand(target_id);
    inst.add_operand(decoration as u32);
    for &word in operands {
        inst.add_operand(word);
    }
    inst
}
//
// Extension Instructions
//
pub(super) fn instruction_ext_inst_import(id: Word, name: &str) -> Instruction {
    // OpExtInstImport: result id plus the extended instruction set's name.
    let mut inst = Instruction::new(Op::ExtInstImport);
    inst.set_result(id);
    inst.add_operands(helpers::string_to_words(name));
    inst
}
//
// Mode-Setting Instructions
//
pub(super) fn instruction_memory_model(
    addressing_model: spirv::AddressingModel,
    memory_model: spirv::MemoryModel,
) -> Instruction {
    // OpMemoryModel: declares the module's addressing and memory models.
    let mut inst = Instruction::new(Op::MemoryModel);
    inst.add_operand(addressing_model as u32);
    inst.add_operand(memory_model as u32);
    inst
}
pub(super) fn instruction_entry_point(
    execution_model: spirv::ExecutionModel,
    entry_point_id: Word,
    name: &str,
    interface_ids: &[Word],
) -> Instruction {
    // OpEntryPoint: execution model, function id, name, then interface ids.
    let mut inst = Instruction::new(Op::EntryPoint);
    inst.add_operand(execution_model as u32);
    inst.add_operand(entry_point_id);
    inst.add_operands(helpers::string_to_words(name));
    for &interface_id in interface_ids {
        inst.add_operand(interface_id);
    }
    inst
}
pub(super) fn instruction_execution_mode(
    entry_point_id: Word,
    execution_mode: spirv::ExecutionMode,
    args: &[Word],
) -> Instruction {
    // OpExecutionMode: entry point id, mode, then mode-specific literals.
    let mut inst = Instruction::new(Op::ExecutionMode);
    inst.add_operand(entry_point_id);
    inst.add_operand(execution_mode as u32);
    for &arg in args {
        inst.add_operand(arg);
    }
    inst
}
pub(super) fn instruction_capability(capability: spirv::Capability) -> Instruction {
    // OpCapability: declares a capability the module relies on.
    let mut inst = Instruction::new(Op::Capability);
    inst.add_operand(capability as u32);
    inst
}
//
// Type-Declaration Instructions
//
pub(super) fn instruction_type_void(id: Word) -> Instruction {
let mut instruction = Instruction::new(Op::TypeVoid);
instruction.set_result(id);
instruction
}
pub(super) fn instruction_type_bool(id: Word) -> Instruction {
let mut instruction = Instruction::new(Op::TypeBool);
instruction.set_result(id);
instruction
}
pub(super) fn instruction_type_int(id: Word, width: Word, signedness: Signedness) -> Instruction {
let mut instruction = Instruction::new(Op::TypeInt);
instruction.set_result(id);
instruction.add_operand(width);
instruction.add_operand(signedness as u32);
instruction
}
pub(super) fn instruction_type_float(id: Word, width: Word) -> Instruction {
let mut instruction = Instruction::new(Op::TypeFloat);
instruction.set_result(id);
instruction.add_operand(width);
instruction
}
pub(super) fn instruction_type_vector(
id: Word,
component_type_id: Word,
component_count: crate::VectorSize,
) -> Instruction {
let mut instruction = Instruction::new(Op::TypeVector);
instruction.set_result(id);
instruction.add_operand(component_type_id);
instruction.add_operand(component_count as u32);
instruction
}
pub(super) fn instruction_type_matrix(
id: Word,
column_type_id: Word,
column_count: crate::VectorSize,
) -> Instruction {
let mut instruction = Instruction::new(Op::TypeMatrix);
instruction.set_result(id);
instruction.add_operand(column_type_id);
instruction.add_operand(column_count as u32);
instruction
}
// Builds an OpTypeImage. Operand order after the sampled type and dim:
// depth, arrayed, multisampled, sampled, then the image format (Unknown for
// anything that is not a storage image).
pub(super) fn instruction_type_image(
    id: Word,
    sampled_type_id: Word,
    dim: spirv::Dim,
    arrayed: bool,
    image_class: crate::ImageClass,
) -> Instruction {
    let mut instruction = Instruction::new(Op::TypeImage);
    instruction.set_result(id);
    instruction.add_operand(sampled_type_id);
    instruction.add_operand(dim as u32);
    // Depth operand: 1 only for depth images.
    instruction.add_operand(match image_class {
        crate::ImageClass::Depth => 1,
        _ => 0,
    });
    instruction.add_operand(arrayed as u32);
    // Multisampled operand: 1 only for multisampled sampled images.
    instruction.add_operand(match image_class {
        crate::ImageClass::Sampled { multi: true, .. } => 1,
        _ => 0,
    });
    // Sampled operand: 1 for sampled images, 0 otherwise.
    instruction.add_operand(match image_class {
        crate::ImageClass::Sampled { .. } => 1,
        _ => 0,
    });
    // Map naga storage formats onto SPIR-V image formats; non-storage
    // images carry no format.
    let format = match image_class {
        crate::ImageClass::Storage(format) => match format {
            crate::StorageFormat::R8Unorm => spirv::ImageFormat::R8,
            crate::StorageFormat::R8Snorm => spirv::ImageFormat::R8Snorm,
            crate::StorageFormat::R8Uint => spirv::ImageFormat::R8ui,
            crate::StorageFormat::R8Sint => spirv::ImageFormat::R8i,
            crate::StorageFormat::R16Uint => spirv::ImageFormat::R16ui,
            crate::StorageFormat::R16Sint => spirv::ImageFormat::R16i,
            crate::StorageFormat::R16Float => spirv::ImageFormat::R16f,
            crate::StorageFormat::Rg8Unorm => spirv::ImageFormat::Rg8,
            crate::StorageFormat::Rg8Snorm => spirv::ImageFormat::Rg8Snorm,
            crate::StorageFormat::Rg8Uint => spirv::ImageFormat::Rg8ui,
            crate::StorageFormat::Rg8Sint => spirv::ImageFormat::Rg8i,
            crate::StorageFormat::R32Uint => spirv::ImageFormat::R32ui,
            crate::StorageFormat::R32Sint => spirv::ImageFormat::R32i,
            crate::StorageFormat::R32Float => spirv::ImageFormat::R32f,
            crate::StorageFormat::Rg16Uint => spirv::ImageFormat::Rg16ui,
            crate::StorageFormat::Rg16Sint => spirv::ImageFormat::Rg16i,
            crate::StorageFormat::Rg16Float => spirv::ImageFormat::Rg16f,
            crate::StorageFormat::Rgba8Unorm => spirv::ImageFormat::Rgba8,
            crate::StorageFormat::Rgba8Snorm => spirv::ImageFormat::Rgba8Snorm,
            crate::StorageFormat::Rgba8Uint => spirv::ImageFormat::Rgba8ui,
            crate::StorageFormat::Rgba8Sint => spirv::ImageFormat::Rgba8i,
            // NOTE(review): Rgb10a2Unorm maps to the *unsigned-int* SPIR-V
            // format Rgb10a2ui rather than the unorm Rgb10A2 — confirm this
            // is intentional.
            crate::StorageFormat::Rgb10a2Unorm => spirv::ImageFormat::Rgb10a2ui,
            crate::StorageFormat::Rg11b10Float => spirv::ImageFormat::R11fG11fB10f,
            crate::StorageFormat::Rg32Uint => spirv::ImageFormat::Rg32ui,
            crate::StorageFormat::Rg32Sint => spirv::ImageFormat::Rg32i,
            crate::StorageFormat::Rg32Float => spirv::ImageFormat::Rg32f,
            crate::StorageFormat::Rgba16Uint => spirv::ImageFormat::Rgba16ui,
            crate::StorageFormat::Rgba16Sint => spirv::ImageFormat::Rgba16i,
            crate::StorageFormat::Rgba16Float => spirv::ImageFormat::Rgba16f,
            crate::StorageFormat::Rgba32Uint => spirv::ImageFormat::Rgba32ui,
            crate::StorageFormat::Rgba32Sint => spirv::ImageFormat::Rgba32i,
            crate::StorageFormat::Rgba32Float => spirv::ImageFormat::Rgba32f,
        },
        _ => spirv::ImageFormat::Unknown,
    };
    instruction.add_operand(format as u32);
    instruction
}
pub(super) fn instruction_type_sampler(id: Word) -> Instruction {
    let mut inst = Instruction::new(Op::TypeSampler);
    inst.set_result(id);
    inst
}
pub(super) fn instruction_type_sampled_image(id: Word, image_type_id: Word) -> Instruction {
    let mut inst = Instruction::new(Op::TypeSampledImage);
    inst.set_result(id);
    inst.add_operand(image_type_id);
    inst
}
pub(super) fn instruction_type_array(
    id: Word,
    element_type_id: Word,
    length_id: Word,
) -> Instruction {
    // OpTypeArray: the length is an id of a constant, not a literal.
    let mut inst = Instruction::new(Op::TypeArray);
    inst.set_result(id);
    inst.add_operand(element_type_id);
    inst.add_operand(length_id);
    inst
}
pub(super) fn instruction_type_runtime_array(id: Word, element_type_id: Word) -> Instruction {
    let mut inst = Instruction::new(Op::TypeRuntimeArray);
    inst.set_result(id);
    inst.add_operand(element_type_id);
    inst
}
pub(super) fn instruction_type_struct(id: Word, member_ids: &[Word]) -> Instruction {
    // OpTypeStruct: one operand per member type, in declaration order.
    let mut inst = Instruction::new(Op::TypeStruct);
    inst.set_result(id);
    for &member_id in member_ids {
        inst.add_operand(member_id);
    }
    inst
}
pub(super) fn instruction_type_pointer(
    id: Word,
    storage_class: spirv::StorageClass,
    type_id: Word,
) -> Instruction {
    let mut inst = Instruction::new(Op::TypePointer);
    inst.set_result(id);
    inst.add_operand(storage_class as u32);
    inst.add_operand(type_id);
    inst
}
pub(super) fn instruction_type_function(
    id: Word,
    return_type_id: Word,
    parameter_ids: &[Word],
) -> Instruction {
    // OpTypeFunction: return type first, then one operand per parameter type.
    let mut inst = Instruction::new(Op::TypeFunction);
    inst.set_result(id);
    inst.add_operand(return_type_id);
    for &parameter_id in parameter_ids {
        inst.add_operand(parameter_id);
    }
    inst
}
//
// Constant-Creation Instructions
//
pub(super) fn instruction_constant_true(result_type_id: Word, id: Word) -> Instruction {
    let mut inst = Instruction::new(Op::ConstantTrue);
    inst.set_type(result_type_id);
    inst.set_result(id);
    inst
}
pub(super) fn instruction_constant_false(result_type_id: Word, id: Word) -> Instruction {
    let mut inst = Instruction::new(Op::ConstantFalse);
    inst.set_type(result_type_id);
    inst.set_result(id);
    inst
}
pub(super) fn instruction_constant(result_type_id: Word, id: Word, values: &[Word]) -> Instruction {
    // OpConstant: the literal value, already split into words by the caller.
    let mut inst = Instruction::new(Op::Constant);
    inst.set_type(result_type_id);
    inst.set_result(id);
    for &value in values {
        inst.add_operand(value);
    }
    inst
}
pub(super) fn instruction_constant_composite(
    result_type_id: Word,
    id: Word,
    constituent_ids: &[Word],
) -> Instruction {
    // OpConstantComposite: one operand per constituent constant id.
    let mut inst = Instruction::new(Op::ConstantComposite);
    inst.set_type(result_type_id);
    inst.set_result(id);
    for &constituent_id in constituent_ids {
        inst.add_operand(constituent_id);
    }
    inst
}
//
// Memory Instructions
//
/// Builds `OpVariable` in `storage_class`, optionally with an initializer.
pub(super) fn instruction_variable(
    result_type_id: Word,
    id: Word,
    storage_class: spirv::StorageClass,
    initializer_id: Option<Word>,
) -> Instruction {
    let mut instruction = Instruction::new(Op::Variable);
    instruction.set_type(result_type_id);
    instruction.set_result(id);
    instruction.add_operand(storage_class as u32);
    if let Some(initializer_id) = initializer_id {
        instruction.add_operand(initializer_id);
    }
    instruction
}
/// Builds `OpLoad` from `pointer_id`, with optional memory-access flags.
pub(super) fn instruction_load(
    result_type_id: Word,
    id: Word,
    pointer_id: Word,
    memory_access: Option<spirv::MemoryAccess>,
) -> Instruction {
    let mut instruction = Instruction::new(Op::Load);
    instruction.set_type(result_type_id);
    instruction.set_result(id);
    instruction.add_operand(pointer_id);
    if let Some(memory_access) = memory_access {
        instruction.add_operand(memory_access.bits());
    }
    instruction
}
/// Builds `OpStore` of `object_id` through a pointer.
/// NOTE(review): despite its name, `pointer_type_id` is emitted as the
/// OpStore *Pointer* operand (a value id, not a type id) — consider renaming.
pub(super) fn instruction_store(
    pointer_type_id: Word,
    object_id: Word,
    memory_access: Option<spirv::MemoryAccess>,
) -> Instruction {
    let mut instruction = Instruction::new(Op::Store);
    instruction.add_operand(pointer_type_id);
    instruction.add_operand(object_id);
    if let Some(memory_access) = memory_access {
        instruction.add_operand(memory_access.bits());
    }
    instruction
}
/// Builds `OpAccessChain` from `base_id` through the given index ids.
pub(super) fn instruction_access_chain(
    result_type_id: Word,
    id: Word,
    base_id: Word,
    index_ids: &[Word],
) -> Instruction {
    let mut instruction = Instruction::new(Op::AccessChain);
    instruction.set_type(result_type_id);
    instruction.set_result(id);
    instruction.add_operand(base_id);
    for index_id in index_ids {
        instruction.add_operand(*index_id);
    }
    instruction
}
//
// Function Instructions
//
/// Builds `OpFunction`, opening a function body of type `function_type_id`.
pub(super) fn instruction_function(
    return_type_id: Word,
    id: Word,
    function_control: spirv::FunctionControl,
    function_type_id: Word,
) -> Instruction {
    let mut instruction = Instruction::new(Op::Function);
    instruction.set_type(return_type_id);
    instruction.set_result(id);
    instruction.add_operand(function_control.bits());
    instruction.add_operand(function_type_id);
    instruction
}
/// Builds `OpFunctionParameter` of the given type.
pub(super) fn instruction_function_parameter(result_type_id: Word, id: Word) -> Instruction {
    let mut instruction = Instruction::new(Op::FunctionParameter);
    instruction.set_type(result_type_id);
    instruction.set_result(id);
    instruction
}
/// Builds `OpFunctionEnd`, closing the current function body.
pub(super) fn instruction_function_end() -> Instruction {
    Instruction::new(Op::FunctionEnd)
}
/// Builds `OpFunctionCall` to `function_id` with the given argument ids.
pub(super) fn instruction_function_call(
    result_type_id: Word,
    id: Word,
    function_id: Word,
    argument_ids: &[Word],
) -> Instruction {
    let mut instruction = Instruction::new(Op::FunctionCall);
    instruction.set_type(result_type_id);
    instruction.set_result(id);
    instruction.add_operand(function_id);
    for argument_id in argument_ids {
        instruction.add_operand(*argument_id);
    }
    instruction
}
//
// Image Instructions
//
/// Builds `OpSampledImage`, combining an image with a sampler.
pub(super) fn instruction_sampled_image(
    result_type_id: Word,
    id: Word,
    image: Word,
    sampler: Word,
) -> Instruction {
    let mut instruction = Instruction::new(Op::SampledImage);
    instruction.set_type(result_type_id);
    instruction.set_result(id);
    instruction.add_operand(image);
    instruction.add_operand(sampler);
    instruction
}
/// Builds `OpImageSampleImplicitLod`, sampling `sampled_image` at
/// `coordinates` with an implicit level of detail.
pub(super) fn instruction_image_sample_implicit_lod(
    result_type_id: Word,
    id: Word,
    sampled_image: Word,
    coordinates: Word,
) -> Instruction {
    let mut instruction = Instruction::new(Op::ImageSampleImplicitLod);
    instruction.set_type(result_type_id);
    instruction.set_result(id);
    instruction.add_operand(sampled_image);
    instruction.add_operand(coordinates);
    instruction
}
//
// Conversion Instructions
//
/// Builds a generic single-operand instruction; the caller supplies the
/// specific opcode (conversions, negation, etc.).
pub(super) fn instruction_unary(
    op: Op,
    result_type_id: Word,
    id: Word,
    value: Word,
) -> Instruction {
    let mut instruction = Instruction::new(op);
    instruction.set_type(result_type_id);
    instruction.set_result(id);
    instruction.add_operand(value);
    instruction
}
//
// Composite Instructions
//
/// Builds `OpCompositeConstruct` from the given constituent ids.
pub(super) fn instruction_composite_construct(
    result_type_id: Word,
    id: Word,
    constituent_ids: &[Word],
) -> Instruction {
    let mut instruction = Instruction::new(Op::CompositeConstruct);
    instruction.set_type(result_type_id);
    instruction.set_result(id);
    for constituent_id in constituent_ids {
        instruction.add_operand(*constituent_id);
    }
    instruction
}
//
// Arithmetic Instructions
//
/// Builds a generic two-operand instruction; the caller supplies the opcode.
pub(super) fn instruction_binary(
    op: Op,
    result_type_id: Word,
    id: Word,
    operand_1: Word,
    operand_2: Word,
) -> Instruction {
    let mut instruction = Instruction::new(op);
    instruction.set_type(result_type_id);
    instruction.set_result(id);
    instruction.add_operand(operand_1);
    instruction.add_operand(operand_2);
    instruction
}
//
// Bit Instructions
//
//
// Relational and Logical Instructions
//
/// Builds `OpSelect` carrying only the condition operand.
/// NOTE(review): per the SPIR-V spec, OpSelect also requires two object
/// operands; as emitted here the caller presumably appends them — verify.
pub(super) fn instruction_conditional_select(
    result_type_id: Word,
    id: Word,
    condition: Word,
) -> Instruction {
    let mut instruction = Instruction::new(Op::Select);
    instruction.set_type(result_type_id);
    instruction.set_result(id);
    instruction.add_operand(condition);
    instruction
}
//
// Derivative Instructions
//
//
// Control-Flow Instructions
//
/// Builds `OpLabel`, starting a new basic block.
pub(super) fn instruction_label(id: Word) -> Instruction {
    let mut instruction = Instruction::new(Op::Label);
    instruction.set_result(id);
    instruction
}
/// Builds `OpReturn` (return from a void function).
pub(super) fn instruction_return() -> Instruction {
    Instruction::new(Op::Return)
}
/// Builds `OpReturnValue`, returning `value_id` to the caller.
pub(super) fn instruction_return_value(value_id: Word) -> Instruction {
    let mut instruction = Instruction::new(Op::ReturnValue);
    instruction.add_operand(value_id);
    instruction
}
//
// Atomic Instructions
//
//
// Primitive Instructions
//
|
extern crate serde;
pub mod animation;
pub mod carry;
pub mod components;
pub mod crab_ai;
pub mod entities;
pub mod map;
pub mod movement;
pub mod saveload_system;
pub mod state;
pub mod string_writer;
pub mod weapons;
|
/// Inserts a space before every uppercase letter by reversing the string,
/// splitting inclusively on the uppercase characters (so each chunk ends at
/// a word boundary), un-reversing each chunk, and joining the chunks back
/// in original order.
fn naruhodo(s: &str) -> String {
    s.chars() // camelCase
        .rev() // esaClemac
        .collect::<String>() // esaClemac
        .split_inclusive(char::is_uppercase) // iter(esaC lemac)
        .map(|x| x.chars().rev().collect()) // Case camel
        .rev() // iter(camel Case)
        .collect::<Vec<String>>() // camel, Case
        .join(" ") // camel Case
}
/// Inserts a space before every uppercase letter in `s`.
fn solution(s: &str) -> String {
    // Push characters into one buffer instead of formatting each char:
    // an uppercase letter is preceded by a single space.
    let mut spaced = String::with_capacity(s.len() * 2);
    for c in s.chars() {
        if c.is_uppercase() {
            spaced.push(' ');
        }
        spaced.push(c);
    }
    spaced
}
#[cfg(test)]
mod tests {
    use super::*;
    // Checks the straightforward character-by-character implementation.
    #[test]
    fn test_solution() {
        assert_eq!(solution("camelCasing"), "camel Casing");
        assert_eq!(solution("camelCasingTest"), "camel Casing Test");
    }
    // Checks that the reverse/split_inclusive implementation agrees.
    #[test]
    fn test_solution2() {
        assert_eq!(naruhodo("camelCasing"), "camel Casing");
        assert_eq!(naruhodo("camelCasingTest"), "camel Casing Test");
    }
}
|
/// Counts candidate passwords in the puzzle range: digits never decrease
/// and at least one adjacent pair of digits is equal (looks like Advent of
/// Code 2019 day 4 — confirm against the original puzzle).
fn main() {
    // my test inputs: 236491 to 713787.
    let p = all_possibilities(236491, 713787);
    println!("Checking {} possibilities", p.len());
    let candidate_pws: Vec<&i32> = p
        .iter()
        .filter(|x| has_double(x) && !has_decreasing_digits(x))
        .collect();
    println!("Candidate count: {}", candidate_pws.len());
    // 153758 was too high
}
/// Returns every integer in `[start, end]`, both bounds inclusive.
/// An empty `Vec` is returned when `start > end`.
fn all_possibilities(start: i32, end: i32) -> Vec<i32> {
    // An inclusive range collects directly; no manual push loop needed, and
    // `..=` avoids the overflow the old `end + 1` hit when end == i32::MAX.
    (start..=end).collect()
}
// has two adjacent digits that are the same
fn has_double(input: &i32) -> bool {
    // Decimal digits are single ASCII bytes, so compare neighbouring bytes
    // of the formatted number directly.
    input
        .to_string()
        .as_bytes()
        .windows(2)
        .any(|pair| pair[0] == pair[1])
}
// numbers go down, such as 2230 or 5676
fn has_decreasing_digits(input: &i32) -> bool {
    // Track the largest digit seen so far; any digit below it means the
    // number "goes down" somewhere.
    // https://stackoverflow.com/questions/43983414/how-to-convert-a-rust-char-to-an-integer-so-that-1-becomes-1
    let mut max_digit_seen = 0;
    for c in input.to_string().chars() {
        let digit = c.to_digit(10).unwrap();
        if digit < max_digit_seen {
            // this digit is smaller than something we've seen before
            return true;
        }
        max_digit_seen = digit;
    }
    false
}
#[cfg(test)]
mod tests {
    use super::*;
    // Adjacent-equal-digit detection on a mix of positives and negatives.
    #[test]
    fn test_doubles() {
        assert_eq!(has_double(&123456), false);
        assert_eq!(has_double(&1232456), false);
        assert_eq!(has_double(&122), true);
        assert_eq!(has_double(&123789), false);
        assert_eq!(has_double(&111111), true);
    }
    // Monotonicity check: any digit lower than an earlier one counts.
    #[test]
    fn test_decreasing() {
        assert_eq!(has_decreasing_digits(&123456), false);
        assert_eq!(has_decreasing_digits(&122), false);
        assert_eq!(has_decreasing_digits(&1221), true);
        assert_eq!(has_decreasing_digits(&12210), true);
        assert_eq!(has_decreasing_digits(&10), true);
        assert_eq!(has_decreasing_digits(&223450), true);
    }
    // Both range bounds are inclusive.
    #[test]
    fn test_possibilty_generator() {
        assert_eq!(all_possibilities(0, 2), vec!(0, 1, 2));
        assert_eq!(all_possibilities(1, 2), vec!(1, 2));
    }
}
|
use crate::error::{Error, Result};
pub(super) struct Commands(Vec<Box<Command>>);
impl Commands {
pub(super) fn new() -> Commands {
Commands(vec![Box::new(Hello), Box::new(Bye)])
}
pub(super) fn handle(&self, message: &str) -> Result<String> {
let mut split = message.splitn(2, ' ');
let command = split.next().unwrap();
match self.0.iter().find(|c| c.name() == command) {
Some(c) => c.run(&split.next()),
None => Err(Error::CommandUnknown),
}
}
}
/// A chat command: a trigger keyword plus the action it performs.
trait Command {
    /// The keyword (including any prefix, e.g. `!hello`) that triggers this command.
    fn name(&self) -> &'static str;
    /// Runs the command with the (optional) remainder of the message.
    fn run(&self, args: &Option<&str>) -> Result<String>;
}
/// Replies with a greeting; triggered by `!hello`.
struct Hello;
impl Command for Hello {
    fn name(&self) -> &'static str {
        "!hello"
    }
    fn run(&self, _: &Option<&str>) -> Result<String> {
        Ok("Hello there!".into())
    }
}
/// Replies with a farewell; triggered by `!bye`.
struct Bye;
impl Command for Bye {
    fn name(&self) -> &'static str {
        "!bye"
    }
    fn run(&self, _: &Option<&str>) -> Result<String> {
        Ok("Good bye!".into())
    }
}
|
// q0151_reverse_words_in_a_string
struct Solution;
impl Solution {
    /// Returns the words of `s` in reverse order, joined by single spaces;
    /// leading, trailing, and repeated whitespace is dropped.
    pub fn reverse_words(s: String) -> String {
        let mut words: Vec<&str> = s.split_ascii_whitespace().collect();
        words.reverse();
        words.join(" ")
    }
}
#[cfg(test)]
mod tests {
    use super::Solution;
    // Covers simple reversal, surrounding whitespace, and repeated
    // interior whitespace being collapsed to single spaces.
    #[test]
    fn it_works() {
        assert_eq!(
            String::from("blue is sky the"),
            Solution::reverse_words(String::from("the sky is blue"))
        );
        assert_eq!(
            String::from("world! hello"),
            Solution::reverse_words(String::from("  hello world!  "))
        );
        assert_eq!(
            String::from("example good a"),
            Solution::reverse_words(String::from("a good   example"))
        );
    }
}
|
/// Reads the day-10 puzzle input into a sorted list of adapter joltages,
/// adding the outlet (0) and the built-in device (highest adapter + 3)
/// so `part1`/`part2` can treat the chain uniformly.
fn parse_numbers() -> Vec<i32> {
    let mut numbers: Vec<_> = aoc2020::input_file!("10")
        .lines()
        .map(|x| x.parse().unwrap())
        .collect();
    numbers.push(0);
    numbers.sort_unstable();
    // last() is safe: the vec holds at least the 0 pushed above.
    numbers.push(numbers.last().unwrap() + 3);
    numbers
}
/// Multiplies the count of 1-jolt gaps by the count of 3-jolt gaps between
/// consecutive adapters. `numbers` must be sorted and non-empty.
fn part1(numbers: &[i32]) -> i32 {
    let mut prev = numbers[0];
    let mut ones = 0;
    let mut threes = 0;
    for &n in &numbers[1..] {
        // Only gaps of exactly 1 or 3 jolts are counted; 2-jolt gaps are ignored.
        match n - prev {
            1 => ones += 1,
            3 => threes += 1,
            _ => {}
        }
        prev = n;
    }
    ones * threes
}
/// Counts the distinct adapter arrangements (AoC 2020 day 10 part 2).
///
/// `numbers` must be sorted ascending and include the outlet (0) and the
/// device (max + 3), as produced by `parse_numbers`.
///
/// Dynamic programming from the end: `counts[i]` is the number of ways to
/// reach the device from adapter `i`, i.e. the sum over every later
/// adapter within joltage range (difference <= 3).
fn part2(numbers: &[i32]) -> usize {
    let n = numbers.len();
    let mut counts = vec![0; n];
    counts[n - 1] = 1;
    for i in (0..n - 1).rev() {
        // BUG FIX: the range was `i + 1..n - 1`, which excluded the final
        // adapter from every sum, so counts[n - 2] (and therefore every
        // count) was always 0. The range must include index n - 1.
        counts[i] = (i + 1..n)
            .take_while(|&k| numbers[k] <= numbers[i] + 3)
            .map(|k| counts[k])
            .sum();
    }
    counts[0]
}
/// Prints the answers for both parts of day 10 from the shared input.
fn main() {
    println!("Day 10:");
    let numbers = parse_numbers();
    println!("1: {}", part1(&numbers));
    println!("2: {}", part2(&numbers));
}
|
use std::fmt::Debug;
use std::future::Future;
/// Runtime abstracts away the underlying runtime we use for task scheduling.
pub trait Runtime: Clone + Send + Sync + Unpin + Debug {
    /// Spawn a [`Future`] to run as a task in some executor.
    ///
    /// The future must be `Send + 'static` because the executor may move it
    /// to another thread and run it beyond the caller's stack frame.
    fn spawn<F>(&self, future: F)
    where
        F: Future<Output = ()> + Send + 'static;
}
|
use crate::query_testing::{parse_position_comments, Assertion};
use ansi_term::Colour;
use anyhow::{anyhow, Result};
use std::fs;
use std::path::Path;
use tree_sitter::Point;
use tree_sitter_highlight::{Highlight, HighlightConfiguration, HighlightEvent, Highlighter};
use tree_sitter_loader::Loader;
/// A single highlight-assertion mismatch: the highlight expected at a
/// source position versus the highlights actually applied there.
#[derive(Debug)]
pub struct Failure {
    row: usize,
    column: usize,
    expected_highlight: String,
    actual_highlights: Vec<String>,
}
impl std::error::Error for Failure {}
impl std::fmt::Display for Failure {
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        write!(
            f,
            "Failure - row: {}, column: {}, expected highlight '{}', actual highlights: ",
            self.row, self.column, self.expected_highlight
        )?;
        if self.actual_highlights.is_empty() {
            write!(f, "none.")
        } else {
            // Quote each actual highlight and join the list with ", ".
            let quoted: Vec<String> = self
                .actual_highlights
                .iter()
                .map(|h| format!("'{}'", h))
                .collect();
            write!(f, "{}", quoted.join(", "))
        }
    }
}
/// Runs every highlight test under `directory`, printing a summary tree.
/// Returns `Err` if any test failed; details are printed as it runs.
pub fn test_highlights(loader: &Loader, directory: &Path, apply_all_captures: bool) -> Result<()> {
    println!("syntax highlighting:");
    test_highlights_indented(loader, directory, apply_all_captures, 2)
}
/// Recursively runs the highlight test for every file under `directory`,
/// printing one ✓/✗ line per test file at the given `indent_level`.
///
/// Subdirectories are recursed into with a deeper indent. A failure
/// anywhere marks the whole run as failed, but every file is still
/// processed so all results get reported.
fn test_highlights_indented(
    loader: &Loader,
    directory: &Path,
    apply_all_captures: bool,
    indent_level: usize,
) -> Result<()> {
    let mut failed = false;
    let mut highlighter = Highlighter::new();
    for highlight_test_file in fs::read_dir(directory)? {
        let highlight_test_file = highlight_test_file?;
        let test_file_path = highlight_test_file.path();
        let test_file_name = highlight_test_file.file_name();
        print!(
            "{indent:indent_level$}",
            indent = "",
            indent_level = indent_level * 2
        );
        // Recurse into non-empty directories. `is_some()` replaces the
        // original double negative `!….next().is_none()`.
        if test_file_path.is_dir() && test_file_path.read_dir()?.next().is_some() {
            println!("{}:", test_file_name.into_string().unwrap());
            // `.is_err()` replaces `if let Err(_) = …`, which clippy flags
            // as `redundant_pattern_matching`.
            if test_highlights_indented(
                loader,
                &test_file_path,
                apply_all_captures,
                indent_level + 1,
            )
            .is_err()
            {
                failed = true;
            }
        } else {
            let (language, language_config) = loader
                .language_configuration_for_file_name(&test_file_path)?
                .ok_or_else(|| anyhow!("No language found for path {:?}", test_file_path))?;
            let highlight_config = language_config
                .highlight_config(language, apply_all_captures, None)?
                .ok_or_else(|| anyhow!("No highlighting config found for {:?}", test_file_path))?;
            match test_highlight(
                loader,
                &mut highlighter,
                highlight_config,
                fs::read(&test_file_path)?.as_slice(),
            ) {
                Ok(assertion_count) => {
                    println!(
                        "✓ {} ({} assertions)",
                        Colour::Green.paint(test_file_name.to_string_lossy().as_ref()),
                        assertion_count
                    );
                }
                Err(e) => {
                    println!(
                        "✗ {}",
                        Colour::Red.paint(test_file_name.to_string_lossy().as_ref())
                    );
                    println!(
                        "{indent:indent_level$}  {e}",
                        indent = "",
                        indent_level = indent_level * 2
                    );
                    failed = true;
                }
            }
        }
    }
    // Per-file errors were already printed inline; the empty message only
    // signals overall failure to the caller.
    if failed {
        Err(anyhow!(""))
    } else {
        Ok(())
    }
}
/// Checks every highlighting assertion against the actual highlight spans.
///
/// Both `assertions` and `highlights` are ordered by position, so a single
/// forward cursor `i` over `highlights` suffices. Returns the number of
/// assertions on success, or a [`Failure`] for the first mismatch.
pub fn iterate_assertions(
    assertions: &Vec<Assertion>,
    highlights: &Vec<(Point, Point, Highlight)>,
    highlight_names: &Vec<String>,
) -> Result<usize> {
    // Iterate through all of the highlighting assertions, checking each one against the
    // actual highlights.
    let mut i = 0;
    // Reused per assertion to collect the highlights that actually covered it.
    let mut actual_highlights = Vec::new();
    for Assertion {
        position,
        negative,
        expected_capture_name: expected_highlight,
    } in assertions
    {
        let mut passed = false;
        actual_highlights.clear();
        'highlight_loop: loop {
            // The assertions are ordered by position, so skip past all of the highlights that
            // end at or before this assertion's position.
            if let Some(highlight) = highlights.get(i) {
                if highlight.1 <= *position {
                    i += 1;
                    continue;
                }
                // Iterate through all of the highlights that start at or before this assertion's,
                // position, looking for one that matches the assertion.
                let mut j = i;
                while let (false, Some(highlight)) = (passed, highlights.get(j)) {
                    if highlight.0 > *position {
                        break 'highlight_loop;
                    }
                    // If the highlight matches the assertion, or if the highlight doesn't
                    // match the assertion but it's negative, this test passes. Otherwise,
                    // add this highlight to the list of actual highlights that span the
                    // assertion's position, in order to generate an error message in the event
                    // of a failure.
                    let highlight_name = &highlight_names[(highlight.2).0];
                    if (*highlight_name == *expected_highlight) == !negative {
                        passed = true;
                        break 'highlight_loop;
                    } else {
                        actual_highlights.push(highlight_name);
                    }
                    j += 1;
                }
            } else {
                // No more highlights at all; the assertion fails below.
                break;
            }
        }
        if !passed {
            return Err(Failure {
                row: position.row,
                column: position.column,
                expected_highlight: expected_highlight.clone(),
                actual_highlights: actual_highlights.into_iter().cloned().collect(),
            }
            .into());
        }
    }
    Ok(assertions.len())
}
/// Highlights `source`, parses its position-comment assertions, and checks
/// each assertion against the computed highlight spans. Returns the number
/// of assertions that passed.
pub fn test_highlight(
    loader: &Loader,
    highlighter: &mut Highlighter,
    highlight_config: &HighlightConfiguration,
    source: &[u8],
) -> Result<usize> {
    // Highlight the file, and parse out all of the highlighting assertions.
    let highlight_names = loader.highlight_names();
    let highlights = get_highlight_positions(loader, highlighter, highlight_config, source)?;
    let assertions =
        parse_position_comments(highlighter.parser(), highlight_config.language, source)?;
    iterate_assertions(&assertions, &highlights, &highlight_names)
}
/// Runs the highlighter over `source` and returns each highlighted span as
/// `(start point, end point, innermost active highlight)`.
///
/// Row/column tracking walks `source`'s char indices in step with the byte
/// ranges reported by the highlight events.
pub fn get_highlight_positions(
    loader: &Loader,
    highlighter: &mut Highlighter,
    highlight_config: &HighlightConfiguration,
    source: &[u8],
) -> Result<Vec<(Point, Point, Highlight)>> {
    let mut row = 0;
    let mut column = 0;
    let mut byte_offset = 0;
    // Row advances one char *after* the '\n' is consumed.
    let mut was_newline = false;
    let mut result = Vec::new();
    // Stack of currently-open highlights; the top is the innermost.
    let mut highlight_stack = Vec::new();
    let source = String::from_utf8_lossy(source);
    let mut char_indices = source.char_indices();
    for event in highlighter.highlight(highlight_config, source.as_bytes(), None, |string| {
        loader.highlight_config_for_injection_string(string, highlight_config.apply_all_captures)
    })? {
        match event? {
            HighlightEvent::HighlightStart(h) => highlight_stack.push(h),
            HighlightEvent::HighlightEnd => {
                highlight_stack.pop();
            }
            HighlightEvent::Source { start, end } => {
                let mut start_position = Point::new(row, column);
                while byte_offset < end {
                    if byte_offset <= start {
                        start_position = Point::new(row, column);
                    }
                    if let Some((i, c)) = char_indices.next() {
                        if was_newline {
                            row += 1;
                            column = 0;
                        } else {
                            // NOTE(review): the column advances by the byte
                            // delta (i - byte_offset), so columns count
                            // bytes, not characters — confirm this matches
                            // how assertion positions are produced.
                            column += i - byte_offset;
                        }
                        was_newline = c == '\n';
                        byte_offset = i;
                    } else {
                        break;
                    }
                }
                // Only the innermost highlight is recorded for the span.
                if let Some(highlight) = highlight_stack.last() {
                    result.push((start_position, Point::new(row, column), *highlight))
                }
            }
        }
    }
    Ok(result)
}
|
use crate::config::Config;
use crate::config::ResolvOptions;
use crate::config::sort_root_name_servers;
use crate::cache::Cache;
use crate::name_server::NameServer;
use crate::protocol::Protocol;
use std::io;
use std::net::IpAddr;
use std::net::Ipv4Addr;
use std::net::Ipv6Addr;
use std::pin::Pin;
use std::future::Future;
use std::sync::Arc;
use std::sync::RwLock;
// Platform-specific location of the hosts file.
// https://tools.ietf.org/html/rfc952
#[cfg(any(target_os = "macos",
          target_os = "linux",
          target_os = "freebsd",
          target_os = "netbsd"))]
const DEFAULT_HOSTS_FILE_PATH: &str = "/etc/hosts";
#[cfg(windows)]
const DEFAULT_HOSTS_FILE_PATH: &str = "C:\\Windows\\System32\\Drivers\\etc\\hosts";
// Resolver configuration file; only defined on unix-like targets.
// http://man7.org/linux/man-pages/man5/resolv.conf.5.html
#[cfg(any(target_os = "macos",
          target_os = "linux",
          target_os = "freebsd",
          target_os = "netbsd"))]
const RESOLVER_CONFIG_FILE_PATH: &str = "/etc/resolv.conf";
// Process-wide counter for DNS message ids; wraps at u16::MAX.
static GLOBAL_MESSAGE_ID: std::sync::atomic::AtomicU16 = std::sync::atomic::AtomicU16::new(0);
/// Returns the next DNS message id, wrapping around after `u16::MAX`.
///
/// FIX: the original separate `load` + `store` was a read-modify-write race
/// — two threads could observe the same id. A single `fetch_add` makes the
/// increment atomic.
fn id() -> u16 {
    // fetch_add wraps on overflow and returns the *previous* value, so
    // adding 1 reproduces the old `checked_add(1).unwrap_or(0)` sequence
    // (…, 65535, 0, 1, …).
    GLOBAL_MESSAGE_ID
        .fetch_add(1, std::sync::atomic::Ordering::SeqCst)
        .wrapping_add(1)
}
/// Role this process acts as. NOTE(review): no variant is referenced in
/// the visible code — confirm which ones are actually used.
#[derive(Debug, Clone)]
pub enum ResolverKind {
    // ZoneServer
    AuthoritativeServer,
    ProxyServer,
    RecursiveServer,
    Named,
    Proxy,
    Resolver,
}
/// Resolver backed by the system hosts file (e.g. /etc/hosts).
#[derive(Debug)]
pub struct SystemHostsResolver {
}
/// Resolver for link-local multicast DNS (mDNS) queries.
#[derive(Debug, Clone)]
pub struct LocalMulticastResolver {
    pub options: ResolvOptions,
    pub cache: Cache,
}
/// Resolver that forwards queries to a fixed set of upstream name servers.
#[derive(Debug, Clone)]
pub struct ProxyResolver {
    pub options: ResolvOptions,
    pub cache: Cache,
    pub upstream: Vec<NameServer>,
}
/// Resolver that iterates from the root name servers down to an answer.
#[derive(Debug)]
pub struct RecursiveResolver {
    pub options: ResolvOptions,
    pub cache: Cache,
    pub root_name_servers: Vec<NameServer>,
}
/// Front-door resolver that consults its sub-resolvers in a fixed order.
#[derive(Debug)]
pub struct StubResolver {
    pub options: ResolvOptions,
    pub cache: Cache,
    // Note: resolution is attempted in this order:
    // SystemHostsFile --> mDNS --> ProxyResolver --> RecursiveResolver
    pub system_hosts_resolver: Option<SystemHostsResolver>,
    pub local_multicast_resolver: Option<LocalMulticastResolver>,
    pub system_resolver: Option<ProxyResolver>, // upstream resolver configured in the system /etc/resolv.conf file
    pub proxy_resolver: Option<ProxyResolver>, // user-defined upstream resolver
    pub recursive_resolver: Option<RecursiveResolver>, // resolver that iterates down from the ROOT-SERVERS
}
impl StubResolver {
    /// Builds a stub resolver from `config`, wiring up only the recursive
    /// resolver; the hosts/mDNS/proxy stages are left disabled (`None`).
    pub async fn new(config: Config) -> Result<Self, io::Error> {
        let cache = Cache::new();
        let root_name_servers = sort_root_name_servers().await;
        // config.resolv_options is used by value twice below, so
        // ResolvOptions must be `Copy` — TODO confirm.
        Ok(Self {
            options: config.resolv_options,
            cache: cache.clone(),
            system_hosts_resolver: None,
            local_multicast_resolver: None,
            system_resolver: None,
            proxy_resolver: None,
            recursive_resolver: Some(RecursiveResolver {
                options: config.resolv_options,
                cache: cache.clone(),
                root_name_servers,
            })
        })
    }
    /// Resolves a parsed DNS request. Not implemented yet.
    pub fn query(&self, req: wire::Request) -> Pin<Box<dyn Future<Output = Result<wire::Response, wire::Error> > + Send >> {
        todo!()
    }
    /// Resolves a raw DNS packet. Not implemented yet.
    pub fn resolve<B: AsRef<[u8]>>(&self, pkt: B) -> Pin<Box<dyn Future<Output = Result<Vec<u8>, wire::Error> > + Send >> {
        todo!()
    }
}
/// One in-flight resolution attempt: shared options, an optional cache,
/// the request being answered, and the name servers to consult.
#[derive(Debug, Clone)]
pub struct Query {
    state: Arc<RwLock<ResolvOptions>>,
    cache: Option<Cache>,
    request: Arc<wire::Request>,
    name_servers: Arc<Vec<NameServer>>,
}
// NOTE(review): every field is Arc/Option-wrapped shared data. If all field
// types are themselves Send + Sync, this impl is redundant; if one is not,
// this silences a real soundness error from the compiler — verify why it is
// needed before trusting Query across threads.
unsafe impl Send for Query { }
/// Errors produced while resolving a query.
pub enum ResolvError {
    Illegal,
    Unrecognized,
}
// pub async fn handle_req(buf: &mut [u8], amt: usize) -> Result<usize, ()> {
// if amt < 12 {
// return Err(());
// }
// let pkt = &buf[..amt];
// match deserialize_req(pkt) {
// Err(_) => {
// let mut flags = wire::HeaderFlags::new_unchecked(u16::from_be_bytes([ buf[3], buf[4] ]));
// flags.set_qr(true);
// flags.set_rcode(wire::ResponseCode::FORMAT_ERROR);
// let flags_bytes = flags.bits().to_be_bytes();
// buf[3] = flags_bytes[0];
// buf[4] = flags_bytes[1];
// return Ok(amt);
// },
// Ok(req) => {
// todo!()
// },
// }
// }
// pub async fn run_udp_server<A: ToSocketAddrs>(addr: A) -> Result<(), tokio::io::Error> {
// let mut buf = [0u8; MAX_BUFF_SIZE];
// let mut listener = UdpSocket::bind(addr).await?;
// let cache = Cache::new();
// let root_name_servers = Arc::new(root_name_servers().await);
// let use_ipv4 = is_support_ipv4().await;
// let use_ipv6 = is_support_ipv6().await;
// info!("DNS service running at udp://{} ...", listener.local_addr()?);
// loop {
// match listener.recv_from(&mut buf).await {
// Ok((0, _)) => continue,
// Ok((amt, peer_addr)) => {
// let pkt = &buf[..amt];
// trace!("[UDP] received {:?} bytes from {:?}", pkt.len(), peer_addr);
// match deserialize_req(pkt) {
// Ok(req) => {
// let req_id = req.id;
// let raw_questions = req.questions.clone();
// let query = Query {
// state: Arc::new(RwLock::new(ResolvOptions {
// timeout: Duration::from_secs(30),
// attempts: 32,
// use_ipv4: use_ipv4,
// use_ipv6: use_ipv6,
// })),
// cache: Some(cache.clone()),
// request: Arc::new(req),
// name_servers: root_name_servers.clone(),
// };
// match rquery(query).await {
// Ok(mut res) => {
// debug!("{:?}", &res);
// if res.answers.len() > 0 {
// info!("Answers Section:");
// for rr in res.answers.iter() {
// info!("{:?}", rr);
// }
// } else {
// }
// res.questions = raw_questions;
// res.id = req_id;
// // res.authorities.clear();
// // res.additionals.clear();
// res.flags |= wire::ReprFlags::RA;
// if let Ok(amt) = serialize_res(&res, &mut buf) {
// let pkt = &buf[..amt];
// let _ = listener.send_to(pkt, peer_addr).await;
// }
// },
// Err(e) => {
// error!("{:?}", e);
// },
// }
// debug!("QUERY DONE.\n\n");
// },
// Err(e) => {
// error!("{:?}", e);
// },
// }
// },
// Err(e) => error!("{:?}", e),
// }
// }
// }
/// DNS service: a stub resolver plus the network listeners it serves on.
pub struct Service {
    pub stub: StubResolver,
    pub udp_socket: Option<tokio::net::UdpSocket>,
    pub tcp_listener: Option<tokio::net::TcpListener>,
    pub tls_listener: Option<crate::net::tls::TlsListener>,
    pub h2_listener: Option<crate::net::h2::server::H2Listener>,
}
impl Service {
    /// Binds the configured UDP/TCP listeners and builds the resolver.
    /// TLS and HTTP/2 listeners are not wired up yet and stay `None`.
    pub async fn new(config: Config) -> Result<Self, io::Error> {
        let udp_socket = match config.bind.socket_addr_by(Protocol::Udp) {
            Some(sa) => Some(tokio::net::UdpSocket::bind(sa).await?),
            None => None,
        };
        let tcp_listener = match config.bind.socket_addr_by(Protocol::Tcp) {
            Some(sa) => Some(tokio::net::TcpListener::bind(sa).await?),
            None => None,
        };
        // let tls_listener = match config.bind.socket_addr_by(Protocol::Tls) {
        //     Some(sa) => Some(crate::net::tls::TlsListener::bind(sa).await?),
        //     None => None,
        // };
        let tls_listener = None;
        let h2_listener = None;
        let stub = StubResolver::new(config).await?;
        Ok(Self {
            stub,
            udp_socket,
            tcp_listener,
            tls_listener,
            h2_listener,
        })
    }
    /// Resolves a raw DNS packet through the stub resolver. Not implemented yet.
    pub async fn resolve<B: AsRef<[u8]>>(&self, pkt: B) -> Result<Vec<u8>, wire::Error> {
        todo!()
    }
    /// Serves requests on the bound listeners until shutdown. Not implemented yet.
    pub fn run_forever(&self) -> Result<(), io::Error> {
        todo!()
    }
}
|
use byteorder::{ByteOrder, ReadBytesExt, WriteBytesExt};
use crate::{
errors::*,
TsResolution
};
use std::{
borrow::Cow,
io::Read,
io::Write,
time::Duration
};
/// Describes a pcap packet header.
#[derive(Copy, Clone, Default, Debug, Eq, PartialEq)]
pub struct PacketHeader {
    /// Timestamp in seconds
    pub ts_sec: u32,
    /// Nanosecond part of the timestamp (always stored in nanoseconds in
    /// memory, even when the capture file uses microsecond resolution)
    pub ts_nsec: u32,
    /// Number of octets of the packet saved in file
    pub incl_len: u32,
    /// Original length of the packet on the wire
    pub orig_len: u32
}
impl PacketHeader {
    /// Create a new `PacketHeader` with the given parameters.
    /// `ts_nsec` must already be in nanoseconds.
    pub fn new(ts_sec: u32, ts_nsec: u32, incl_len:u32, orig_len:u32) -> PacketHeader {
        PacketHeader {
            ts_sec,
            ts_nsec,
            incl_len,
            orig_len
        }
    }
    /// Create a new `PacketHeader` from a reader.
    ///
    /// Reads 16 bytes; the sub-second timestamp field is normalized to
    /// nanoseconds, and both length fields are sanity-checked.
    pub fn from_reader<R: Read, B: ByteOrder>(reader: &mut R, ts_resolution: TsResolution) -> ResultParsing<PacketHeader> {
        let ts_sec = reader.read_u32::<B>()?;
        let mut ts_nsec = reader.read_u32::<B>()?;
        if ts_resolution == TsResolution::MicroSecond {
            // NOTE(review): a corrupt microsecond value above ~4.29e6 makes
            // this u32 multiply overflow (panic in debug, wrap in release) —
            // consider validating before converting.
            ts_nsec *= 1000;
        }
        let incl_len = reader.read_u32::<B>()?;
        let orig_len = reader.read_u32::<B>()?;
        // 0xFFFF is the classic pcap snap-length ceiling.
        if incl_len > 0xFFFF {
            return Err(PcapError::InvalidField("PacketHeader incl_len > 0xFFFF"));
        }
        if orig_len > 0xFFFF {
            return Err(PcapError::InvalidField("PacketHeader orig_len > 0xFFFF"));
        }
        if incl_len > orig_len {
            return Err(PcapError::InvalidField("PacketHeader incl_len > orig_len"));
        }
        Ok(
            PacketHeader {
                ts_sec,
                ts_nsec,
                incl_len,
                orig_len
            }
        )
    }
    /// Create a new `PacketHeader` from a slice.
    /// Returns the remaining slice alongside the parsed header.
    pub fn from_slice<B: ByteOrder>(mut slice: &[u8], ts_resolution: TsResolution) -> ResultParsing<(&[u8], PacketHeader)> {
        //Header len
        if slice.len() < 16 {
            return Err(PcapError::IncompleteBuffer(16 - slice.len()));
        }
        let header = Self::from_reader::<_, B>(&mut slice, ts_resolution)?;
        Ok((slice, header))
    }
    /// Write a `PcapHeader` to a writer.
    ///
    /// Writes 16B (four u32 fields) to the writer on success.
    pub fn write_to< W: Write, B: ByteOrder>(&self, writer: &mut W, ts_resolution: TsResolution) -> ResultParsing<()> {
        // Convert the stored nanoseconds back to the file's resolution.
        let mut ts_unsec = self.ts_nsec;
        if ts_resolution == TsResolution::MicroSecond{
            ts_unsec /= 1000;
        }
        writer.write_u32::<B>(self.ts_sec)?;
        writer.write_u32::<B>(ts_unsec)?;
        writer.write_u32::<B>(self.incl_len)?;
        writer.write_u32::<B>(self.orig_len)?;
        Ok(())
    }
    /// Get the timestamp of the packet as a Duration
    pub fn timestamp(&self) -> Duration {
        Duration::new(self.ts_sec.into(), self.ts_nsec)
    }
}
/// Packet with its header and data.
///
/// The payload can be owned or borrowed.
#[derive(Clone, Debug)]
pub struct Packet<'a> {
    /// Header of the packet
    pub header: PacketHeader,
    /// Payload, owned or borrowed, of the packet (incl_len bytes)
    pub data: Cow<'a, [u8]>
}
impl<'a> Packet<'a> {
    /// Create a new borrowed `Packet` with the given parameters.
    /// `incl_len` is derived from `data.len()`.
    pub fn new(ts_sec: u32, ts_nsec: u32, data: &'a [u8], orig_len: u32) -> Packet<'a> {
        let header = PacketHeader {
            ts_sec,
            ts_nsec,
            // NOTE(review): truncating cast for payloads over u32::MAX
            // bytes — in practice snap lengths keep this far smaller.
            incl_len: data.len() as u32,
            orig_len
        };
        Packet {
            header,
            data: Cow::Borrowed(data)
        }
    }
    /// Create a new owned `Packet` with the given parameters.
    pub fn new_owned(ts_sec: u32, ts_nsec: u32, data: Vec<u8>, orig_len: u32) -> Packet<'static> {
        let header = PacketHeader {
            ts_sec,
            ts_nsec,
            incl_len: data.len() as u32,
            orig_len
        };
        Packet {
            header,
            data: Cow::Owned(data)
        }
    }
    /// Create a new owned `Packet` from a reader.
    /// Reads the 16-byte header first, then exactly `incl_len` payload bytes.
    pub fn from_reader<R: Read, B: ByteOrder>(reader: &mut R, ts_resolution: TsResolution) -> ResultParsing<Packet<'static>> {
        let header = PacketHeader::from_reader::<R, B>(reader, ts_resolution)?;
        let mut bytes = vec![0_u8; header.incl_len as usize];
        reader.read_exact(&mut bytes)?;
        Ok(
            Packet {
                header,
                data : Cow::Owned(bytes)
            }
        )
    }
    /// Create a new borrowed `Packet` from a slice.
    /// Returns the slice remaining after the header and payload.
    pub fn from_slice<B: ByteOrder>(slice: &'a[u8], ts_resolution: TsResolution) -> ResultParsing<(&'a[u8], Packet<'a>)> {
        let (slice, header) = PacketHeader::from_slice::<B>(slice, ts_resolution)?;
        let len = header.incl_len as usize;
        if slice.len() < len {
            return Err(PcapError::IncompleteBuffer(len - slice.len()));
        }
        let packet = Packet {
            header,
            data : Cow::Borrowed(&slice[..len])
        };
        let slice = &slice[len..];
        Ok((slice, packet))
    }
    /// Convert a borrowed `Packet` to an owned one (copies the payload).
    pub fn to_owned(& self) -> Packet<'static> {
        Packet {
            header: self.header,
            data: Cow::Owned(self.data.as_ref().to_owned())
        }
    }
}
|
// Exercise 1.1.
// Below is a sequence of expressions.
// What is the result printed by the interpreter in response to each expression?
// Assume that the sequence is to be evaluated in the order in which it is presented.
// 10
// 10
// (+ 5 3 4)
// 12
// (- 9 1)
// 8
// (/ 6 2)
// 3
// (+ (* 2 4) (- 4 6))
// 6
// (define a 3)
// (define b (+ a 1))
//(+ a b (* a b))
// 19
// (= a b)
// false
// (if (and (> b a) (< b (* a b)))
// b
// a)
// 4
// (cond ((= a 4) 6)
// ((= b 4) (+ 6 7 a))
// (else 25))
// 16
// (+ 2 (if (> b a) b a))
// 6
// (* (cond ((> a b) a)
// ((< a b) b)
// (else -1))
// (+ a 1))
// 16
// Nothing to execute: the exercise answers live in the comments above.
fn main () {
}
|
//! Find matches in a large input text.
//!
//! Note that this was adapted from [regex-benchmark]; the key part retained
//! is the patterns [regex-benchmark] used for finding the matches.
//!
//! [regex-benchmark]: https://github.com/mariomka/regex-benchmark
use regex::RegexBuilder;
use sightglass_api as bench;
/// Matches e-mail-like tokens: word chars/dots/+/- around a single `@`.
const EMAIL_PATTERN: &str = r"[\w\.+-]+@[\w\.-]+\.[\w\.-]+";
/// Matches `scheme://host/...` URIs with optional query and fragment parts.
const URI_PATTERN: &str = r"[\w]+://[^/\s?#]+[^\s?#]+(?:\?[^\s#]*)?(?:#[^\s]*)?";
/// Matches dotted-quad IPv4 addresses, range-checking each octet (0-255).
const IP_PATTERN: &str =
    r"(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9])\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9])";
/// Benchmark driver: loads `default.input`, then counts email/URI/IP
/// matches between the sightglass `start`/`end` markers so only the regex
/// work is measured.
fn main() {
    let path = "default.input";
    eprintln!("[regex] matching {}", path);
    let data = std::fs::read_to_string(path).expect("unable to find `*.input` text file");
    bench::start();
    let emails = count_matches(&data, EMAIL_PATTERN);
    let uris = count_matches(&data, URI_PATTERN);
    let ips = count_matches(&data, IP_PATTERN);
    bench::end();
    eprintln!("[regex] found {} emails", emails);
    eprintln!("[regex] found {} URIs", uris);
    eprintln!("[regex] found {} IPs", ips);
}
/// Compiles `pattern` and returns the number of non-overlapping matches it
/// has in `data`. Panics if the pattern fails to compile.
fn count_matches(data: &str, pattern: &str) -> usize {
    let compiled = RegexBuilder::new(pattern).build().unwrap();
    let matches = compiled.find_iter(data);
    matches.count()
}
|
/// Pre-parsed CSS selectors for the parts of a scraped page this program
/// reads. NOTE(review): field meanings inferred from names (row list, item
/// name, price, amount, refine level, properties) — confirm against the
/// code that builds and uses this struct.
struct Selector {
    rows: scraper::Selector,
    differentiation: scraper::Selector,
    name: scraper::Selector,
    price: scraper::Selector,
    amount: scraper::Selector,
    refine: scraper::Selector,
    properties: scraper::Selector,
}
|
use std::io;
use std::fs::File;
use std::mem;
use std::path::Path;
use num::{rational::Ratio, BigInt, BigRational};
use unicode_reader::CodePoints;
use crate::tokens::TokenTable;
use crate::types::{KToken, Name};
/// True for "letter-like" Unicode characters usable in identifiers:
/// Greek (minus λ, Π, Σ), Coptic, polytonic Greek, the letter-like symbol
/// block, and mathematical Latin script/double-struck/fraktur letters.
fn is_letter_like_unicode(c: char) -> bool {
    match c {
        // Explicit exclusions: lambda, capital Pi, capital Sigma.
        'λ' | 'Π' | 'Σ' => false,
        'α'..='ω' => true, // lower Greek
        'Α'..='Ω' => true, // upper Greek
        'ϊ'..='ϻ' => true, // Coptic letters
        'ἀ'..='῾' => true, // polytonic Greek extended character set
        '℀'..='⅏' => true, // letter-like block
        '𝒜'..='𝖟' => true, // Latin letters: script, double-struck, fraktur
        _ => false,
    }
}
/// True for subscript/superscript alphanumerics allowed inside identifiers.
fn is_sub_script_alnum_unicode(c: char) -> bool {
    match c {
        'ⁿ'..='₉' | // superscript n and numeric sub/superscripts
        'ₐ'..='ₜ' | // letter-like subscripts
        'ᵢ'..='ᵪ'   // letter-like subscripts
            => true,
        _ => false,
    }
}
fn is_id_first(c: char) -> bool {
c.is_alphabetic() || c == '_' || c == '«' || is_letter_like_unicode(c)
}
/// Returns `true` if `c` may appear after the first character of an identifier.
fn is_id_rest(c: char) -> bool {
    if c.is_alphanumeric() || c == '_' || c == '\'' {
        return true;
    }
    is_letter_like_unicode(c) || is_sub_script_alnum_unicode(c)
}
/// A single lexical token produced by the lexer.
#[derive(Debug, PartialEq)] pub enum Token {
    /// Keyword text together with its precedence.
    Keyword(String, u32),
    /// Keyword that has no precedence (`KToken { prec: None }`).
    CommandKeyword(String),
    /// A (possibly dotted / escaped) identifier.
    Identifier(Name),
    /// Integer literal (arbitrary precision).
    Numeral(BigInt),
    /// Decimal literal stored exactly as a rational.
    Decimal(BigRational),
    /// String literal with escape sequences already resolved.
    StringTk(String),
    /// Character literal.
    Char(char),
    /// Backquoted symbol (recognized only while lexing notation).
    QuotedSymbol(String),
    /// `/--` (flag = false) or `/-!` (flag = true) documentation block.
    DocBlock(bool, String),
    /// `.0`-style numeric field projection.
    FieldNum(u32),
    /// `.foo`-style named field projection.
    FieldName(Name),
    /// End of input.
    Eof
}
impl Token {
    /// Returns `true` when this token is the keyword or command keyword `s`.
    pub fn is_tk(&self, s: &str) -> bool {
        match self.tk() {
            Some(s2) => s == s2,
            None => false,
        }
    }
    /// Returns the text of a `Keyword`/`CommandKeyword`, or `None` otherwise.
    pub fn tk(&self) -> Option<&str> {
        match self {
            Token::Keyword(s, _) => Some(s),
            Token::CommandKeyword(s) => Some(s),
            _ => None,
        }
    }
}
/// Builds an `InvalidData` I/O error carrying message `s`.
fn invalid(s: &str) -> io::Error {
    io::Error::new(io::ErrorKind::InvalidData, s)
}

/// Shorthand for returning an `InvalidData` error as an `Err` result.
fn throw<A>(s: &str) -> io::Result<A> {
    Err(invalid(s))
}
/// Low-level lexer state over a fallible character iterator.
struct LexerCore<T: Iterator<Item = io::Result<char>>> {
    /// Character source; `'\0'` is used internally as the EOF sentinel.
    source: T,
    /// Characters pushed back for re-reading (LIFO).
    pushback: Vec<char>,
    /// Current lookahead character (`'\0'` at end of input).
    curr: char,
    /// When true, backquoted symbols are recognized (notation mode).
    in_notation: bool,
    /// When true, `.1` / `.foo` field projections are recognized.
    allow_field_notation: bool
}
impl<T: Iterator<Item = io::Result<char>>> LexerCore<T> {
    /// Advances to the next character (draining `pushback` first, '\0' at EOF)
    /// and returns it; also stores it in `self.curr`.
    fn next(&mut self) -> io::Result<char> {
        self.curr =
            if let Some(pb) = self.pushback.pop() {pb}
            else if let Some(ch) = self.source.next() {ch?}
            else {'\0'};
        Ok(self.curr)
    }
    /// Creates a lexer core primed with the first character of `source`.
    pub fn new(mut source: T) -> io::Result<Self> {
        let curr = if let Some(ch) = source.next() {ch?} else {'\0'};
        Ok(LexerCore {source, pushback: Vec::new(),
            curr, in_notation: false, allow_field_notation: true})
    }
    /// Pushes the current character back onto the input and makes `last` the
    /// current character again.
    fn pushback(&mut self, last: char) {
        self.pushback.push(self.curr);
        self.curr = last;
    }
    /// Reads a numeral or decimal literal. A leading `0` may introduce a base
    /// prefix (0b/0o/0x); decimals are only recognized in base 10 and the
    /// denominator is accumulated as a power of ten.
    fn read_number(&mut self) -> io::Result<Token> {
        let mut num = (self.curr as u32) - ('0' as u32);
        let base = if num == 0 {
            let base = match self.next()? {
                'B' | 'b' => 2,
                'O' | 'o' => 8,
                'X' | 'x' => 16,
                _ => 10
            };
            if base != 10 {
                num = self.next()?.to_digit(base)
                    .ok_or_else(|| invalid("invalid numeral, expected digit after base prefix"))?;
            }
            base
        } else {10};
        let mut num: BigInt = num.into();
        // `denom` is Some(10^k) once a decimal point has been consumed.
        let mut denom: Option<BigInt> = None;
        loop {
            if let Some(val) = self.curr.to_digit(base) {
                num = base*num + val;
                match &mut denom { Some(q) => *q *= 10, None => () };
                self.next()?;
            } else if base == 10 && self.curr == '.' {
                // A '.' not followed by a digit (or a second '.') is not part
                // of the number: push it back and stop.
                if !self.next()?.is_digit(base) || denom.is_some() {
                    self.pushback('.'); break
                }
                denom = Some(1.into());
            } else {break}
        }
        match denom {
            Some(denom) => Ok(Token::Decimal(Ratio::new(num, denom))),
            None => Ok(Token::Numeral(num))
        }
    }
    /// Skips to (and past) the next newline or EOF.
    fn read_line_comment(&mut self) -> io::Result<()> {
        loop {
            match self.curr {
                '\0' => return Ok(()),
                '\n' => {self.next()?; return Ok(())},
                _ => self.next()?
            };
        }
    }
    /// Skips a (possibly nested) `/- ... -/` block comment. Assumes the
    /// opening `/-` has already been consumed.
    fn read_block_comment(&mut self) -> io::Result<()> {
        let mut nest = 1;
        loop {
            match self.curr {
                '\0' => return throw("unexpected end of comment block"),
                '/' => if self.next()? == '-' {
                    self.next()?; nest += 1;
                },
                '-' => if self.next()? == '/' {
                    nest -= 1;
                    if nest == 0 { self.next()?; return Ok(()) }
                },
                _ => { self.next()?; }
            }
        }
    }
    /// Collects the body of a `/-- ... -/` or `/-! ... -/` doc block;
    /// `modd` distinguishes the `/-!` (module doc) form.
    fn read_doc_block(&mut self, modd: bool) -> io::Result<Token> {
        let mut buf = String::new();
        loop {
            let c = self.curr;
            match c {
                '\0' => return throw("unexpected end of documentation block"),
                '-' => if self.next()? == '/' {
                    self.next()?; return Ok(Token::DocBlock(modd, buf))
                },
                _ => { self.next()?; }
            };
            buf.push(c);
        }
    }
    /// Reads one character, resolving backslash escapes (\n, \t, \', \", \\,
    /// \xHH, \uHHHH). `err_msg` is used when input ends mid-character.
    fn read_single_char(&mut self, err_msg: &str) -> io::Result<char> {
        match self.curr {
            '\0' => { throw(err_msg) },
            '\\' => match self.next()? {
                '\0' => { throw(err_msg) },
                'n' => { self.next()?; Ok('\n') },
                't' => { self.next()?; Ok('\t') },
                '\'' => { self.next()?; Ok('\'') },
                '\"' => { self.next()?; Ok('\"') },
                '\\' => { self.next()?; Ok('\\') },
                'x' => {
                    // Two hex digits -> code point.
                    let hex = self.next()?.to_digit(16).ok_or_else(|| invalid("invalid hex char in escape sequence"))?;
                    let hex = 16*hex + self.next()?.to_digit(16).ok_or_else(|| invalid("invalid hex char in escape sequence"))?;
                    std::char::from_u32(hex).ok_or_else(|| invalid("invalid utf-8")) },
                'u' => {
                    // Four hex digits -> code point.
                    let hex = self.next()?.to_digit(16).ok_or_else(|| invalid("invalid hex char in escape sequence"))?;
                    let hex = 16*hex + self.next()?.to_digit(16).ok_or_else(|| invalid("invalid hex char in escape sequence"))?;
                    let hex = 16*hex + self.next()?.to_digit(16).ok_or_else(|| invalid("invalid hex char in escape sequence"))?;
                    let hex = 16*hex + self.next()?.to_digit(16).ok_or_else(|| invalid("invalid hex char in escape sequence"))?;
                    std::char::from_u32(hex).ok_or_else(|| invalid("invalid utf-8")) },
                _ => throw("invalid escape sequence")
            },
            c => { self.next()?; Ok(c) }
        }
    }
    /// Reads the remainder of a character literal (opening quote consumed).
    fn read_char(&mut self) -> io::Result<Token> {
        let c = self.read_single_char("unexpected end of character")?;
        if self.curr != '\'' {return throw("invalid character, ' expected")}
        self.next()?; Ok(Token::Char(c))
    }
    /// Reads a string literal up to the closing double quote.
    fn read_string(&mut self) -> io::Result<Token> {
        let mut s = String::new(); self.next()?;
        loop {
            if self.curr == '\"' {
                self.next()?; return Ok(Token::StringTk(s)) }
            s.push(self.read_single_char("unexpected end of string")?);
        }
    }
    /// Reads a backquoted symbol (notation mode only).
    /// NOTE(review): `start` is initialized to `false`, so the "empty quoted
    /// identifier" and digit-start checks below appear unreachable — this
    /// looks like it was meant to be `true`; confirm against upstream.
    fn read_quoted_symbol(&mut self) -> io::Result<Token> {
        let mut s = String::new(); self.next()?;
        let mut start = false;
        let mut trailing_space = false;
        loop {
            match self.curr {
                '\0' => return throw("unexpected quoted identifier"),
                '`' if start => return throw("empty quoted identifier"),
                '`' => return Ok(Token::QuotedSymbol(s)),
                '\"' | '\n' | '\t' => return throw("invalid character in quoted identifier"),
                ' ' => { if !start {trailing_space = true}; s.push(' ') },
                c if start && c.is_digit(10) => return throw("quoted identifier can't start with digit"),
                _ if trailing_space => return throw("unexpected space inside of quoted symbol"),
                c => { start = false; s.push(c) },
            }
            self.next()?;
        }
    }
    /// Reads the digits of a `.123` field projection with overflow checking.
    fn read_field_idx(&mut self) -> io::Result<Token> {
        let mut num: u32 = 0;
        while let Some(m) = self.curr.to_digit(10) {
            num = num.checked_mul(10).and_then(|n| n.checked_add(m))
                .ok_or_else(|| invalid("field notation index too large"))?;
            self.next()?;
        }
        Ok(Token::FieldNum(num))
    }
    /// Appends one identifier component to `cs`, handling `«...»` escapes.
    /// Stops (without consuming) at the first character that cannot continue
    /// the identifier.
    fn read_id_part(&mut self, cs: &mut String) -> io::Result<()> {
        let mut escaped = false;
        loop {
            if escaped {
                match self.curr {
                    '»' => escaped = false,
                    '\r' | '\t' | '\n' | '«' => return throw("illegal character in escaped identifier"),
                    _ => ()
                }
            } else {
                match self.curr {
                    '«' => escaped = true,
                    c if is_id_rest(c) => (),
                    _ => return Ok(())
                }
            }
            cs.push(self.curr);
            self.next()?;
        }
    }
    /// Longest-match search of `cs` (extended on demand from the input)
    /// against the token table. Returns the matched token and match length.
    fn munch<'a>(&mut self, tt: &'a TokenTable, cs: &mut String) -> io::Result<Option<(&'a KToken, usize)>> {
        let mut res = tt.search().next(cs);
        loop {
            match res {
                Ok(tk) => return Ok(tk),
                Err(iter) => {
                    // Inconclusive: feed the searcher more characters.
                    let len = cs.len();
                    let c = self.next()?;
                    if c == '\0' {return Ok(iter.finish())}
                    cs.push(c);
                    res = iter.next(&cs[len..]);
                }
            }
        }
    }
    /// Reads a keyword, command keyword, identifier or field projection —
    /// whichever matches longest at the current position.
    fn read_key_cmd_id(&mut self, tt: &TokenTable) -> io::Result<Token> {
        let mut cs = String::new();
        // Converts the accumulated text into a dotted `Name`, honoring
        // `«...»` escapes (dots inside escapes do not split components).
        fn cs_to_name(cs: &str) -> Name {
            let mut n: Name = Name::anon();
            let mut part = String::new();
            let mut escaped = false;
            for c in cs.chars() {
                match c {
                    '«' => escaped = true,
                    '»' => escaped = false,
                    '.' if !escaped =>
                        n = n.str(mem::replace(&mut part, String::new())),
                    c => part.push(c)
                }
            }
            n.str(part)
        }
        // Length of the identifier prefix of `cs` (0 if none).
        let mut id_sz = 0;
        if self.allow_field_notation && self.curr == '.' {
            // `.123` => field index, `.foo` => field name.
            if self.next()?.is_digit(10) {return self.read_field_idx()}
            if is_id_first(self.curr) && self.curr != '_' {
                self.read_id_part(&mut cs)?;
                return Ok(Token::FieldName(cs_to_name(&cs)))
            }
            cs.push('.');
        } else {
            // Collect a dotted identifier: part ('.' part)*.
            while is_id_first(self.curr) {
                self.read_id_part(&mut cs)?;
                id_sz = cs.len();
                if self.curr != '.' {break}
                cs.push('.');
                self.next()?;
            }
        }
        cs.push(self.curr);
        // Prefer a keyword match only if it is at least as long as the
        // identifier prefix. NOTE(review): the `n/2` halving of the match
        // length presumably reflects the searcher's unit (e.g. UTF-16 or
        // byte pairs) — confirm against the `TokenTable` implementation.
        let (tk, n) = match self.munch(tt, &mut cs)?.and_then(|(tk, n)| {
            if n/2 < id_sz {None} else {Some((tk, n/2))}
        }) {
            None => (Token::Identifier(cs_to_name(&cs[0..id_sz])), id_sz),
            Some((KToken {tk, prec: None}, n)) => (Token::CommandKeyword(tk.clone()), n),
            Some((KToken {tk, prec: Some(prec)}, n)) => (Token::Keyword(tk.clone(), *prec), n)
        };
        if n == 0 {return throw("unexpected token")}
        // Return the unconsumed tail of `cs` to the input (last char is
        // already in `self.curr`, hence `skip(1)`).
        for c in cs.split_at(n).1.chars().rev().skip(1) { self.pushback(c) }
        Ok(tk)
    }
    /// Produces the next token, skipping whitespace and comments.
    pub fn lex(&mut self, tt: &TokenTable) -> io::Result<Token> {
        loop {
            match self.curr {
                '\0' => return Ok(Token::Eof),
                ' ' | '\r' | '\t' | '\n' => (),
                '\"' => return self.read_string(),
                '`' if self.in_notation => return self.read_quoted_symbol(),
                c if c.is_digit(10) => return self.read_number(),
                _ => {
                    // Comment/char-literal keywords are handled here because
                    // they are recognized via the token table.
                    match self.read_key_cmd_id(tt)? {
                        Token::Keyword(s, prec) => match s.as_ref() {
                            "--" => self.read_line_comment()?,
                            "/-" => self.read_block_comment()?,
                            "/--" => return self.read_doc_block(false),
                            "/-!" => return self.read_doc_block(true),
                            "\'" => return self.read_char(),
                            _ => return Ok(Token::Keyword(s, prec)) },
                        k => return Ok(k) }
                } }
            self.next()?;
        }
    }
}
/// High-level lexer: couples a `TokenTable` with a `LexerCore` reading
/// UTF-8 code points from any `io::Read` source.
pub struct Lexer<T: io::Read> {
    /// Keyword/command table consulted while tokenizing.
    pub token_table: TokenTable,
    /// The underlying character-level lexer state.
    data: LexerCore<CodePoints<io::Bytes<T>>>
}
/// Opens `path` and wraps it in a buffered `Lexer` using token table `tt`.
pub fn from_file(path: &Path, tt: TokenTable) -> io::Result<Lexer<io::BufReader<File>>> {
    let file = File::open(path)?;
    Lexer::new(io::BufReader::new(file), tt)
}
impl<T: io::Read> Lexer<T> {
    /// Builds a lexer that decodes `source` as UTF-8 code points.
    pub fn new(source: T, token_table: TokenTable) -> io::Result<Self> {
        let data = LexerCore::new(CodePoints::from(source))?;
        Ok(Lexer {token_table, data})
    }
    /// The current lookahead character; `'\0'` signals end of input.
    pub fn curr(&self) -> char {
        self.data.curr
    }
    /// Produces the next token using this lexer's own token table.
    pub fn lex(&mut self) -> io::Result<Token> {
        self.data.lex(&self.token_table)
    }
    /// Enables/disables `.1`/`.foo` field notation, returning the old flag.
    pub fn allow_field_notation(&mut self, flag: bool) -> bool {
        mem::replace(&mut self.data.allow_field_notation, flag)
    }
}
impl<T: io::Read> Iterator for Lexer<T> {
    type Item = io::Result<Token>;
    /// Yields tokens until end of input; `Eof` terminates the iteration and
    /// errors are yielded as `Err` items.
    fn next(&mut self) -> Option<io::Result<Token>> {
        if self.curr() == '\0' {
            return None;
        }
        match self.lex() {
            Ok(Token::Eof) => None,
            other => Some(other),
        }
    }
}
|
mod utils;
use wasm_bindgen::prelude::*;
// When the `wee_alloc` feature is enabled, use `wee_alloc` as the global
// allocator.
// NOTE(review): wee_alloc is typically chosen to shrink the .wasm binary —
// confirm that trade-off is still wanted here.
#[cfg(feature = "wee_alloc")]
#[global_allocator]
static ALLOC: wee_alloc::WeeAlloc = wee_alloc::WeeAlloc::INIT;
// Imported JavaScript bindings available to this wasm module.
#[wasm_bindgen]
extern {
    /// Browser `window.alert`.
    fn alert(s: &str);
}
/// Shows a personalized greeting in a browser alert dialog.
#[wasm_bindgen]
pub fn input_greeting(name: &str) {
    let message = format!("Hello, {}", name);
    alert(&message);
}
/// Shows a fixed greeting in a browser alert dialog.
#[wasm_bindgen]
pub fn greet() {
    let message = "Hello, BrisReact!";
    alert(message);
}
/// Returns the `n`-th Fibonacci number with `fib(0) = fib(1) = 1`.
///
/// Iterative implementation: the previous recursive version performed
/// O(2^n) redundant calls; this runs in O(n) with identical results
/// (including the same overflow-panic behavior on `u32` addition).
#[wasm_bindgen]
pub fn fibonacci(n: u32) -> u32 {
    let (mut prev, mut curr) = (1u32, 1u32);
    for _ in 1..n {
        let next = prev + curr;
        prev = curr;
        curr = next;
    }
    curr
}
/// A tiny stateful object exported to JavaScript.
#[wasm_bindgen]
pub struct Foo {
    // The single mutable value this object wraps.
    internal: i32,
}
#[wasm_bindgen]
impl Foo {
    /// Creates a `Foo` whose internal value starts at zero.
    #[wasm_bindgen(constructor)]
    pub fn new() -> Foo {
        Foo { internal: 0 }
    }
    /// Returns the current internal value.
    pub fn get(&self) -> i32 {
        self.internal
    }
    /// Overwrites the internal value.
    pub fn set(&mut self, val: i32) {
        self.internal = val;
    }
    /// Shows the internal value via the JS `alert` binding.
    pub fn alert_current_internal_value(&self) {
        let message = format!("The current internal value is {}", self.internal);
        alert(&message);
    }
}
// The entrypoint to our function that runs before React loads.
#[wasm_bindgen(start)]
pub fn main() -> Result<(), JsValue> {
let window = web_sys::window().expect("No global window object exists.");
let document = window.document().expect("Should have a document on window.");
let body = document.body().expect("Document should have a body.");
let val = document.create_element("p")?;
val.set_inner_html("Hello from Rust and WebAssembly");
body.append_child(&val)?;
Ok(())
} |
// svd2rust-generated accessors for register RCC_DBGCFGR; keep in sync with
// the device SVD rather than editing by hand.
#[doc = "Reader of register RCC_DBGCFGR"]
pub type R = crate::R<u32, super::RCC_DBGCFGR>;
#[doc = "Writer for register RCC_DBGCFGR"]
pub type W = crate::W<u32, super::RCC_DBGCFGR>;
#[doc = "Register RCC_DBGCFGR `reset()`'s with value 0x01"]
impl crate::ResetValue for super::RCC_DBGCFGR {
    type Type = u32;
    #[inline(always)]
    fn reset_value() -> Self::Type {
        // Hardware reset value: TRACEDIV = 1, all other bits clear.
        0x01
    }
}
// TRACEDIV: 3-bit trace-clock divider field at bits 0:2 (generated code).
#[doc = "TRACEDIV\n\nValue on reset: 1"]
#[derive(Clone, Copy, Debug, PartialEq)]
#[repr(u8)]
pub enum TRACEDIV_A {
    #[doc = "0: aclk"]
    B_0X0 = 0,
    #[doc = "1: aclk / 2 (default after\r\n reset)"]
    B_0X1 = 1,
    #[doc = "2: aclk / 4"]
    B_0X2 = 2,
    #[doc = "3: aclk / 8"]
    B_0X3 = 3,
}
// Conversion used by the write proxy to turn a variant into raw field bits.
impl From<TRACEDIV_A> for u8 {
    #[inline(always)]
    fn from(variant: TRACEDIV_A) -> Self {
        variant as _
    }
}
#[doc = "Reader of field `TRACEDIV`"]
pub type TRACEDIV_R = crate::R<u8, TRACEDIV_A>;
impl TRACEDIV_R {
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> crate::Variant<u8, TRACEDIV_A> {
        use crate::Variant::*;
        match self.bits {
            0 => Val(TRACEDIV_A::B_0X0),
            1 => Val(TRACEDIV_A::B_0X1),
            2 => Val(TRACEDIV_A::B_0X2),
            3 => Val(TRACEDIV_A::B_0X3),
            // The field is 3 bits wide, so values 4-7 have no named variant.
            i => Res(i),
        }
    }
    #[doc = "Checks if the value of the field is `B_0X0`"]
    #[inline(always)]
    pub fn is_b_0x0(&self) -> bool {
        *self == TRACEDIV_A::B_0X0
    }
    #[doc = "Checks if the value of the field is `B_0X1`"]
    #[inline(always)]
    pub fn is_b_0x1(&self) -> bool {
        *self == TRACEDIV_A::B_0X1
    }
    #[doc = "Checks if the value of the field is `B_0X2`"]
    #[inline(always)]
    pub fn is_b_0x2(&self) -> bool {
        *self == TRACEDIV_A::B_0X2
    }
    #[doc = "Checks if the value of the field is `B_0X3`"]
    #[inline(always)]
    pub fn is_b_0x3(&self) -> bool {
        *self == TRACEDIV_A::B_0X3
    }
}
#[doc = "Write proxy for field `TRACEDIV`"]
pub struct TRACEDIV_W<'a> {
    w: &'a mut W,
}
impl<'a> TRACEDIV_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: TRACEDIV_A) -> &'a mut W {
        unsafe { self.bits(variant.into()) }
    }
    #[doc = "aclk"]
    #[inline(always)]
    pub fn b_0x0(self) -> &'a mut W {
        self.variant(TRACEDIV_A::B_0X0)
    }
    #[doc = "aclk / 2 (default after reset)"]
    #[inline(always)]
    pub fn b_0x1(self) -> &'a mut W {
        self.variant(TRACEDIV_A::B_0X1)
    }
    #[doc = "aclk / 4"]
    #[inline(always)]
    pub fn b_0x2(self) -> &'a mut W {
        self.variant(TRACEDIV_A::B_0X2)
    }
    #[doc = "aclk / 8"]
    #[inline(always)]
    pub fn b_0x3(self) -> &'a mut W {
        self.variant(TRACEDIV_A::B_0X3)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // Clear then set bits 0:2 of the register value.
        self.w.bits = (self.w.bits & !0x07) | ((value as u32) & 0x07);
        self.w
    }
}
// DBGCKEN: single-bit debug-clock enable at bit 8 (generated code).
#[doc = "DBGCKEN\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum DBGCKEN_A {
    #[doc = "0: The enabling of the clock for the\r\n debug function is controlled by cdbgwrupreq\r\n signal from DAP. (default after\r\n reset)"]
    B_0X0 = 0,
    #[doc = "1: The clock for the debug function is\r\n enabled"]
    B_0X1 = 1,
}
// Conversion used by the write proxy to turn a variant into the field bit.
impl From<DBGCKEN_A> for bool {
    #[inline(always)]
    fn from(variant: DBGCKEN_A) -> Self {
        variant as u8 != 0
    }
}
#[doc = "Reader of field `DBGCKEN`"]
pub type DBGCKEN_R = crate::R<bool, DBGCKEN_A>;
impl DBGCKEN_R {
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> DBGCKEN_A {
        match self.bits {
            false => DBGCKEN_A::B_0X0,
            true => DBGCKEN_A::B_0X1,
        }
    }
    #[doc = "Checks if the value of the field is `B_0X0`"]
    #[inline(always)]
    pub fn is_b_0x0(&self) -> bool {
        *self == DBGCKEN_A::B_0X0
    }
    #[doc = "Checks if the value of the field is `B_0X1`"]
    #[inline(always)]
    pub fn is_b_0x1(&self) -> bool {
        *self == DBGCKEN_A::B_0X1
    }
}
#[doc = "Write proxy for field `DBGCKEN`"]
pub struct DBGCKEN_W<'a> {
    w: &'a mut W,
}
impl<'a> DBGCKEN_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: DBGCKEN_A) -> &'a mut W {
        {
            self.bit(variant.into())
        }
    }
    #[doc = "The enabling of the clock for the debug function is controlled by cdbgwrupreq signal from DAP. (default after reset)"]
    #[inline(always)]
    pub fn b_0x0(self) -> &'a mut W {
        self.variant(DBGCKEN_A::B_0X0)
    }
    #[doc = "The clock for the debug function is enabled"]
    #[inline(always)]
    pub fn b_0x1(self) -> &'a mut W {
        self.variant(DBGCKEN_A::B_0X1)
    }
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear then set bit 8 of the register value.
        self.w.bits = (self.w.bits & !(0x01 << 8)) | (((value as u32) & 0x01) << 8);
        self.w
    }
}
// TRACECKEN: single-bit trace-clock enable at bit 9 (generated code).
#[doc = "TRACECKEN\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum TRACECKEN_A {
    #[doc = "0: The clock for the trace function is\r\n disabled (default after reset)"]
    B_0X0 = 0,
    #[doc = "1: The clock for the trace function is\r\n enabled"]
    B_0X1 = 1,
}
// Conversion used by the write proxy to turn a variant into the field bit.
impl From<TRACECKEN_A> for bool {
    #[inline(always)]
    fn from(variant: TRACECKEN_A) -> Self {
        variant as u8 != 0
    }
}
#[doc = "Reader of field `TRACECKEN`"]
pub type TRACECKEN_R = crate::R<bool, TRACECKEN_A>;
impl TRACECKEN_R {
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> TRACECKEN_A {
        match self.bits {
            false => TRACECKEN_A::B_0X0,
            true => TRACECKEN_A::B_0X1,
        }
    }
    #[doc = "Checks if the value of the field is `B_0X0`"]
    #[inline(always)]
    pub fn is_b_0x0(&self) -> bool {
        *self == TRACECKEN_A::B_0X0
    }
    #[doc = "Checks if the value of the field is `B_0X1`"]
    #[inline(always)]
    pub fn is_b_0x1(&self) -> bool {
        *self == TRACECKEN_A::B_0X1
    }
}
#[doc = "Write proxy for field `TRACECKEN`"]
pub struct TRACECKEN_W<'a> {
    w: &'a mut W,
}
impl<'a> TRACECKEN_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: TRACECKEN_A) -> &'a mut W {
        {
            self.bit(variant.into())
        }
    }
    #[doc = "The clock for the trace function is disabled (default after reset)"]
    #[inline(always)]
    pub fn b_0x0(self) -> &'a mut W {
        self.variant(TRACECKEN_A::B_0X0)
    }
    #[doc = "The clock for the trace function is enabled"]
    #[inline(always)]
    pub fn b_0x1(self) -> &'a mut W {
        self.variant(TRACECKEN_A::B_0X1)
    }
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear then set bit 9 of the register value.
        self.w.bits = (self.w.bits & !(0x01 << 9)) | (((value as u32) & 0x01) << 9);
        self.w
    }
}
// DBGRST: single-bit trace/debug reset control at bit 12 (generated code).
#[doc = "DBGRST\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum DBGRST_A {
    #[doc = "0: The trace and debug parts are not\r\n reset. (default after reset)"]
    B_0X0 = 0,
    #[doc = "1: The trace and debug parts are under\r\n reset."]
    B_0X1 = 1,
}
// Conversion used by the write proxy to turn a variant into the field bit.
impl From<DBGRST_A> for bool {
    #[inline(always)]
    fn from(variant: DBGRST_A) -> Self {
        variant as u8 != 0
    }
}
#[doc = "Reader of field `DBGRST`"]
pub type DBGRST_R = crate::R<bool, DBGRST_A>;
impl DBGRST_R {
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> DBGRST_A {
        match self.bits {
            false => DBGRST_A::B_0X0,
            true => DBGRST_A::B_0X1,
        }
    }
    #[doc = "Checks if the value of the field is `B_0X0`"]
    #[inline(always)]
    pub fn is_b_0x0(&self) -> bool {
        *self == DBGRST_A::B_0X0
    }
    #[doc = "Checks if the value of the field is `B_0X1`"]
    #[inline(always)]
    pub fn is_b_0x1(&self) -> bool {
        *self == DBGRST_A::B_0X1
    }
}
#[doc = "Write proxy for field `DBGRST`"]
pub struct DBGRST_W<'a> {
    w: &'a mut W,
}
impl<'a> DBGRST_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: DBGRST_A) -> &'a mut W {
        {
            self.bit(variant.into())
        }
    }
    #[doc = "The trace and debug parts are not reset. (default after reset)"]
    #[inline(always)]
    pub fn b_0x0(self) -> &'a mut W {
        self.variant(DBGRST_A::B_0X0)
    }
    #[doc = "The trace and debug parts are under reset."]
    #[inline(always)]
    pub fn b_0x1(self) -> &'a mut W {
        self.variant(DBGRST_A::B_0X1)
    }
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear then set bit 12 of the register value.
        self.w.bits = (self.w.bits & !(0x01 << 12)) | (((value as u32) & 0x01) << 12);
        self.w
    }
}
// Field readers: extract each field from the raw register value.
impl R {
    #[doc = "Bits 0:2 - TRACEDIV"]
    #[inline(always)]
    pub fn tracediv(&self) -> TRACEDIV_R {
        TRACEDIV_R::new((self.bits & 0x07) as u8)
    }
    #[doc = "Bit 8 - DBGCKEN"]
    #[inline(always)]
    pub fn dbgcken(&self) -> DBGCKEN_R {
        DBGCKEN_R::new(((self.bits >> 8) & 0x01) != 0)
    }
    #[doc = "Bit 9 - TRACECKEN"]
    #[inline(always)]
    pub fn tracecken(&self) -> TRACECKEN_R {
        TRACECKEN_R::new(((self.bits >> 9) & 0x01) != 0)
    }
    #[doc = "Bit 12 - DBGRST"]
    #[inline(always)]
    pub fn dbgrst(&self) -> DBGRST_R {
        DBGRST_R::new(((self.bits >> 12) & 0x01) != 0)
    }
}
// Field writers: each returns a proxy that mutates the staged register value.
impl W {
    #[doc = "Bits 0:2 - TRACEDIV"]
    #[inline(always)]
    pub fn tracediv(&mut self) -> TRACEDIV_W {
        TRACEDIV_W { w: self }
    }
    #[doc = "Bit 8 - DBGCKEN"]
    #[inline(always)]
    pub fn dbgcken(&mut self) -> DBGCKEN_W {
        DBGCKEN_W { w: self }
    }
    #[doc = "Bit 9 - TRACECKEN"]
    #[inline(always)]
    pub fn tracecken(&mut self) -> TRACECKEN_W {
        TRACECKEN_W { w: self }
    }
    #[doc = "Bit 12 - DBGRST"]
    #[inline(always)]
    pub fn dbgrst(&mut self) -> DBGRST_W {
        DBGRST_W { w: self }
    }
}
|
/// Rust primitive-types tour (tutorial script).
/// Fix: the slicing examples used `$a[..]` / `$a[1..4]`, which is not valid
/// Rust syntax — slices are borrowed with `&`.
fn main() {
    // Boolean type
    let x = true;
    let y: bool = false;
    // char type
    let c = 'c';
    let two_hearts = '💕'; // 4 bytes
    // Numeric types
    // i8, i16, i32, i64, u8, u16, u32, u64, isize, usize, f32, f64,
    // u: unsigned, i: signed, f: floating point
    // isize and usize are pointer-sized
    // Arrays
    let a = [1, 2, 3];
    let mut m = [1, 2, 3];
    // Array of length 20, zero-initialized
    let a = [0; 20];
    println!("a has {} elements", a.len());
    // Index notation
    let names = ["Graydon", "Brian", "Niko"]; // names: [&str; 3]
    println!("The second name is: {}", names[1]);
    // Slicing syntax
    let a = [0,1,2,3,4];
    let complete = &a[..]; // a slice of all elements
    let middle = &a[1..4]; // a slice holding only 1, 2, 3
    // str type
    // https://doc.rust-jp.rs/the-rust-programming-language-ja/1.9/book/strings.html
    // https://doc.rust-jp.rs/the-rust-programming-language-ja/1.9/book/references-and-borrowing.html
    // Tuples
    let x = (1, "hello");
    // Tuple with a type annotation
    let x: (i32, &str) = (1, "hello");
    let mut x = (1, 2); // x: (i32, i32)
    let y = (2, 3); // y: (i32, i32)
    x = y;
    // Tuple destructuring
    let (x, y, z) = (1, 2, 3);
    println!("x is {}", x);
    (0,); // a one-element tuple
    (0); // just a zero in parentheses
    // Tuple indexing
    let tuple = (1, 2, 3);
    let x = tuple.0;
    let y = tuple.1;
    let z = tuple.2;
    println!("x is {}", x);
    // Functions
    fn foo(x: i32) -> i32 { x }
    let x: fn(i32) -> i32 = foo;
}
|
#![crate_type = "dylib"]
#![crate_name = "points"]
// Based on http://blog.skylight.io/bending-the-curve-writing-safe-fast-native-gems-with-rust/
// by Yehuda Katz
// NOTE(review): this file targets pre-1.0 Rust — `int`, `box` expressions and
// `std::num::pow` were all removed before Rust 1.0, so it will not build on a
// modern toolchain without porting (`int` -> `i64`, `box e` -> `Box::new(e)`,
// `pow(x, 2)` -> `x.pow(2)`); porting would change the FFI signatures, so it
// is only flagged here.
use std::num::pow;
// A 2-D point with integer coordinates, handed to C callers by pointer.
pub struct Point { x: int, y: int }
// A line segment between two points.
struct Line { p1: Point, p2: Point }
impl Line {
    // Euclidean length of the segment: sqrt((x1-x2)^2 + (y1-y2)^2).
    pub fn length(&self) -> f64 {
        let xdiff = self.p1.x - self.p2.x;
        let ydiff = self.p1.y - self.p2.y;
        ((pow(xdiff, 2) + pow(ydiff, 2)) as f64).sqrt()
    }
}
// C entry point: heap-allocates a Point and transfers ownership to the caller.
#[no_mangle]
pub extern "C" fn make_point(x: int, y: int) -> Box<Point> {
    box Point { x: x, y: y }
}
// C entry point: takes ownership back and frees a point from `make_point`.
#[no_mangle]
pub extern "C" fn free_point(p: Box<Point>) {
    drop(p);
}
// C entry point: Euclidean distance between two borrowed points.
#[no_mangle]
pub extern "C" fn get_distance(p1: &Point, p2: &Point) -> f64 {
    Line { p1: *p1, p2: *p2 }.length()
}
#![allow(dead_code, unused_imports)]
use env_logger;
use log::{debug, info};
use std::io;
use wikitools::extract::extract_anchor_counts_to_trie;
use wikitools::extract::{TrieBuilderFlat, TrieBuilderNested};
use wikitools::indices::{read_indices, write_all_indices, write_template_indices, WikiDumpIndices};
use wikitools::settings::Settings;
use wikitools::template::compile_templates;
use wikitools::utils::Timer;
use wikitools::loaders::{
build_or_load_page_indices,
build_or_load_template_indices,
};
use bincode;
use qp_trie::{
wrapper::{BStr, BString},
Trie,
};
use serde::Serialize;
use std::fs::File;
use std::io::{BufReader, BufWriter};
use std::path::Path;
/// Serializes a Trie into a `.qpt` binary file at `path` using bincode.
/// `buf_size` overrides the write-buffer capacity (default 256 MiB).
fn write_to_qpt<V>(
    anchor_counts: &Trie<BString, V>,
    path: &Path,
    buf_size: Option<usize>,
) -> bincode::Result<()>
where
    V: Serialize,
{
    let capacity = buf_size.unwrap_or(256 * 1024 * 1024);
    let writer = BufWriter::with_capacity(capacity, File::create(path)?);
    bincode::serialize_into(writer, &anchor_counts)
}
/// Deserializes a Trie previously written by `write_to_qpt`.
///
/// NOTE(review): the generic parameter `V` is never used — the return type is
/// hard-wired to `Trie<BString, u32>`, so callers must still name some `V`.
/// Removing `V` (or returning `Trie<BString, V>`) would change the signature,
/// so it is only flagged here.
fn read_from_qpt<V>(
    anchor_counts_flat_path: &Path,
    buf_size: Option<usize>,
) -> bincode::Result<Trie<BString, u32>>
where
    V: Serialize,
{
    let mut timer = Timer::new();
    info!("Loading anchor counts...");
    timer.reset();
    let file = File::open(anchor_counts_flat_path)?;
    // Default to a 256 MiB read buffer.
    let buf_size = buf_size.unwrap_or(256 * 1024 * 1024);
    let reader = BufReader::with_capacity(buf_size, file);
    let anchor_counts: Trie<BString, u32> = bincode::deserialize_from(reader)?;
    timer.finish();
    Ok(anchor_counts)
}
/// Build and serialise a FST from flat anchors.
fn build_fst_from_anchors(anchor_counts: Trie<BString, u32>, output_path: &Path) -> Result<(), Box<std::error::Error>> {
    let mut timer = Timer::new();
    use fst::{Map, MapBuilder, IntoStreamer, Streamer};
    use fst_regex::Regex;
    info!("Stripping anchors...");
    timer.reset();
    // Flatten the trie into (surface form, count) pairs; counts widen to u64
    // because that is what the FST map stores.
    let mut anchors = anchor_counts.into_iter()
        .map(|(key, value)| (key.into(), value as u64))
        .collect::<Vec<(String, u64)>>();
    timer.finish();
    info!("Sorting anchors...");
    timer.reset();
    // FST construction requires keys in sorted order.
    anchors.sort_by(|(k1, _), (k2, _)| k1.partial_cmp(k2).unwrap());
    timer.finish();
    let file = File::create(output_path)?;
    let buf = BufWriter::with_capacity(256 * 1024 * 1024, file);
    let mut bld = MapBuilder::new(buf)?;
    info!("Building FST...");
    timer.reset();
    bld.extend_iter(anchors.into_iter())?;
    // NOTE(review): this panics on a final-write failure even though the
    // function returns Result; consider `bld.finish()?` instead.
    bld.finish().unwrap();
    timer.finish();
    Ok(())
}
// use crate::extract::AnchorTrieBuilder;
// fn test_<Builder, V>() -> Result<(), Box<std::error::Error>>
// where Builder: AnchorTrieBuilder<V> {
// use std::fs::File;
// use std::io::BufWriter;
// use serde_json;
// let anchor_counts = Builder::extract(Path::new("pages10.xml.bz2"), 0);
// let file = File::create("anchor-counts-test-sm.json")?;
// let file = BufWriter::new(file);
// serde_json::to_writer_pretty(file, &anchor_counts)
// }
/// Pipeline driver: loads settings, ensures the page/template indices and the
/// templates master file exist, then builds the anchor-count FST if missing.
fn main() -> Result<(), Box<std::error::Error>> {
    env_logger::init();
    let settings = Settings::new("config.toml")?;
    info!("wikitools dump 0.0.0");
    debug!("settings: {:#?}", settings);
    // Fetch all page indices, writing to file if they do not already exist.
    let page_indices = build_or_load_page_indices(&settings)?;
    // Fetch all template indices, writing to file if they do not already exist.
    let template_indices = build_or_load_template_indices(&settings)?;
    // If the templates master file does not exist, create it.
    if !settings.templates.exists() {
        info!("Compiling templates file");
        compile_templates(&template_indices, &settings.data.dump, &settings.templates);
    };
    // Build the anchor-count trie and its FST only when the output is absent.
    if !settings.anchors.anchor_counts.exists() {
        info!("Building anchor counts...");
        let anchor_counts = extract_anchor_counts_to_trie(
            TrieBuilderFlat,
            &page_indices,
            &settings.data.dump
        );
        info!("Building FST from anchor counts...");
        build_fst_from_anchors(anchor_counts, &settings.anchors.anchor_counts)?;
    }
    Ok(())
    /*
    for (surface_form, entities) in anchor_counts {
        for (entity, count) in entities {
            writeln!(writer, "{}\t{}\t{}", surface_form, entity, count).unwrap();
        }
        prog_bar.inc();
    }
    let index_dir = &settings.search_index.index_dir;
    let (_schema, _index) = if !index_dir.exists() {
        build_index(index_dir, &page_indices, &data.dump, 500_000_000)
            .expect("Failed to build Index")
    } else {
        let dir = MmapDirectory::open(&index_dir).unwrap();
        let index = Index::open(dir).expect("Failed to load Index");
        (index.schema(), index)
    };
    */
}
|
// cd C:\Users\むずでょ\source\repos\practice-rust
// cargo new chat-client-1
//
// cd C:\Users\むずでょ\source\repos\practice-rust\chat-client-1
// cargo check
// cargo build
// cargo run
//
// See also:
// https://crates.io/
// #[macro_use]
extern crate serde_derive;
extern crate toml;
// Both the standard library and Tokio provide a TcpListener; either can be used here.
use std::io::{BufRead, BufReader, BufWriter, Error, Write};
use std::net::TcpStream;
mod config;
use config::*;
/// Interactive TCP chat client: empty input reads one server line, `exit`
/// quits, anything else is sent to the server.
///
/// Fixes: the `set_nodelay` Result was silently dropped, `write` (which may
/// write only part of the buffer) is replaced with `write_all`, the unused
/// `client` binding is gone, and `panic!(err)` (rejected since Rust 2021)
/// now uses a format string.
fn main() {
    println!("I am a client!");
    match read_toml("./config.toml".to_string()) {
        Ok(config) => {
            let host = config.host.unwrap();
            let host_text = format!("{}:{}", host.domain.unwrap(), host.port.unwrap());
            println!("Host | {}", host_text);
            let result = TcpStream::connect(&host_text).and_then(move |stream| {
                // Disable Nagle's algorithm; propagate failure instead of
                // ignoring the Result.
                stream.set_nodelay(true)?;
                println!("Connected from | {}", stream.peer_addr()?);
                // Buffering.
                let mut reader = BufReader::new(&stream);
                let mut writer = BufWriter::new(&stream);
                loop {
                    // Standard input
                    let mut line = String::new();
                    std::io::stdin().read_line(&mut line).ok();
                    // Strip the trailing newline.
                    line = line.replace("\r", "\n").replace("\n", "");
                    println!("Input | [{}]", line);
                    match line.as_str() {
                        "exit" => {
                            break;
                        }
                        "" => {
                            // Empty input: read one message from the server.
                            if let Err(err) = reader.read_line(&mut line) {
                                println!("Error | {}", err);
                            }
                            println!("Read | {}", line);
                        }
                        _ => {
                            // Anything else is sent to the server; write_all
                            // guarantees the whole buffer goes out.
                            println!("Write | {}", line);
                            writer.write_all(format!("{}\n", line).as_bytes())?;
                            writer.flush()?;
                        }
                    }
                }
                Ok(())
            });
            if let Err(err) = result {
                println!("connection error = {:?}", err);
            }
        }
        // panic! requires a format string since the 2021 edition.
        Err(err) => panic!("{}", err),
    }
}
|
use svm_sdk_std::Option;
/// Used for traversal of the encoded function buffer.
/// It will be used by the `Decoder`.
///
/// By having the isolation between the `Decoder` and `Cursor` we are able
/// to execute a `Decoder` method that receives as a parameter `&mut Cursor`
/// while the `Decoder` is borrowed `&self`.
///
/// This separation was born out of a need to comply to the safe Rust ownership rules
/// (see the look under the `decode_array` under `Decoder` as an example).
///
/// NOTE(review): `bytes` is a raw pointer taken from a borrowed slice in
/// `new`, so the caller must keep that buffer alive (and unmoved) for the
/// whole lifetime of the `Cursor` — confirm call sites uphold this.
pub struct Cursor {
    /// Pointer to the traversed bytes
    pub bytes: *const u8,
    /// The current pointed-by offset
    pub offset: usize,
    /// Number of bytes pointed-by `bytes`
    pub length: usize,
}
impl Cursor {
    /// Creates a new `Cursor` for encoded function buffer `bytes`
    pub fn new(bytes: &[u8]) -> Self {
        let length = bytes.len();
        let bytes = bytes.as_ptr();
        Self {
            bytes,
            length,
            offset: 0,
        }
    }
    /// Returns whether cursor has finished traversal
    #[inline]
    pub fn is_eof(&self) -> bool {
        self.offset >= self.len()
    }
    /// The length of the underlying buffer
    #[inline]
    pub fn len(&self) -> usize {
        self.length
    }
    /// Returns the next looked-at byte without incrementing `offset`
    /// If already pointing at `EOF` - returns `None`.
    #[inline]
    pub fn peek(&self) -> Option<u8> {
        if self.is_eof() {
            return Option::None;
        }
        // SAFETY: `is_eof` returned false, so `offset < length` and the
        // pointed-at byte lies inside the buffer given to `new`.
        let byte = unsafe { *self.offset_ptr() };
        Option::Some(byte)
    }
    /// Returns the next looked-at byte and increments the `offset`.
    /// If already pointing at `EOF` - returns `None`.
    /// (Note: `offset` is incremented even at EOF; later bounds checks
    /// tolerate `offset > length`.)
    #[inline]
    pub fn read_byte(&mut self) -> Option<u8> {
        let byte = self.peek();
        self.offset += 1;
        byte
    }
    /// If there are at least `nbytes` unprocessed-yet bytes,
    /// returns a raw pointer to the current pointed-by address.
    /// And then, it increments the `offset` by `nbytes`.
    ///
    /// In case there are less then `nbytes` left bytes - returns `None`.
    pub fn read_bytes(&mut self, nbytes: usize) -> Option<*const u8> {
        // Fix: the previous bounds check computed `offset + nbytes - 1`,
        // which underflows (panic in debug, wraparound in release) when both
        // `offset` and `nbytes` are zero. Compute an overflow-checked end
        // offset instead; for nbytes > 0 the accepted range is unchanged.
        let end = match self.offset.checked_add(nbytes) {
            core::option::Option::Some(end) => end,
            core::option::Option::None => return Option::None,
        };
        if end > self.len() {
            return Option::None;
        }
        // SAFETY: `offset + nbytes <= length`, so the returned pointer and
        // the `nbytes` bytes after it are inside the buffer given to `new`.
        let ptr = unsafe { self.offset_ptr() };
        self.offset += nbytes;
        Option::Some(ptr)
    }
    /// Returns a raw pointer to the current pointed-at address.
    ///
    /// # Safety
    ///
    /// `offset` must be within (or one past) the buffer handed to `new`,
    /// and that buffer must still be alive.
    #[inline]
    pub unsafe fn offset_ptr(&self) -> *const u8 {
        self.bytes.add(self.offset)
    }
}
|
use std::fs;
use std::collections::HashMap;
// Wire name -> expression tokens (1-3 strings) parsed from the input.
type MapType = HashMap<String, Vec<String>>;

/// Parses one "expr -> wire" line and records `wire -> [expr tokens]`.
fn parse_input(line: &str, signals: &mut MapType) {
    // "lhs -> rhs": rhs is the destination wire, lhs is a 1-3 token expression.
    let halves: Vec<&str> = line.splitn(2, " -> ").collect();
    let wire = halves[1].to_string();
    let tokens: Vec<String> = halves[0]
        .splitn(3, " ")
        .map(|token| token.to_owned())
        .collect();
    signals.insert(wire, tokens);
}
/// Returns the expression tokens bound to `key`, or `[key]` itself when the
/// key is not a wire (i.e. it is most probably a literal value).
///
/// Takes `&str` instead of `&String` (idiomatic; existing `&String` call
/// sites still coerce) and does a single map lookup instead of
/// `contains_key` followed by `get`.
fn extract_value(key: &str, signals: &MapType) -> Vec<String> {
    match signals.get(key) {
        Some(entry) => entry.to_vec(),
        None => vec![key.to_string()],
    }
}
/// Recursively evaluates the expression tokens `s` against the wire map,
/// memoizing resolved wires back into `signals` as literal values.
///
/// Fixes: the 2-token ("NOT x") branch evaluated the operand but never
/// applied the complement, returning x instead of NOT x. The complement is
/// now applied and masked to 16 bits (wires are 16-bit in this puzzle).
fn get_value(s: &Vec<String>, signals: &mut MapType) -> u32 {
    if s.len() == 3 {
        // Binary gate: "<a> OP <b>".
        let param1 = extract_value(&s[0], signals);
        let param2 = extract_value(&s[2], signals);
        match s[1].as_str() {
            "AND" => return get_value(&param1, signals) & get_value(&param2, signals),
            "OR" => return get_value(&param1, signals) | get_value(&param2, signals),
            "LSHIFT" => return get_value(&param1, signals) << get_value(&param2, signals),
            "RSHIFT" => return get_value(&param1, signals) >> get_value(&param2, signals),
            _ => 0
        }
    }
    else if s.len() == 2 {
        // Unary gate: "NOT <x>".
        let param1 = extract_value(&s[1], signals);
        let val = get_value(&param1, signals);
        // Memoize the operand's own (un-negated) value.
        signals.insert(s[1].to_owned(), vec![val.to_string()]);
        // Apply the complement, masked to the puzzle's 16-bit wire width.
        return !val & 0xFFFF;
    }
    else if s.len() == 1 {
        if s[0].parse::<u32>().is_ok() {
            // A numeric literal.
            return s[0].parse::<u32>().unwrap();
        }
        else {
            // A wire reference: resolve it, then memoize the result.
            let entry = extract_value(&s[0], signals);
            let val = get_value(&entry, signals);
            signals.insert(s[0].to_owned(), vec![val.to_string()]);
            println!("updated with {:?}", val);
            return val;
        }
    }
    else {
        panic!("dupa");
    }
}
/// Solves part 1: evaluate wire "a" from the parsed instruction list.
fn part_1(input: &str) -> u32 {
    let mut map = MapType::new();
    for line in input.lines() {
        parse_input(line, &mut map);
    }
    let entry = extract_value(&"a".to_string(), &map);
    get_value(&entry, &mut map)
}
/// Part 2 is not implemented yet; always reports 0.
fn part_2(_input: &str) -> u32 {
    0
}
/// Entry point: reads the puzzle input from the file `input` and prints the
/// answers to both parts.
fn main() {
    let raw = fs::read_to_string("input").expect("file not found");
    let input = raw.trim();
    println!("First puzzle: {}", part_1(input));
    println!("Second puzzle: {}", part_2(input));
}
#[cfg(test)]
mod day07 {
    use super::*;
    /// Smoke test: a simple literal-assignment line parses without panicking.
    #[test]
    fn test_parse_input() {
        let mut map = MapType::new();
        parse_input("123 -> x", &mut map);
    }
}
|
use std::fs::File;
use std::io::{BufRead, BufReader};
use regex::Regex;
/// Finds the longest word in `words/words.txt` that contains none of the
/// letters g, k, m, q, v, w, x, z, i, o and prints it with its length.
/// Regex or file failures degrade to a diagnostic message instead of a panic.
fn main() {
    let message = match Regex::new(r"[gkmqvwxzio]") {
        Err(_) => String::from("Invalid expression"),
        Ok(invalid) => match File::open("words/words.txt") {
            Err(_) => String::from("Failed to open words file"),
            Ok(source) => BufReader::new(source)
                .lines()
                .map(|l| l.expect("Failed to read word"))
                .filter(|l| !invalid.is_match(&l))
                .max_by(|a, b| a.len().cmp(&b.len()))
                .map(|word| format!("Longest Word: {} ({})", word, word.len()))
                .unwrap_or(String::from("No word found")),
        },
    };
    println!("{}", message);
}
|
#[allow(non_camel_case_types)]
use scan_fmt::*;
use std::io::Read;
use std::fs::File;
use std::collections::HashMap;
use std::collections::HashSet;
use std::iter::FromIterator;
// Machine word, register file, and instruction-field types shared by all ops.
type Val = usize;
type Regs = [Val; 4];
type OpCode = Val;
type A = Val;
type B = Val;
type C = Val;

/// addr: r[c] = r[a] + r[b]
fn addr(mut regs: Regs, (a, b, c): (A, B, C)) -> Regs {
    regs[c] = regs[a] + regs[b];
    regs
}
/// addi: r[c] = r[a] + b
fn addi(mut regs: Regs, (a, b, c): (A, B, C)) -> Regs {
    regs[c] = regs[a] + b;
    regs
}
/// mulr: r[c] = r[a] * r[b]
fn mulr(mut regs: Regs, (a, b, c): (A, B, C)) -> Regs {
    regs[c] = regs[a] * regs[b];
    regs
}
/// muli: r[c] = r[a] * b
fn muli(mut regs: Regs, (a, b, c): (A, B, C)) -> Regs {
    regs[c] = regs[a] * b;
    regs
}
/// banr: r[c] = r[a] & r[b]
fn banr(mut regs: Regs, (a, b, c): (A, B, C)) -> Regs {
    regs[c] = regs[a] & regs[b];
    regs
}
/// bani: r[c] = r[a] & b
fn bani(mut regs: Regs, (a, b, c): (A, B, C)) -> Regs {
    regs[c] = regs[a] & b;
    regs
}
/// borr: r[c] = r[a] | r[b]
fn borr(mut regs: Regs, (a, b, c): (A, B, C)) -> Regs {
    regs[c] = regs[a] | regs[b];
    regs
}
/// bori: r[c] = r[a] | b
fn bori(mut regs: Regs, (a, b, c): (A, B, C)) -> Regs {
    regs[c] = regs[a] | b;
    regs
}
/// setr: r[c] = r[a] (b is ignored)
fn setr(mut regs: Regs, (a, _, c): (A, B, C)) -> Regs {
    regs[c] = regs[a];
    regs
}
/// seti: r[c] = a (b is ignored)
fn seti(mut regs: Regs, (a, _, c): (A, B, C)) -> Regs {
    regs[c] = a;
    regs
}
/// gtir: r[c] = (a > r[b]) as Val
fn gtir(mut regs: Regs, (a, b, c): (A, B, C)) -> Regs {
    regs[c] = if a > regs[b] { 1 } else { 0 };
    regs
}
/// gtri: r[c] = (r[a] > b) as Val
fn gtri(mut regs: Regs, (a, b, c): (A, B, C)) -> Regs {
    regs[c] = if regs[a] > b { 1 } else { 0 };
    regs
}
/// gtrr: r[c] = (r[a] > r[b]) as Val
fn gtrr(mut regs: Regs, (a, b, c): (A, B, C)) -> Regs {
    regs[c] = if regs[a] > regs[b] { 1 } else { 0 };
    regs
}
/// eqir: r[c] = (a == r[b]) as Val
fn eqir(mut regs: Regs, (a, b, c): (A, B, C)) -> Regs {
    regs[c] = if a == regs[b] { 1 } else { 0 };
    regs
}
/// eqri: r[c] = (r[a] == b) as Val
fn eqri(mut regs: Regs, (a, b, c): (A, B, C)) -> Regs {
    regs[c] = if regs[a] == b { 1 } else { 0 };
    regs
}
/// eqrr: r[c] = (r[a] == r[b]) as Val
fn eqrr(mut regs: Regs, (a, b, c): (A, B, C)) -> Regs {
    regs[c] = if regs[a] == regs[b] { 1 } else { 0 };
    regs
}
/// Parses one program line "op a b c" into its four numeric fields.
///
/// Panics (via `unwrap`) if the line does not contain four integers.
fn parse_op(line: &str) -> (Val, Val, Val, Val) {
    let y = scan_fmt!(line, "{} {} {} {}", OpCode, A, B, C);
    (y.0.unwrap(), y.1.unwrap(), y.2.unwrap(), y.3.unwrap())
}
/// Reads one three-line sample: the register file before, the executed
/// instruction, and the register file after.
///
/// Panics if the line iterator runs dry or a line does not match its pattern.
fn parse_sample(input: &mut Lines<'_>) -> (Regs, (Val, Val, Val, Val), Regs) {
    let x = scan_fmt!(input.next().unwrap(), "Before: [{}, {}, {}, {}]", OpCode, A, B, C);
    let y = scan_fmt!(input.next().unwrap(), "{} {} {} {}", OpCode, A, B, C);
    let z = scan_fmt!(input.next().unwrap(), "After: [{}, {}, {}, {}]", OpCode, A, B, C);
    ([x.0.unwrap(), x.1.unwrap(), x.2.unwrap(), x.3.unwrap()],
     (y.0.unwrap(), y.1.unwrap(), y.2.unwrap(), y.3.unwrap()),
     [z.0.unwrap(), z.1.unwrap(), z.2.unwrap(), z.3.unwrap()])
}
use std::str::Lines;
/// Entry point: deduces which sample opcode maps to which implementation
/// (from `input-1`), then executes the program in `input-2`.
fn main() {
    // Phase 1: read the sample section (Before/op/After triples) and record,
    // for every sample opcode, the set of implementations consistent with it.
    let mut file = File::open("input-1").unwrap();
    let mut buf = String::new();
    file.read_to_string(&mut buf).unwrap();
    let mut input = buf.lines();
    // All sixteen op implementations, indexed by our internal opcode
    // (the position in this vector).
    let ops: Vec<fn(Regs, (Val, Val, Val)) -> Regs> = vec![
        addr, addi,
        mulr, muli,
        banr, bani,
        borr, bori,
        setr, seti,
        gtir, gtri, gtrr,
        eqir, eqri, eqrr,
    ];
    let mut unknown: HashMap<OpCode, HashSet<OpCode>> = HashMap::new();
    loop {
        let (pre, (sample_opcode, a, b, c), post) = parse_sample(&mut input);
        for (opcode, op) in ops.iter().enumerate() {
            // Any implementation that reproduces the observed After state is
            // a candidate for this sample opcode.
            if op(pre, (a, b, c)) == post {
                unknown
                    .entry(sample_opcode)
                    .and_modify(|matches| { matches.insert(opcode); })
                    .or_insert_with(|| HashSet::from_iter(vec![opcode]));
            }
        }
        // Consume the blank separator line; `None` means the sample input is
        // exhausted.
        if input.next().is_none() {
            break;
        }
    }
    // Phase 2: repeatedly peel off opcodes with exactly one remaining
    // candidate (constraint propagation) until the mapping is complete.
    let mut translation: HashMap<OpCode, OpCode> = HashMap::new();
    loop {
        let mut singles: Vec<(OpCode, OpCode)> = Vec::new();
        for (sample_opcode, matches) in unknown.iter() {
            if matches.len() == 1 {
                singles.push((*sample_opcode, *matches.iter().next().unwrap()));
            }
        }
        for (sample_opcode, _) in singles.iter() {
            unknown.remove(&sample_opcode);
        }
        // A resolved implementation can no longer be a candidate elsewhere.
        for (_, opcode) in singles.iter() {
            for (_, matches) in unknown.iter_mut() {
                matches.remove(&opcode);
            }
        }
        translation.extend(singles);
        if unknown.len() == 0 {
            break;
        }
    }
    println!("{:?}", translation);
    // Phase 3: execute the test program using the recovered opcode mapping,
    // starting from all-zero registers.
    let mut file = File::open("input-2").unwrap();
    let mut buf = String::new();
    file.read_to_string(&mut buf).unwrap();
    let input = buf.lines();
    let mut reg: Regs = [0; 4];
    for line in input {
        let (opcode, a, b, c) = parse_op(line);
        reg = ops[*translation.get(&opcode).unwrap()](reg, (a, b, c));
    }
    println!("{:?}", reg);
}
|
use std::time::SystemTime;
use actix_web::http::StatusCode;
use chrono::{DateTime, Local};
use chrono_humanize::Humanize;
use clap::{crate_name, crate_version, ValueEnum};
use fast_qr::{
convert::{svg::SvgBuilder, Builder},
qr::QRCodeError,
QRBuilder,
};
use http::Uri;
use maud::{html, Markup, PreEscaped, DOCTYPE};
use strum::{Display, IntoEnumIterator};
use crate::auth::CurrentUser;
use crate::consts;
use crate::listing::{Breadcrumb, Entry, QueryParameters, SortingMethod, SortingOrder};
use crate::{archive::ArchiveMethod, MiniserveConfig};
#[allow(clippy::too_many_arguments)]
/// Renders the file listing
///
/// Produces the full HTML directory page: header, breadcrumbs, toolbars
/// (archive download, upload, mkdir), the entry table, optional README
/// contents, and footers. Falls back to [`raw`] when `?raw=true` was
/// requested.
pub fn page(
    entries: Vec<Entry>,
    readme: Option<(String, String)>,
    abs_uri: &Uri,
    is_root: bool,
    query_params: QueryParameters,
    breadcrumbs: &[Breadcrumb],
    encoded_dir: &str,
    conf: &MiniserveConfig,
    current_user: Option<&CurrentUser>,
) -> Markup {
    // If query_params.raw is true, we want to render a minimal directory
    // listing. (Idiom fix: `unwrap_or(false)` replaces the previous
    // `is_some() && unwrap()` combination.)
    if query_params.raw.unwrap_or(false) {
        return raw(entries, is_root);
    }
    let upload_route = format!("{}/upload", &conf.route_prefix);
    let (sort_method, sort_order) = (query_params.sort, query_params.order);
    let upload_action = build_upload_action(&upload_route, encoded_dir, sort_method, sort_order);
    let mkdir_action = build_mkdir_action(&upload_route, encoded_dir);
    let title_path = breadcrumbs_to_path_string(breadcrumbs);
    // Uploads are allowed everywhere when no directory restriction is
    // configured, otherwise only below one of the allowed directories.
    let upload_allowed = conf.allowed_upload_dir.is_empty()
        || conf
            .allowed_upload_dir
            .iter()
            .any(|x| encoded_dir.starts_with(&format!("/{x}")));
    html! {
        (DOCTYPE)
        html {
            (page_header(&title_path, conf.file_upload, &conf.favicon_route, &conf.css_route))
            body #drop-container
            {
                div.toolbar_box_group {
                    @if conf.file_upload {
                        div.form {
                            div.form_title {
                                h1 { "Drop your file here to upload it" }
                            }
                        }
                    }
                    @if conf.mkdir_enabled {
                        div.form {
                            div.form_title {
                                h1 { "Create a new directory" }
                            }
                        }
                    }
                }
                nav {
                    (qr_spoiler(conf.show_qrcode, abs_uri))
                    (color_scheme_selector(conf.hide_theme_selector))
                }
                div.container {
                    span #top { }
                    h1.title dir="ltr" {
                        @for el in breadcrumbs {
                            @if el.link == "." {
                                // wrapped in span so the text doesn't shift slightly when it turns into a link
                                span { bdi { (el.name) } }
                            } @else {
                                a href=(parametrized_link(&el.link, sort_method, sort_order, false)) {
                                    bdi { (el.name) }
                                }
                            }
                            "/"
                        }
                    }
                    div.toolbar {
                        @if conf.tar_enabled || conf.tar_gz_enabled || conf.zip_enabled {
                            div.download {
                                @for archive_method in ArchiveMethod::iter() {
                                    @if archive_method.is_enabled(conf.tar_enabled, conf.tar_gz_enabled, conf.zip_enabled) {
                                        (archive_button(archive_method, sort_method, sort_order))
                                    }
                                }
                            }
                        }
                        div.toolbar_box_group {
                            @if conf.file_upload && upload_allowed {
                                div.toolbar_box {
                                    form id="file_submit" action=(upload_action) method="POST" enctype="multipart/form-data" {
                                        p { "Select a file to upload or drag it anywhere into the window" }
                                        div {
                                            @match &conf.uploadable_media_type {
                                                Some(accept) => {input #file-input accept=(accept) type="file" name="file_to_upload" required="" multiple {}},
                                                None => {input #file-input type="file" name="file_to_upload" required="" multiple {}}
                                            }
                                            button type="submit" { "Upload file" }
                                        }
                                    }
                                }
                            }
                            @if conf.mkdir_enabled {
                                div.toolbar_box {
                                    form id="mkdir" action=(mkdir_action) method="POST" enctype="multipart/form-data" {
                                        p { "Specify a directory name to create" }
                                        div.toolbar_box {
                                            input type="text" name="mkdir" required="" placeholder="Directory name" {}
                                            button type="submit" { "Create directory" }
                                        }
                                    }
                                }
                            }
                        }
                    }
                    table {
                        thead {
                            th.name { (build_link("name", "Name", sort_method, sort_order)) }
                            th.size { (build_link("size", "Size", sort_method, sort_order)) }
                            th.date { (build_link("date", "Last modification", sort_method, sort_order)) }
                        }
                        tbody {
                            @if !is_root {
                                tr {
                                    td colspan="3" {
                                        p {
                                            span.root-chevron { (chevron_left()) }
                                            a.root href=(parametrized_link("../", sort_method, sort_order, false)) {
                                                "Parent directory"
                                            }
                                        }
                                    }
                                }
                            }
                            @for entry in entries {
                                (entry_row(entry, sort_method, sort_order, false))
                            }
                        }
                    }
                    @if let Some(readme) = readme {
                        div id="readme" {
                            h3 id="readme-filename" { (readme.0) }
                            div id="readme-contents" {
                                (PreEscaped (readme.1))
                            };
                        }
                    }
                    a.back href="#top" {
                        (arrow_up())
                    }
                    div.footer {
                        @if conf.show_wget_footer {
                            (wget_footer(abs_uri, conf.title.as_deref(), current_user.map(|x| &*x.name)))
                        }
                        @if !conf.hide_version_footer {
                            (version_footer())
                        }
                    }
                }
            }
        }
    }
}
/// Renders the file listing
/// (minimal `?raw=true` variant: a bare table with no styling, scripts,
/// toolbars, or sorting links)
pub fn raw(entries: Vec<Entry>, is_root: bool) -> Markup {
    html! {
        (DOCTYPE)
        html {
            body {
                table {
                    thead {
                        th.name { "Name" }
                        th.size { "Size" }
                        th.date { "Last modification" }
                    }
                    tbody {
                        // Link to the parent directory unless at the root.
                        @if !is_root {
                            tr {
                                td colspan="3" {
                                    p {
                                        a.root href=(parametrized_link("../", None, None, true)) {
                                            ".."
                                        }
                                    }
                                }
                            }
                        }
                        @for entry in entries {
                            (entry_row(entry, None, None, true))
                        }
                    }
                }
            }
        }
    }
}
/// Renders the QR code for `url` as an SVG string, or the builder's error.
fn qr_code_svg(url: &Uri, margin: usize) -> Result<String, QRCodeError> {
    let code = QRBuilder::new(url.to_string())
        .ecl(consts::QR_EC_LEVEL)
        .build()?;
    Ok(SvgBuilder::default().margin(margin).to_str(&code))
}
/// Build a path string from a list of breadcrumbs, joining the crumb names
/// with "/".
fn breadcrumbs_to_path_string(breadcrumbs: &[Breadcrumb]) -> String {
    let names: Vec<_> = breadcrumbs.iter().map(|b| b.name.clone()).collect();
    names.join("/")
}
// Partial: version footer
/// Renders the `crate_name/crate_version` line from Cargo metadata.
fn version_footer() -> Markup {
    html! {
        div.version {
            (format!("{}/{}", crate_name!(), crate_version!()))
        }
    }
}
/// Partial: footer with a copyable `wget` command that mirrors this folder.
///
/// The exact command text is pinned by the unit tests below — do not change
/// the formatting without updating them.
///
/// NOTE(review): `abs_path.authority().unwrap()` assumes the URI always
/// carries an authority — confirm upstream guarantees this.
fn wget_footer(abs_path: &Uri, root_dir_name: Option<&str>, current_user: Option<&str>) -> Markup {
    // Wraps ' as '"'"' so it survives single-quoted shell arguments.
    fn escape_apostrophes(x: &str) -> String {
        x.replace('\'', "'\"'\"'")
    }
    // Directory depth, 0 is root directory
    let cut_dirs = match abs_path.path().matches('/').count() - 1 {
        // Put all the files in a folder of this name
        0 => format!(
            " -P '{}'",
            escape_apostrophes(
                root_dir_name.unwrap_or_else(|| abs_path.authority().unwrap().as_str())
            )
        ),
        1 => String::new(),
        // Avoids putting the files in excessive directories
        x => format!(" --cut-dirs={}", x - 1),
    };
    // Ask for password if authentication is required
    let user_params = match current_user {
        Some(user) => format!(" --ask-password --user '{}'", escape_apostrophes(user)),
        None => String::new(),
    };
    // Percent-encode apostrophes so the URL can sit in single quotes.
    let encoded_abs_path = abs_path.to_string().replace('\'', "%27");
    let command = format!(
        "wget -rcnHp -R 'index.html*'{cut_dirs}{user_params} '{encoded_abs_path}?raw=true'"
    );
    let click_to_copy = format!("navigator.clipboard.writeText(\"{command}\")");
    html! {
        div.downloadDirectory {
            p { "Download folder:" }
            a.cmd title="Click to copy!" style="cursor: pointer;" onclick=(click_to_copy) { (command) }
        }
    }
}
/// Build the action of the upload form, carrying the current sort settings
/// over as query parameters.
fn build_upload_action(
    upload_route: &str,
    encoded_dir: &str,
    sort_method: Option<SortingMethod>,
    sort_order: Option<SortingOrder>,
) -> String {
    let mut action = format!("{upload_route}?path={encoded_dir}");
    if let Some(method) = sort_method {
        action.push_str(&format!("&sort={}", method));
    }
    if let Some(order) = sort_order {
        action.push_str(&format!("&order={}", order));
    }
    action
}
/// Build the action of the mkdir form: `<route>?path=<dir>`.
fn build_mkdir_action(mkdir_route: &str, encoded_dir: &str) -> String {
    let mut action = String::with_capacity(mkdir_route.len() + "?path=".len() + encoded_dir.len());
    action.push_str(mkdir_route);
    action.push_str("?path=");
    action.push_str(encoded_dir);
    action
}
/// Selectable color schemes as (display label, slug) pairs; "default" means
/// "follow the browser's light/dark preference".
const THEME_PICKER_CHOICES: &[(&str, &str)] = &[
    ("Default (light/dark)", "default"),
    ("Squirrel (light)", "squirrel"),
    ("Arch Linux (dark)", "archlinux"),
    ("Zenburn (dark)", "zenburn"),
    ("Monokai (dark)", "monokai"),
];
/// Available stylesheet themes; the strum serialization is the slug used in
/// URLs and data attributes.
#[derive(Debug, Clone, ValueEnum, Display)]
pub enum ThemeSlug {
    #[strum(serialize = "squirrel")]
    Squirrel,
    #[strum(serialize = "archlinux")]
    Archlinux,
    #[strum(serialize = "zenburn")]
    Zenburn,
    #[strum(serialize = "monokai")]
    Monokai,
}
impl ThemeSlug {
    /// Returns the theme's CSS, compiled from SCSS at build time via
    /// `grass::include!`.
    pub fn css(&self) -> &str {
        match self {
            ThemeSlug::Squirrel => grass::include!("data/themes/squirrel.scss"),
            ThemeSlug::Archlinux => grass::include!("data/themes/archlinux.scss"),
            ThemeSlug::Zenburn => grass::include!("data/themes/zenburn.scss"),
            ThemeSlug::Monokai => grass::include!("data/themes/monokai.scss"),
        }
    }
    /// Wraps the theme CSS in a `prefers-color-scheme: dark` media query.
    pub fn css_dark(&self) -> String {
        format!("@media (prefers-color-scheme: dark) {{\n{}}}", self.css())
    }
}
/// Partial: qr code spoiler
///
/// When enabled, renders the current URL as an inline QR-code SVG; the raw
/// URL is also exposed through the container's `title` tooltip.
fn qr_spoiler(show_qrcode: bool, content: &Uri) -> Markup {
    html! {
        @if show_qrcode {
            div {
                p {
                    "QR code"
                }
                div.qrcode #qrcode title=(PreEscaped(content.to_string())) {
                    @match qr_code_svg(content, consts::SVG_QR_MARGIN) {
                        Ok(svg) => (PreEscaped(svg)),
                        // Render the failure inline instead of breaking the page.
                        Err(err) => (format!("QR generation error: {err:?}")),
                    }
                }
            }
        }
    }
}
/// Partial: color scheme selector
///
/// Lists every entry of `THEME_PICKER_CHOICES` as a clickable theme link;
/// rendered empty when `hide_theme_selector` is set.
fn color_scheme_selector(hide_theme_selector: bool) -> Markup {
    html! {
        @if !hide_theme_selector {
            div {
                p {
                    "Change theme..."
                }
                ul.theme {
                    @for color_scheme in THEME_PICKER_CHOICES {
                        li data-theme=(color_scheme.1) {
                            (color_scheme_link(color_scheme))
                        }
                    }
                }
            }
        }
    }
}
/// Partial: color scheme link
///
/// Renders an anchor that switches the UI to the given (label, slug) theme
/// via the `updateColorScheme` script injected by `page_header`.
fn color_scheme_link(color_scheme: &(&str, &str)) -> Markup {
    let title = format!("Switch to {} theme", color_scheme.0);
    html! {
        a href=(format!("javascript:updateColorScheme(\"{}\")", color_scheme.1)) title=(title) {
            (color_scheme.0)
        }
    }
}
/// Partial: archive button
///
/// Renders a download link for one archive format, appending to the existing
/// sort query parameters when any are active.
fn archive_button(
    archive_method: ArchiveMethod,
    sort_method: Option<SortingMethod>,
    sort_order: Option<SortingOrder>,
) -> Markup {
    let link = if sort_method.is_some() || sort_order.is_some() {
        format!(
            "{}&download={}",
            parametrized_link("", sort_method, sort_order, false),
            archive_method
        )
    } else {
        format!("?download={archive_method}")
    };
    let text = format!("Download .{}", archive_method.extension());
    html! {
        a href=(link) { (text) }
    }
}
/// Ensure that there's always a trailing slash behind the `link`.
/// Empty input stays empty; already-slashed input is returned unchanged.
fn make_link_with_trailing_slash(link: &str) -> String {
    match link {
        "" => String::new(),
        l if l.ends_with('/') => l.to_string(),
        l => format!("{l}/"),
    }
}
/// If they are set, adds query parameters to links to keep them across pages.
/// In raw mode only `?raw=true` is appended; sort parameters are kept only
/// when both method and order are present.
fn parametrized_link(
    link: &str,
    sort_method: Option<SortingMethod>,
    sort_order: Option<SortingOrder>,
    raw: bool,
) -> String {
    let base = make_link_with_trailing_slash(link);
    if raw {
        return format!("{base}?raw=true");
    }
    match (sort_method, sort_order) {
        (Some(method), Some(order)) => format!("{base}?sort={method}&order={order}"),
        _ => base,
    }
}
/// Partial: table header link
///
/// Renders a sortable column header. The column is marked "active" when it
/// is the current sort key; clicking an ascending active column flips the
/// order to descending, anything else links to ascending.
fn build_link(
    name: &str,
    title: &str,
    sort_method: Option<SortingMethod>,
    sort_order: Option<SortingOrder>,
) -> Markup {
    let is_active = matches!(&sort_method, Some(m) if m.to_string() == name);
    let is_asc = is_active && matches!(&sort_order, Some(o) if o.to_string() == "asc");
    let class = if is_active { "active" } else { "" };
    let (link, help, chevron) = if is_asc {
        (
            format!("?sort={name}&order=desc"),
            format!("Sort by {name} in descending order"),
            chevron_down(),
        )
    } else {
        (
            format!("?sort={name}&order=asc"),
            format!("Sort by {name} in ascending order"),
            chevron_up(),
        )
    };
    html! {
        span class=(class) {
            span.chevron { (chevron) }
            a href=(link) title=(help) { (title) }
        }
    }
}
/// Partial: row for an entry
///
/// Renders one table row: name cell (directory/file/symlink variants),
/// size cell, and last-modification cell. In `raw` mode the inline mobile
/// size span is omitted.
fn entry_row(
    entry: Entry,
    sort_method: Option<SortingMethod>,
    sort_order: Option<SortingOrder>,
    raw: bool,
) -> Markup {
    html! {
        tr {
            td {
                p {
                    @if entry.is_dir() {
                        // Directories get a trailing "/" and keep sort params.
                        @if let Some(symlink_dest) = entry.symlink_info {
                            a.symlink href=(parametrized_link(&entry.link, sort_method, sort_order, raw)) {
                                (entry.name) "/"
                                span.symlink-symbol { }
                                a.directory {(symlink_dest) "/"}
                            }
                        }@else {
                            a.directory href=(parametrized_link(&entry.link, sort_method, sort_order, raw)) {
                                (entry.name) "/"
                            }
                        }
                    } @else if entry.is_file() {
                        @if let Some(symlink_dest) = entry.symlink_info {
                            a.symlink href=(&entry.link) {
                                (entry.name)
                                span.symlink-symbol { }
                                a.file {(symlink_dest)}
                            }
                        }@else {
                            a.file href=(&entry.link) {
                                (entry.name)
                            }
                        }
                        // Inline size shown on small screens, skipped in raw mode.
                        @if !raw {
                            @if let Some(size) = entry.size {
                                span.mobile-info.size {
                                    (maud::display(size))
                                }
                            }
                        }
                    }
                }
            }
            td.size-cell {
                @if let Some(size) = entry.size {
                    (maud::display(size))
                }
            }
            td.date-cell {
                // Absolute timestamp plus a humanized "x ago" suffix.
                @if let Some(modification_date) = convert_to_local(entry.last_modification_date) {
                    span {
                        (modification_date) " "
                    }
                }
                @if let Some(modification_timer) = humanize_systemtime(entry.last_modification_date) {
                    span.history {
                        (modification_timer)
                    }
                }
            }
        }
    }
}
/// Partial: up arrow
fn arrow_up() -> Markup {
    PreEscaped("⇪".to_string())
}
/// Partial: chevron left
fn chevron_left() -> Markup {
    PreEscaped("◂".to_string())
}
/// Partial: chevron up
fn chevron_up() -> Markup {
    PreEscaped("▴".to_string())
}
/// Partial: chevron down
fn chevron_down() -> Markup {
    PreEscaped("▾".to_string())
}
/// Partial: page header
///
/// Emits the `<head>` element: meta tags, favicon and stylesheet links, the
/// theme-switching script, and (when uploads are enabled) the drag-and-drop
/// upload script.
fn page_header(title: &str, file_upload: bool, favicon_route: &str, css_route: &str) -> Markup {
    html! {
        head {
            meta charset="utf-8";
            meta http-equiv="X-UA-Compatible" content="IE=edge";
            meta name="viewport" content="width=device-width, initial-scale=1";
            meta name="color-scheme" content="dark light";
            link rel="icon" type="image/svg+xml" href={ (favicon_route) };
            link rel="stylesheet" href={ (css_route) };
            title { (title) }
            // Inline script: persist/restore the selected theme in
            // localStorage (also synchronized across tabs).
            (PreEscaped(r#"
            <script>
            // updates the color scheme by setting the theme data attribute
            // on body and saving the new theme to local storage
            function updateColorScheme(name) {
            if (name && name != "default") {
            localStorage.setItem('theme', name);
            document.body.setAttribute("data-theme", name)
            } else {
            localStorage.removeItem('theme');
            document.body.removeAttribute("data-theme")
            }
            }
            // read theme from local storage and apply it to body
            function loadColorScheme() {
            var name = localStorage.getItem('theme');
            updateColorScheme(name);
            }
            // load saved theme on page load
            addEventListener("load", loadColorScheme);
            // load saved theme when local storage is changed (synchronize between tabs)
            addEventListener("storage", loadColorScheme);
            </script>
            "#))
            @if file_upload {
                // Inline script: route window-wide drag & drop into the
                // upload form's file input and submit it.
                (PreEscaped(r#"
                <script>
                window.onload = function() {
                const dropContainer = document.querySelector('#drop-container');
                const dragForm = document.querySelector('.drag-form');
                const fileInput = document.querySelector('#file-input');
                const collection = [];
                dropContainer.ondragover = function(e) {
                e.preventDefault();
                }
                dropContainer.ondragenter = function(e) {
                e.preventDefault();
                if (collection.length === 0) {
                dragForm.style.display = 'initial';
                }
                collection.push(e.target);
                };
                dropContainer.ondragleave = function(e) {
                e.preventDefault();
                collection.splice(collection.indexOf(e.target), 1);
                if (collection.length === 0) {
                dragForm.style.display = 'none';
                }
                };
                dropContainer.ondrop = function(e) {
                e.preventDefault();
                fileInput.files = e.dataTransfer.files;
                file_submit.submit();
                dragForm.style.display = 'none';
                };
                }
                </script>
                "#))
            }
        }
    }
}
/// Converts a SystemTime object to a strings tuple (date, time)
fn convert_to_local(src_time: Option<SystemTime>) -> Option<String> {
src_time
.map(DateTime::<Local>::from)
.map(|date_time| date_time.format("%Y-%m-%d %H:%M:%S %:z").to_string())
}
/// Converts a SystemTime to a string readable by a human,
/// giving a rough approximation of the elapsed time since; `None` passes
/// through.
fn humanize_systemtime(time: Option<SystemTime>) -> Option<String> {
    Some(time?.humanize())
}
/// Renders an error on the webpage
///
/// Shows the status code plus each line of `error_description`, an optional
/// "go back" link, and the version footer.
pub fn render_error(
    error_description: &str,
    error_code: StatusCode,
    conf: &MiniserveConfig,
    return_address: &str,
) -> Markup {
    html! {
        (DOCTYPE)
        html {
            (page_header(&error_code.to_string(), false, &conf.favicon_route, &conf.css_route))
            body
            {
                div.error {
                    p { (error_code.to_string()) }
                    @for error in error_description.lines() {
                        p { (error) }
                    }
                    // WARN don't expose random route!
                    // Only offer the back-link when no secret route prefix is
                    // configured, so the prefix is never leaked on error pages.
                    @if conf.route_prefix.is_empty() {
                        div.error-nav {
                            a.error-back href=(return_address) {
                                "Go back to file listing"
                            }
                        }
                    }
                    @if !conf.hide_version_footer {
                        p.footer {
                            (version_footer())
                        }
                    }
                }
            }
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use pretty_assertions::assert_eq;
    /// Wraps the expected wget command fragment in the exact HTML that
    /// `wget_footer` renders around it.
    fn to_html(wget_part: &str) -> String {
        format!(
            r#"<div class="downloadDirectory"><p>Download folder:</p><a class="cmd" title="Click to copy!" style="cursor: pointer;" onclick="navigator.clipboard.writeText("wget -rcnHp -R 'index.html*' {wget_part}/?raw=true'")">wget -rcnHp -R 'index.html*' {wget_part}/?raw=true'</a></div>"#
        )
    }
    /// Helper: parse a literal into a `Uri`, panicking on invalid input.
    fn uri(x: &str) -> Uri {
        Uri::try_from(x).unwrap()
    }
    // Root URL, no title, no user: files go into a host-named folder.
    #[test]
    fn test_wget_footer_trivial() {
        let to_be_tested: String = wget_footer(&uri("https://github.com/"), None, None).into();
        let expected = to_html("-P 'github.com' 'https://github.com");
        assert_eq!(to_be_tested, expected);
    }
    // Depth 2 path: one directory level is cut instead of naming a folder.
    #[test]
    fn test_wget_footer_with_root_dir() {
        let to_be_tested: String = wget_footer(
            &uri("https://github.com/svenstaro/miniserve/"),
            Some("Miniserve"),
            None,
        )
        .into();
        let expected = to_html("--cut-dirs=1 'https://github.com/svenstaro/miniserve");
        assert_eq!(to_be_tested, expected);
    }
    // Authenticated user: apostrophes in the username are shell-escaped.
    #[test]
    fn test_wget_footer_with_root_dir_and_user() {
        let to_be_tested: String = wget_footer(
            &uri("http://1und1.de/"),
            Some("1&1 - Willkommen!!!"),
            Some("Marcell D'Avis"),
        )
        .into();
        let expected = to_html("-P '1&1 - Willkommen!!!' --ask-password --user 'Marcell D'"'"'Avis' 'http://1und1.de");
        assert_eq!(to_be_tested, expected);
    }
    // Non-ASCII and quote-heavy usernames survive the escaping.
    #[test]
    fn test_wget_footer_escaping() {
        let to_be_tested: String = wget_footer(
            &uri("http://127.0.0.1:1234/geheime_dokumente.php/"),
            Some("Streng Geheim!!!"),
            Some("uøý`¶'7ÅÛé"),
        )
        .into();
        let expected = to_html("--ask-password --user 'uøý`¶'"'"'7ÅÛé' 'http://127.0.0.1:1234/geheime_dokumente.php");
        assert_eq!(to_be_tested, expected);
    }
    // Bare IP:port authority is used as the folder name.
    #[test]
    fn test_wget_footer_ip() {
        let to_be_tested: String = wget_footer(&uri("http://127.0.0.1:420/"), None, None).into();
        let expected = to_html("-P '127.0.0.1:420' 'http://127.0.0.1:420");
        assert_eq!(to_be_tested, expected);
    }
}
|
use actix::prelude::*;
use actix::Handler;
use bytes::Bytes;
use super::ChatServer;
/// Actor message carrying one chat payload: a header plus an opaque body.
#[derive(Message)]
#[rtype(result = "()")]
pub struct MessagePayload {
    // Identity/routing information for this payload.
    header: MessagePayloadHeader,
    // Raw message bytes.
    body: Bytes,
}
impl MessagePayload {
    /// Bundles a header and body into a payload message.
    pub fn new(header: MessagePayloadHeader, body: Bytes) -> Self {
        MessagePayload { header, body }
    }
}
/// Header metadata for a [`MessagePayload`].
pub struct MessagePayloadHeader {
    // NOTE(review): presumably a message or sender id — confirm semantics.
    id: i64,
}
impl MessagePayloadHeader {
    /// Creates a header with the given id.
    pub fn new(id: i64) -> Self {
        MessagePayloadHeader { id }
    }
}
impl Handler<MessagePayload> for ChatServer {
    type Result = ();
    /// Forwards the payload body to the server's message queue.
    ///
    /// NOTE(review): the `unwrap()` panics the actor if `push_message`
    /// fails — confirm failure is impossible, or handle the error.
    fn handle(&mut self, msg: MessagePayload, _: &mut Context<Self>) {
        self.push_message(msg.body).unwrap();
    }
}
|
//! Watches a Kubernetes Resource for changes, with error recovery
//!
//! See [`watcher`] for the primary entry point.
use crate::utils::ResetTimerBackoff;
use async_trait::async_trait;
use backoff::{backoff::Backoff, ExponentialBackoff};
use derivative::Derivative;
use futures::{stream::BoxStream, Stream, StreamExt};
use kube_client::{
api::{ListParams, Resource, ResourceExt, VersionMatch, WatchEvent, WatchParams},
core::{metadata::PartialObjectMeta, ObjectList},
error::ErrorResponse,
Api, Error as ClientErr,
};
use serde::de::DeserializeOwned;
use smallvec::SmallVec;
use std::{clone::Clone, fmt::Debug, time::Duration};
use thiserror::Error;
use tracing::{debug, error, warn};
/// Errors returned by the [`watcher`] stream.
#[derive(Debug, Error)]
pub enum Error {
    /// The initial LIST call failed.
    #[error("failed to perform initial object list: {0}")]
    InitialListFailed(#[source] kube_client::Error),
    /// Establishing the WATCH connection failed.
    #[error("failed to start watching object: {0}")]
    WatchStartFailed(#[source] kube_client::Error),
    /// The apiserver returned an error event inside the watch stream.
    #[error("error returned by apiserver during watch: {0}")]
    WatchError(#[source] ErrorResponse),
    /// The watch stream itself failed mid-flight.
    #[error("watch stream failed: {0}")]
    WatchFailed(#[source] kube_client::Error),
    /// A watch result lacked `metadata.resourceVersion`, so the watch cannot
    /// be resumed from it.
    #[error("no metadata.resourceVersion in watch result (does resource support watch?)")]
    NoResourceVersion,
    /// More objects matched than the search criteria allow.
    #[error("too many objects matched search criteria")]
    TooManyObjects,
}
/// Convenience alias defaulting the error type to [`Error`].
pub type Result<T, E = Error> = std::result::Result<T, E>;
#[derive(Debug, Clone)]
/// Watch events returned from the [`watcher`]
pub enum Event<K> {
    /// An object was added or modified
    Applied(K),
    /// An object was deleted
    ///
    /// NOTE: This should not be used for managing persistent state elsewhere, since
    /// events may be lost if the watcher is unavailable. Use Finalizers instead.
    Deleted(K),
    /// The watch stream was restarted, so `Deleted` events may have been missed
    ///
    /// Should be used as a signal to replace the store contents atomically.
    ///
    /// Any objects that were previously [`Applied`](Event::Applied) but are not listed in this event
    /// should be assumed to have been [`Deleted`](Event::Deleted).
    Restarted(Vec<K>),
}
impl<K> Event<K> {
    /// Flattens out all objects that were added or modified in the event.
    ///
    /// `Deleted` objects are ignored, all objects mentioned by `Restarted` events are
    /// emitted individually.
    pub fn into_iter_applied(self) -> impl Iterator<Item = K> {
        // SmallVec keeps the 0/1-object cases free of heap allocation.
        match self {
            Event::Applied(obj) => SmallVec::from_buf([obj]),
            Event::Deleted(_) => SmallVec::new(),
            Event::Restarted(objs) => SmallVec::from_vec(objs),
        }
        .into_iter()
    }
    /// Flattens out all objects that were added, modified, or deleted in the event.
    ///
    /// Note that `Deleted` events may be missed when restarting the stream. Use finalizers
    /// or owner references instead if you care about cleaning up external resources after
    /// deleted objects.
    pub fn into_iter_touched(self) -> impl Iterator<Item = K> {
        match self {
            Event::Applied(obj) | Event::Deleted(obj) => SmallVec::from_buf([obj]),
            Event::Restarted(objs) => SmallVec::from_vec(objs),
        }
        .into_iter()
    }
    /// Map each object in an event through a mutator fn
    ///
    /// This allows for memory optimizations in watch streams.
    /// If you are chaining a watch stream into a reflector as an in memory state store,
    /// you can control the space used by each object by dropping fields.
    ///
    /// ```no_run
    /// use k8s_openapi::api::core::v1::Pod;
    /// use kube::ResourceExt;
    /// # use kube::runtime::watcher::Event;
    /// # let event: Event<Pod> = todo!();
    /// event.modify(|pod| {
    ///     pod.managed_fields_mut().clear();
    ///     pod.annotations_mut().clear();
    ///     pod.status = None;
    /// });
    /// ```
    #[must_use]
    pub fn modify(mut self, mut f: impl FnMut(&mut K)) -> Self {
        match &mut self {
            Event::Applied(obj) | Event::Deleted(obj) => (f)(obj),
            Event::Restarted(objs) => {
                // Apply the mutator to every object carried by the relist.
                for k in objs {
                    (f)(k)
                }
            }
        }
        self
    }
}
#[derive(Derivative)]
#[derivative(Debug)]
/// The internal finite state machine driving the [`watcher`]
enum State<K> {
    /// The Watcher is empty, and the next [`poll`](Stream::poll_next) will start the initial LIST to get all existing objects
    Empty,
    /// The initial LIST was successful, so we should move on to starting the actual watch.
    InitListed { resource_version: String },
    /// The watch is in progress, from this point we just return events from the server.
    ///
    /// If the connection is disrupted then we propagate the error but try to restart the watch stream by
    /// returning to the `InitListed` state.
    /// If we fall out of the K8s watch window then we propagate the error and fall back doing a re-list
    /// with `Empty`.
    Watching {
        resource_version: String,
        // The boxed stream has no Debug impl, so skip it in Debug output.
        #[derivative(Debug = "ignore")]
        stream: BoxStream<'static, kube_client::Result<WatchEvent<K>>>,
    },
}
/// Used to control whether the watcher receives the full object, or only the
/// metadata
#[async_trait]
trait ApiMode {
    /// Object representation produced by this mode.
    type Value: Clone;
    /// Performs a LIST call with the given parameters.
    async fn list(&self, lp: &ListParams) -> kube_client::Result<ObjectList<Self::Value>>;
    /// Opens a WATCH stream starting at resource `version`.
    async fn watch(
        &self,
        wp: &WatchParams,
        version: &str,
    ) -> kube_client::Result<BoxStream<'static, kube_client::Result<WatchEvent<Self::Value>>>>;
}
/// A wrapper around the `Api` of a `Resource` type that when used by the
/// watcher will return the entire (full) object
struct FullObject<'a, K> {
    api: &'a Api<K>,
}
/// Configurable list semantics for `watcher` relists
///
/// Selected through [`Config::list_semantic`].
#[derive(Clone, Default, Debug, PartialEq)]
pub enum ListSemantic {
    /// List calls perform a full quorum read for most recent results
    ///
    /// Prefer this if you have strong consistency requirements. Note that this
    /// is more taxing for the apiserver and can be less scalable for the cluster.
    ///
    /// If you are observing large resource sets (such as congested `Controller` cases),
    /// you typically have a delay between the list call completing, and all the events
    /// getting processed. In such cases, it is probably worth picking `Any` over `MostRecent`,
    /// as your events are not guaranteed to be up-to-date by the time you get to them anyway.
    #[default]
    MostRecent,
    /// List calls returns cached results from apiserver
    ///
    /// This is faster and much less taxing on the apiserver, but can result
    /// in much older results than has previously observed for `Restarted` events,
    /// particularly in HA configurations, due to partitions or stale caches.
    ///
    /// This option makes the most sense for controller usage where events have
    /// some delay between being seen by the runtime, and it being sent to the reconciler.
    Any,
}
/// Accumulates all options that can be used on the watcher invocation.
///
/// Customize via the builder methods (`timeout`, `fields`, `labels`, ...)
/// on the `impl Config` block below.
#[derive(Clone, Debug, PartialEq)]
pub struct Config {
    /// A selector to restrict the list of returned objects by their labels.
    ///
    /// Defaults to everything if `None`.
    pub label_selector: Option<String>,
    /// A selector to restrict the list of returned objects by their fields.
    ///
    /// Defaults to everything if `None`.
    pub field_selector: Option<String>,
    /// Timeout for the list/watch call.
    ///
    /// This limits the duration of the call, regardless of any activity or inactivity.
    /// If unset for a watch call, we will use 290s.
    /// We limit this to 295s due to [inherent watch limitations](https://github.com/kubernetes/kubernetes/issues/6513).
    pub timeout: Option<u32>,
    /// Semantics for list calls.
    ///
    /// Configures re-list for performance vs. consistency.
    pub list_semantic: ListSemantic,
    /// Enables watch events with type "BOOKMARK".
    ///
    /// Requests watch bookmarks from the apiserver when enabled for improved watch precision and reduced list calls.
    /// This is default enabled and should generally not be turned off.
    pub bookmarks: bool,
}
impl Default for Config {
fn default() -> Self {
Self {
bookmarks: true,
label_selector: None,
field_selector: None,
timeout: None,
list_semantic: ListSemantic::default(),
}
}
}
/// Builder interface to Config
///
/// Usage:
/// ```
/// use kube::runtime::watcher::Config;
/// let wc = Config::default()
///     .timeout(60)
///     .labels("kubernetes.io/lifecycle=spot");
/// ```
impl Config {
    /// Configure the timeout for list/watch calls
    ///
    /// This limits the duration of the call, regardless of any activity or inactivity.
    /// Defaults to 290s
    #[must_use]
    pub fn timeout(self, timeout_secs: u32) -> Self {
        Self {
            timeout: Some(timeout_secs),
            ..self
        }
    }
    /// Configure the selector to restrict the list of returned objects by their fields.
    ///
    /// Defaults to everything.
    /// Supports `=`, `==`, `!=`, and can be comma separated: `key1=value1,key2=value2`.
    /// The server only supports a limited number of field queries per type.
    #[must_use]
    pub fn fields(self, field_selector: &str) -> Self {
        Self {
            field_selector: Some(field_selector.to_owned()),
            ..self
        }
    }
    /// Configure the selector to restrict the list of returned objects by their labels.
    ///
    /// Defaults to everything.
    /// Supports `=`, `==`, `!=`, and can be comma separated: `key1=value1,key2=value2`.
    #[must_use]
    pub fn labels(self, label_selector: &str) -> Self {
        Self {
            label_selector: Some(label_selector.to_owned()),
            ..self
        }
    }
    /// Sets list semantic to configure re-list performance and consistency
    #[must_use]
    pub fn list_semantic(self, semantic: ListSemantic) -> Self {
        Self {
            list_semantic: semantic,
            ..self
        }
    }
    /// Sets list semantic to `Any` to improve list performance
    #[must_use]
    pub fn any_semantic(self) -> Self {
        self.list_semantic(ListSemantic::Any)
    }
    /// Disables watch bookmarks to simplify watch handling
    ///
    /// This is not recommended to use with production watchers as it can cause desyncs.
    /// See [#219](https://github.com/kube-rs/kube/issues/219) for details.
    #[must_use]
    pub fn disable_bookmarks(self) -> Self {
        Self {
            bookmarks: false,
            ..self
        }
    }
    /// Converts generic `watcher::Config` structure to the instance of `ListParams` used for list requests.
    fn to_list_params(&self) -> ListParams {
        // `Any` relists may be served from cache at resource version "0";
        // `MostRecent` forces a quorum read by leaving both fields unset.
        let (resource_version, version_match) = if matches!(self.list_semantic, ListSemantic::Any) {
            (Some("0".into()), Some(VersionMatch::NotOlderThan))
        } else {
            (None, None)
        };
        ListParams {
            label_selector: self.label_selector.clone(),
            field_selector: self.field_selector.clone(),
            timeout: self.timeout,
            version_match,
            resource_version,
            // It is not permissible for users to configure the continue token and limit for the watcher, as these parameters are associated with paging.
            // The watcher must handle paging internally.
            limit: None,
            continue_token: None,
        }
    }
    /// Converts generic `watcher::Config` structure to the instance of `WatchParams` used for watch requests.
    fn to_watch_params(&self) -> WatchParams {
        WatchParams {
            timeout: self.timeout,
            bookmarks: self.bookmarks,
            label_selector: self.label_selector.clone(),
            field_selector: self.field_selector.clone(),
        }
    }
}
#[async_trait]
impl<K> ApiMode for FullObject<'_, K>
where
    K: Clone + Debug + DeserializeOwned + Send + 'static,
{
    type Value = K;

    /// List objects with their full bodies.
    async fn list(&self, lp: &ListParams) -> kube_client::Result<ObjectList<Self::Value>> {
        self.api.list(lp).await
    }

    /// Open a watch at `version` and box the resulting event stream.
    async fn watch(
        &self,
        wp: &WatchParams,
        version: &str,
    ) -> kube_client::Result<BoxStream<'static, kube_client::Result<WatchEvent<Self::Value>>>> {
        let stream = self.api.watch(wp, version).await?;
        Ok(stream.boxed())
    }
}
/// A wrapper around the `Api` of a `Resource` type that when used by the
/// watcher will return only the metadata associated with an object
struct MetaOnly<'a, K> {
    // Borrowed API handle; `ApiMode` impl below routes calls to the
    // `*_metadata` variants so only `PartialObjectMeta` is transferred.
    api: &'a Api<K>,
}
#[async_trait]
impl<K> ApiMode for MetaOnly<'_, K>
where
    K: Clone + Debug + DeserializeOwned + Send + 'static,
{
    type Value = PartialObjectMeta<K>;

    /// List objects, fetching only their metadata.
    async fn list(&self, lp: &ListParams) -> kube_client::Result<ObjectList<Self::Value>> {
        self.api.list_metadata(lp).await
    }

    /// Open a metadata-only watch at `version` and box the event stream.
    async fn watch(
        &self,
        wp: &WatchParams,
        version: &str,
    ) -> kube_client::Result<BoxStream<'static, kube_client::Result<WatchEvent<Self::Value>>>> {
        let stream = self.api.watch_metadata(wp, version).await?;
        Ok(stream.boxed())
    }
}
/// Progresses the watcher a single step, returning (event, state)
///
/// This function should be trampolined: if event == `None`
/// then the function should be called again until it returns a Some.
///
/// State machine: `Empty` (needs a full re-list) -> `InitListed` (has a
/// resourceVersion, needs a watch) -> `Watching` (consuming watch events).
/// Errors are emitted to the caller but the state is chosen so the next
/// call can retry or recover.
async fn step_trampolined<A>(
    api: &A,
    wc: &Config,
    state: State<A::Value>,
) -> (Option<Result<Event<A::Value>>>, State<A::Value>)
where
    A: ApiMode,
    A::Value: Resource + 'static,
{
    match state {
        // No usable state: re-list everything to (re-)seed downstream consumers.
        State::Empty => match api.list(&wc.to_list_params()).await {
            Ok(list) => {
                if let Some(resource_version) = list.metadata.resource_version {
                    (Some(Ok(Event::Restarted(list.items))), State::InitListed {
                        resource_version,
                    })
                } else {
                    // Without a resourceVersion we cannot start a watch; stay Empty.
                    (Some(Err(Error::NoResourceVersion)), State::Empty)
                }
            }
            Err(err) => {
                // 403 is logged louder since it usually indicates an RBAC misconfiguration.
                if std::matches!(err, ClientErr::Api(ErrorResponse { code: 403, .. })) {
                    warn!("watch list error with 403: {err:?}");
                } else {
                    debug!("watch list error: {err:?}");
                }
                (Some(Err(err).map_err(Error::InitialListFailed)), State::Empty)
            }
        },
        // Listed successfully: try to open a watch from that resourceVersion.
        State::InitListed { resource_version } => {
            match api.watch(&wc.to_watch_params(), &resource_version).await {
                // Watch opened: no event to emit yet, caller trampolines again.
                Ok(stream) => (None, State::Watching {
                    resource_version,
                    stream,
                }),
                Err(err) => {
                    if std::matches!(err, ClientErr::Api(ErrorResponse { code: 403, .. })) {
                        warn!("watch initlist error with 403: {err:?}");
                    } else {
                        debug!("watch initlist error: {err:?}");
                    }
                    // Keep the resourceVersion so the watch can be retried without re-listing.
                    (
                        Some(Err(err).map_err(Error::WatchStartFailed)),
                        State::InitListed { resource_version },
                    )
                }
            }
        }
        // Actively consuming the watch stream.
        State::Watching {
            resource_version,
            mut stream,
        } => match stream.next().await {
            Some(Ok(WatchEvent::Added(obj) | WatchEvent::Modified(obj))) => {
                // NOTE(review): assumes every watch event object carries a
                // resourceVersion — panics otherwise; TODO confirm upstream guarantee.
                let resource_version = obj.resource_version().unwrap();
                (Some(Ok(Event::Applied(obj))), State::Watching {
                    resource_version,
                    stream,
                })
            }
            Some(Ok(WatchEvent::Deleted(obj))) => {
                let resource_version = obj.resource_version().unwrap();
                (Some(Ok(Event::Deleted(obj))), State::Watching {
                    resource_version,
                    stream,
                })
            }
            // Bookmarks only advance the resourceVersion; nothing to emit.
            Some(Ok(WatchEvent::Bookmark(bm))) => (None, State::Watching {
                resource_version: bm.metadata.resource_version,
                stream,
            }),
            Some(Ok(WatchEvent::Error(err))) => {
                // HTTP GONE, means we have desynced and need to start over and re-list :(
                let new_state = if err.code == 410 {
                    State::Empty
                } else {
                    State::Watching {
                        resource_version,
                        stream,
                    }
                };
                if err.code == 403 {
                    warn!("watcher watchevent error 403: {err:?}");
                } else {
                    // NOTE(review): message wording looks off ("error watchevent
                    // error") — likely meant "watcher watchevent error".
                    debug!("error watchevent error: {err:?}");
                }
                (Some(Err(err).map_err(Error::WatchError)), new_state)
            }
            Some(Err(err)) => {
                if std::matches!(err, ClientErr::Api(ErrorResponse { code: 403, .. })) {
                    warn!("watcher error 403: {err:?}");
                } else {
                    debug!("watcher error: {err:?}");
                }
                // Transport-level error: keep watching from the same version.
                (Some(Err(err).map_err(Error::WatchFailed)), State::Watching {
                    resource_version,
                    stream,
                })
            }
            // Stream ended cleanly: re-open the watch from the last seen version.
            None => (None, State::InitListed { resource_version }),
        },
    }
}
/// Trampoline helper for `step_trampolined`
///
/// Keeps advancing the watcher state machine until a step produces an event.
async fn step<A>(
    api: &A,
    config: &Config,
    mut state: State<A::Value>,
) -> (Result<Event<A::Value>>, State<A::Value>)
where
    A: ApiMode,
    A::Value: Resource + 'static,
{
    loop {
        let (maybe_event, next_state) = step_trampolined(api, config, state).await;
        if let Some(event) = maybe_event {
            return (event, next_state);
        }
        // No event this step (e.g. a watch was just opened) — keep stepping.
        state = next_state;
    }
}
/// Watches a Kubernetes Resource for changes continuously
///
/// Compared to [`Api::watch`], this automatically tries to recover the stream upon errors.
///
/// Errors from the underlying watch are propagated, after which the stream will go into recovery mode on the next poll.
/// You can apply your own backoff by not polling the stream for a duration after errors.
/// Keep in mind that some [`TryStream`](futures::TryStream) combinators (such as
/// [`try_for_each`](futures::TryStreamExt::try_for_each) and [`try_concat`](futures::TryStreamExt::try_concat))
/// will terminate eagerly as soon as they receive an [`Err`].
///
/// This is intended to provide a safe and atomic input interface for a state store like a [`reflector`].
/// Direct users may want to flatten composite events via [`WatchStreamExt`]:
///
/// ```no_run
/// use kube::{
/// api::{Api, ResourceExt}, Client,
/// runtime::{watcher, WatchStreamExt}
/// };
/// use k8s_openapi::api::core::v1::Pod;
/// use futures::TryStreamExt;
/// #[tokio::main]
/// async fn main() -> Result<(), watcher::Error> {
/// let client = Client::try_default().await.unwrap();
/// let pods: Api<Pod> = Api::namespaced(client, "apps");
///
/// watcher(pods, watcher::Config::default()).applied_objects()
/// .try_for_each(|p| async move {
/// println!("Applied: {}", p.name_any());
/// Ok(())
/// })
/// .await?;
/// Ok(())
/// }
/// ```
/// [`WatchStreamExt`]: super::WatchStreamExt
/// [`reflector`]: super::reflector::reflector
/// [`Api::watch`]: kube_client::Api::watch
///
/// # Recovery
///
/// The stream will attempt to be recovered on the next poll after an [`Err`] is returned.
/// This will normally happen immediately, but you can use [`StreamBackoff`](crate::utils::StreamBackoff)
/// to introduce an artificial delay. [`default_backoff`] returns a suitable default set of parameters.
///
/// If the watch connection is interrupted, then `watcher` will attempt to restart the watch using the last
/// [resource version](https://kubernetes.io/docs/reference/using-api/api-concepts/#efficient-detection-of-changes)
/// that we have seen on the stream. If this is successful then the stream is simply resumed from where it left off.
/// If this fails because the resource version is no longer valid then we start over with a new stream, starting with
/// an [`Event::Restarted`]. The internals mechanics of recovery should be considered an implementation detail.
pub fn watcher<K: Resource + Clone + DeserializeOwned + Debug + Send + 'static>(
    api: Api<K>,
    watcher_config: Config,
) -> impl Stream<Item = Result<Event<K>>> + Send {
    // Thread (api, config, state) through `unfold`, emitting one event per step.
    futures::stream::unfold(
        (api, watcher_config, State::Empty),
        |(api, config, state)| async {
            let (event, next_state) = step(&FullObject { api: &api }, &config, state).await;
            // The stream never terminates; recovery is handled inside `step`.
            Some((event, (api, config, next_state)))
        },
    )
}
/// Watches a Kubernetes Resource for changes continuously and receives only the
/// metadata
///
/// Compared to [`Api::watch_metadata`], this automatically tries to recover the stream upon errors.
///
/// Errors from the underlying watch are propagated, after which the stream will go into recovery mode on the next poll.
/// You can apply your own backoff by not polling the stream for a duration after errors.
/// Keep in mind that some [`TryStream`](futures::TryStream) combinators (such as
/// [`try_for_each`](futures::TryStreamExt::try_for_each) and [`try_concat`](futures::TryStreamExt::try_concat))
/// will terminate eagerly as soon as they receive an [`Err`].
///
/// This is intended to provide a safe and atomic input interface for a state store like a [`reflector`].
/// Direct users may want to flatten composite events via [`WatchStreamExt`]:
///
/// ```no_run
/// use kube::{
/// api::{Api, ResourceExt}, Client,
/// runtime::{watcher, metadata_watcher, WatchStreamExt}
/// };
/// use k8s_openapi::api::core::v1::Pod;
/// use futures::TryStreamExt;
/// #[tokio::main]
/// async fn main() -> Result<(), watcher::Error> {
/// let client = Client::try_default().await.unwrap();
/// let pods: Api<Pod> = Api::namespaced(client, "apps");
///
/// metadata_watcher(pods, watcher::Config::default()).applied_objects()
/// .try_for_each(|p| async move {
/// println!("Applied: {}", p.name_any());
/// Ok(())
/// })
/// .await?;
/// Ok(())
/// }
/// ```
/// [`WatchStreamExt`]: super::WatchStreamExt
/// [`reflector`]: super::reflector::reflector
/// [`Api::watch`]: kube_client::Api::watch
///
/// # Recovery
///
/// The stream will attempt to be recovered on the next poll after an [`Err`] is returned.
/// This will normally happen immediately, but you can use [`StreamBackoff`](crate::utils::StreamBackoff)
/// to introduce an artificial delay. [`default_backoff`] returns a suitable default set of parameters.
///
/// If the watch connection is interrupted, then `watcher` will attempt to restart the watch using the last
/// [resource version](https://kubernetes.io/docs/reference/using-api/api-concepts/#efficient-detection-of-changes)
/// that we have seen on the stream. If this is successful then the stream is simply resumed from where it left off.
/// If this fails because the resource version is no longer valid then we start over with a new stream, starting with
/// an [`Event::Restarted`]. The internals mechanics of recovery should be considered an implementation detail.
#[allow(clippy::module_name_repetitions)]
pub fn metadata_watcher<K: Resource + Clone + DeserializeOwned + Debug + Send + 'static>(
    api: Api<K>,
    watcher_config: Config,
) -> impl Stream<Item = Result<Event<PartialObjectMeta<K>>>> + Send {
    // Same trampoline as `watcher`, but in metadata-only mode.
    futures::stream::unfold(
        (api, watcher_config, State::Empty),
        |(api, config, state)| async {
            let (event, next_state) = step(&MetaOnly { api: &api }, &config, state).await;
            // The stream never terminates; recovery is handled inside `step`.
            Some((event, (api, config, next_state)))
        },
    )
}
/// Watch a single named object for updates
///
/// Emits `None` if the object is deleted (or not found), and `Some` if an object is updated (or created/found).
///
/// Compared to [`watcher`], `watch_object` does not return [`Event`], since there is no need for an atomic
/// [`Event::Restarted`] when only one object is covered anyway.
pub fn watch_object<K: Resource + Clone + DeserializeOwned + Debug + Send + 'static>(
api: Api<K>,
name: &str,
) -> impl Stream<Item = Result<Option<K>>> + Send {
watcher(api, Config::default().fields(&format!("metadata.name={name}"))).map(|event| match event? {
Event::Deleted(_) => Ok(None),
// We're filtering by object name, so getting more than one object means that either:
// 1. The apiserver is accepting multiple objects with the same name, or
// 2. The apiserver is ignoring our query
// In either case, the K8s apiserver is broken and our API will return invalid data, so
// we had better bail out ASAP.
Event::Restarted(objs) if objs.len() > 1 => Err(Error::TooManyObjects),
Event::Restarted(mut objs) => Ok(objs.pop()),
Event::Applied(obj) => Ok(Some(obj)),
})
}
/// Default watch [`Backoff`] inspired by Kubernetes' client-go.
///
/// Note that the exact parameters used herein should not be considered stable.
/// The parameters currently optimize for being kind to struggling apiservers.
/// See [client-go's reflector source](https://github.com/kubernetes/client-go/blob/980663e185ab6fc79163b1c2565034f6d58368db/tools/cache/reflector.go#L177-L181)
/// for more details.
#[must_use]
pub fn default_backoff() -> impl Backoff + Send + Sync {
let expo = backoff::ExponentialBackoff {
initial_interval: Duration::from_millis(800),
max_interval: Duration::from_secs(30),
randomization_factor: 1.0,
multiplier: 2.0,
max_elapsed_time: None,
..ExponentialBackoff::default()
};
ResetTimerBackoff::new(expo, Duration::from_secs(120))
}
|
#![allow(proc_macro_derive_resolution_fallback)]
use super::schema::{accession_numbers, test_accession_numbers};
use bigdecimal::BigDecimal;
/// A stored accession number row read from the `accession_numbers` table.
///
/// NOTE: `Queryable` maps columns by position, so the field order here is
/// presumed to match the column order in the schema — do not reorder.
#[derive(Queryable, PartialEq, Eq, Debug)]
pub struct AccessionNumber {
    // Primary key.
    pub id: i32,
    // Arbitrary-precision value; avoids float precision loss.
    pub accession_number: BigDecimal,
}
/// Insertable form of [`AccessionNumber`]: omits `id` so the database
/// assigns it on insert.
#[derive(Insertable)]
#[table_name = "accession_numbers"]
pub struct NewAccessionNumber {
    pub accession_number: BigDecimal,
}
/// Row type for the `test_accession_numbers` table (test mirror of
/// [`AccessionNumber`]).
///
/// NOTE: `Queryable` maps columns by position — keep the field order in
/// sync with the schema.
#[derive(Queryable, PartialEq, Eq, Debug)]
pub struct TestAccessionNumber {
    // Primary key.
    pub id: i32,
    pub accession_number: BigDecimal,
}
/// Insertable form of [`TestAccessionNumber`]: omits `id` so the database
/// assigns it on insert.
#[derive(Insertable)]
#[table_name = "test_accession_numbers"]
pub struct TestNewAccessionNumber {
    pub accession_number: BigDecimal,
}
|
pub struct Solution {}
/**
https://leetcode.com/problems/buddy-strings/
**/
impl Solution {
    /// Returns true if swapping exactly one pair of characters in `a`
    /// makes it equal to `b` (lowercase ASCII inputs, per the problem).
    pub fn buddy_strings(a: String, b: String) -> bool {
        // A valid swap needs at least two characters and equal lengths.
        if a.len() < 2 || b.len() < 2 || a.len() != b.len() {
            return false;
        }
        let a_chars: Vec<char> = a.chars().collect();
        let b_chars: Vec<char> = b.chars().collect();
        if a == b {
            // Identical strings: a swap only works if some letter repeats.
            let mut counts = [0i32; 26];
            for i in 0..a.len() {
                counts[a_chars[i] as usize - 'a' as usize] += 1;
            }
            return counts.iter().any(|&n| n > 1);
        }
        // Collect mismatch positions; a third one already rules out a single
        // swap, so stop scanning as soon as three are found.
        let mismatches: Vec<usize> = (0..a.len())
            .filter(|&i| a_chars[i] != b_chars[i])
            .take(3)
            .collect();
        match mismatches[..] {
            // Exactly two mismatches that cross-match form a valid swap.
            [i, j] => a_chars[i] == b_chars[j] && a_chars[j] == b_chars[i],
            _ => false,
        }
    }
}
mod argparser;
mod cacher;
mod config;
mod equity;
mod fetcher;
mod portfolio;
use argparser::parsearg;
use argparser::*;
use colored::*;
use config::read_user_from_file;
use fetcher::Fetcher;
use portfolio::{PortList, Portfolio, Presult};
use std::env;
use std::error;
use std::io::{self, Write};
use std::string::String;
// The main entry thread of the application
#[cfg(not(target_arch = "wasm32"))]
#[tokio::main]
async fn main() -> Result<(), Box<dyn error::Error>> {
    // Delegate straight to the interactive loop; its result is our result.
    run().await
}
// Testing function for Reqwest get, prints a Todo list
async fn display_stock(todo_id: i32) -> Result<(), Error> {
    // Placeholder endpoint used only to exercise the HTTP stack.
    let endpoint = format!("https://jsonplaceholder.typicode.com/todos/{}", todo_id);
    let response = reqwest::get(endpoint).await?;
    println!("Status:{}", response.status());
    println!("Body:\n\n{}", response.text().await?);
    Ok(())
}
// Prints the Bloomy logo
// (ASCII-art banner rendered in green via the `colored` crate; the raw
// string must be kept byte-for-byte to preserve the art's alignment.)
fn print_logo() {
    let logo = r###"
 ___ __
 / _ \ / / ___ ___ __ _ __ __
 / _ | / / / _ \/ _ \ / ' \ / // /
 /____/ /_/ \___/\___//_/_/_/ \_, /
 /___/"###;
    println!("{}", logo.green());
}
/// Initialization hook; currently only announces that the app started.
pub fn init() {
    println!("Initialized App");
}
// The main loop of the application, it is called in the main() function
//
// Reads commands from stdin, parses them into `Command` values, and
// dispatches to the fetcher / portfolio tracker until the user quits.
async fn run() -> Result<(), Box<dyn error::Error>> {
    let _client = reqwest::Client::new();
    let mut port_tracker = PortList::empty_new();
    // Collected for the (currently disabled) config loading below; kept
    // underscored until it is actually consumed.
    let _args: Vec<String> = env::args().skip(1).collect();
    // load_config(&String::from("config.txt"))?;
    let mut stdout = io::stdout();
    let stdin = io::stdin();
    let mut argparser: ArgParser;
    print_logo();
    // TODO(review): propagate this with `?` instead of unwrap once the
    // error type of read_user_from_file is confirmed.
    let key = read_user_from_file("config.json").unwrap();
    let api_key = key.alpha_vantage;
    let fetcher = Fetcher::new("alpha_vantage".to_string(), api_key);
    // Main loop for gathering user command and parsing
    loop {
        let mut buffer = String::new();
        write!(stdout, "$ bloomy cmd> ")?;
        stdout.flush()?;
        stdin.read_line(&mut buffer)?;
        if buffer.trim() == "q" || buffer.trim() == "quit" || buffer.trim() == "exit" {
            break;
        }
        // Argparser parses the user arguments and returns argparser with Enum Commands
        argparser = parsearg(&mut buffer)?;
        // Enum commands are matched and corresponding fetcher or renderer executes the commands
        // (bindings renamed to snake_case so they no longer shadow the ECmd/PCmd enum names)
        match argparser.command {
            Some(Command::Equity(equity_cmd)) => match equity_cmd {
                ECmd::Price(ticker) => {
                    fetcher.search_equity(ticker).await?;
                }
                ECmd::Overview(ticker) => {
                    fetcher.equity_overview(ticker).await?;
                }
                _ => {
                    println!("Error: Equity")
                }
            },
            Some(Command::Portfolio(portfolio_cmd)) => match portfolio_cmd {
                PCmd::List => {
                    println!("List")
                }
                PCmd::ListPort(port) => {
                    println!("List Portfolio: {}", port)
                }
                PCmd::Make(port) => {
                    // Only create the portfolio if the name is unused.
                    if let Presult::ADDED = port_tracker.add(port) {
                        println!("Portfolio Made");
                    } else {
                        println!("Portfolio name already exists, try a different name!");
                    }
                }
                _ => {
                    println!("Error: Make")
                }
            },
            Some(Command::Market) => display_stock(5).await?,
            Some(Command::Help) => println!("{}", "Display Help".cyan()),
            Some(Command::Load) => fetcher.search_equity_demo("ibm".to_string()).await?,
            _ => println!("Error"),
        }
        stdout.flush()?;
    }
    Ok(())
}
// Derives custom errors using thiserror crate
/// Unified application error: transparently wraps the error types of every
/// dependency we call into, so `?` works across terminal, I/O, HTTP and
/// JSON code paths.
#[derive(Debug, thiserror::Error)]
enum Error {
    // Terminal handling failures.
    #[error(transparent)]
    CrosstermError(#[from] crossterm::ErrorKind),
    // Standard I/O failures (stdin/stdout).
    #[error(transparent)]
    IoError(#[from] io::Error),
    // HTTP client failures.
    #[error(transparent)]
    ReqwestError(#[from] reqwest::Error),
    // JSON (de)serialization failures.
    #[error(transparent)]
    SerdeError(#[from] serde_json::Error),
}
|
use bigint::U256;
use std::cmp;
use asm::opcode::*;
use errors::{Error, Result};
/// A decoded EVM instruction.
///
/// Opcode families that differ only by an index (PUSHn/DUPn/SWAPn/LOGn)
/// are collapsed into a single variant carrying that operand.
#[derive(Debug, PartialEq)]
pub enum Instruction {
    // Stop and Arithmetic Operations
    STOP,
    ADD,
    MUL,
    SUB,
    DIV,
    SDIV,
    MOD,
    SMOD,
    ADDMOD,
    MULMOD,
    EXP,
    SIGNEXTEND,
    LT,
    GT,
    SLT,
    SGT,
    EQ,
    ISZERO,
    AND,
    OR,
    XOR,
    NOT,
    BYTE,
    // SHA3
    SHA3,
    // Environmental information
    ADDRESS,
    BALANCE,
    ORIGIN,
    CALLER,
    CALLVALUE,
    CALLDATALOAD,
    CALLDATASIZE,
    CALLDATACOPY,
    CODESIZE,
    CODECOPY,
    GASPRICE,
    EXTCODESIZE,
    EXTCODECOPY,
    RETURNDATASIZE,
    RETURNDATACOPY,
    BLOCKHASH,
    COINBASE,
    TIMESTAMP,
    NUMBER,
    DIFFICULTY,
    GASLIMIT,
    // Stack, Memory, Storage and Flow Operations
    POP,
    MLOAD,
    MSTORE,
    MSTORE8,
    SLOAD,
    SSTORE,
    JUMP,
    JUMPI,
    PC,
    MSIZE,
    GAS,
    JUMPDEST,
    // Push Operations — the immediate operand, zero-padded to 32 bytes.
    PUSH(U256),
    // Duplication Operations — stack depth to duplicate (1..=16).
    DUP(usize),
    // Exchange Operations — stack depth to swap with (1..=16).
    SWAP(usize),
    // Logging Operations — number of topics (0..=4).
    LOG(usize),
    // System Operations
    CREATE,
    CALL,
    CALLCODE,
    RETURN,
    DELEGATECALL,
    STATICCALL,
    REVERT,
    INVALID,
    SELFDESTRUCT,
}
/// Cursor over raw EVM bytecode that decodes one instruction at a time.
pub struct ProgramReader {
    // The raw bytecode being decoded.
    code: Vec<u8>,
    // Byte offset of the next opcode to decode (the program counter).
    pub position: usize,
}
impl ProgramReader {
    /// Creates a reader positioned at the start of `code`.
    pub fn new(code: Vec<u8>) -> Self {
        Self { code, position: 0 }
    }

    /// Reads `size` bytes from the current position, advancing the cursor.
    ///
    /// Reads that run past the end of the code are zero-padded to `size`,
    /// matching EVM semantics for PUSH operands near the end of the code.
    fn read_bytes(&mut self, size: usize) -> Result<Vec<u8>> {
        let start = self.position;
        if start >= self.code.len() {
            // Entirely out of bounds: all zeros; the cursor is not moved
            // (it is already at/past the end, so `is_done` stays true).
            return Ok(vec![0; size]);
        }
        let end = cmp::min(start + size, self.code.len());
        let mut result = self.code[start..end].to_vec();
        self.position = end;
        // Pad a truncated read up to the requested size.
        result.resize(size, 0);
        Ok(result)
    }

    /// True once the cursor has consumed every byte of the program.
    pub fn is_done(&self) -> bool {
        self.position >= self.code.len()
    }

    /// Total program size in bytes.
    pub fn size(&self) -> usize {
        self.code.len()
    }

    /// Moves the cursor to the absolute byte offset `position`.
    pub fn jump(&mut self, position: usize) {
        self.position = position;
    }

    /// Decodes the instruction at the cursor and advances past it
    /// (including any immediate operand).
    ///
    /// # Errors
    /// - `Error::PcOverflow` when the cursor is already past the end.
    /// - `Error::InvalidOpcode` for INVALID or unrecognized opcodes.
    pub fn next_instruction(&mut self) -> Result<Instruction> {
        if self.position >= self.code.len() {
            return Err(Error::PcOverflow);
        }
        let opcode = self.code[self.position];
        self.position += 1;
        let instruction = match opcode {
            STOP => Instruction::STOP,
            ADD => Instruction::ADD,
            MUL => Instruction::MUL,
            SUB => Instruction::SUB,
            DIV => Instruction::DIV,
            SDIV => Instruction::SDIV,
            MOD => Instruction::MOD,
            SMOD => Instruction::SMOD,
            ADDMOD => Instruction::ADDMOD,
            MULMOD => Instruction::MULMOD,
            EXP => Instruction::EXP,
            SIGNEXTEND => Instruction::SIGNEXTEND,
            LT => Instruction::LT,
            GT => Instruction::GT,
            SLT => Instruction::SLT,
            SGT => Instruction::SGT,
            EQ => Instruction::EQ,
            ISZERO => Instruction::ISZERO,
            AND => Instruction::AND,
            OR => Instruction::OR,
            XOR => Instruction::XOR,
            NOT => Instruction::NOT,
            BYTE => Instruction::BYTE,
            SHA3 => Instruction::SHA3,
            ADDRESS => Instruction::ADDRESS,
            BALANCE => Instruction::BALANCE,
            ORIGIN => Instruction::ORIGIN,
            CALLER => Instruction::CALLER,
            CALLVALUE => Instruction::CALLVALUE,
            CALLDATALOAD => Instruction::CALLDATALOAD,
            CALLDATASIZE => Instruction::CALLDATASIZE,
            CALLDATACOPY => Instruction::CALLDATACOPY,
            CODESIZE => Instruction::CODESIZE,
            CODECOPY => Instruction::CODECOPY,
            GASPRICE => Instruction::GASPRICE,
            EXTCODESIZE => Instruction::EXTCODESIZE,
            EXTCODECOPY => Instruction::EXTCODECOPY,
            RETURNDATASIZE => Instruction::RETURNDATASIZE,
            RETURNDATACOPY => Instruction::RETURNDATACOPY,
            BLOCKHASH => Instruction::BLOCKHASH,
            COINBASE => Instruction::COINBASE,
            TIMESTAMP => Instruction::TIMESTAMP,
            NUMBER => Instruction::NUMBER,
            DIFFICULTY => Instruction::DIFFICULTY,
            GASLIMIT => Instruction::GASLIMIT,
            POP => Instruction::POP,
            MLOAD => Instruction::MLOAD,
            MSTORE => Instruction::MSTORE,
            MSTORE8 => Instruction::MSTORE8,
            SLOAD => Instruction::SLOAD,
            SSTORE => Instruction::SSTORE,
            JUMP => Instruction::JUMP,
            JUMPI => Instruction::JUMPI,
            PC => Instruction::PC,
            MSIZE => Instruction::MSIZE,
            GAS => Instruction::GAS,
            JUMPDEST => Instruction::JUMPDEST,
            // `..=` replaces the deprecated `...` range-pattern syntax
            // (a hard error in edition 2021); behavior is identical.
            PUSH1..=PUSH32 => {
                let size = usize::from(opcode - PUSH1 + 1);
                // position is automatically incremented when reading bytes
                let res = self.read_bytes(size);
                match res {
                    Ok(bytes) => Instruction::PUSH(U256::from(bytes.as_slice())),
                    Err(_) => return Err(Error::MissingOperand),
                }
            }
            DUP1..=DUP16 => {
                let size = (opcode - DUP1 + 1) as usize;
                Instruction::DUP(size)
            }
            SWAP1..=SWAP16 => {
                let size = (opcode - SWAP1 + 1) as usize;
                Instruction::SWAP(size)
            }
            LOG0..=LOG4 => {
                let size = (opcode - LOG0) as usize;
                Instruction::LOG(size)
            }
            CREATE => Instruction::CREATE,
            CALL => Instruction::CALL,
            CALLCODE => Instruction::CALLCODE,
            RETURN => Instruction::RETURN,
            DELEGATECALL => Instruction::DELEGATECALL,
            STATICCALL => Instruction::STATICCALL,
            REVERT => Instruction::REVERT,
            INVALID => return Err(Error::InvalidOpcode(opcode)),
            SELFDESTRUCT => Instruction::SELFDESTRUCT,
            _ => return Err(Error::InvalidOpcode(opcode)),
        };
        Ok(instruction)
    }
}
#[cfg(test)]
mod tests {
    use asm::instruction::{Instruction, ProgramReader};
    use asm::opcode::*;
    use bigint::U256;
    use errors::Error;

    /// A PUSH1 with its operand decodes correctly, and reading past the
    /// end of the code yields `PcOverflow`.
    #[test]
    fn test_program_reader() {
        let mut reader = ProgramReader::new(vec![PUSH1, 0x1]);
        assert_eq!(
            reader.next_instruction().unwrap(),
            Instruction::PUSH(U256::from(0x1))
        );
        assert_eq!(reader.next_instruction(), Err(Error::PcOverflow));
    }

    /// Every PUSH opcode with a missing operand still decodes, because
    /// `read_bytes` zero-pads past the end of the code.
    #[test]
    fn test_program_reader_no_operand() {
        // Inclusive `..=` so PUSH32 is exercised too; the previous
        // exclusive `..` range silently skipped it.
        for push in PUSH1..=PUSH32 {
            let code = vec![push];
            let mut reader = ProgramReader::new(code);
            assert!(reader.next_instruction().is_ok());
        }
    }
}
|
use crate::Error;
use chrono::{DateTime, TimeZone, Utc};
use oauth2::AccessToken;
use serde::{de, Deserialize, Deserializer};
use std::str::FromStr;
/// Wire-format token response as deserialized straight from JSON;
/// converted into the richer [`LoginResponse`] by `from_base_response`.
#[derive(Debug, Clone, Deserialize)]
struct _LoginResponse {
    token_type: String,
    expires_in: u64,
    ext_expires_in: u64,
    // Epoch-seconds encoded as strings in the payload; parsed into
    // `DateTime<Utc>` during conversion.
    expires_on: Option<String>,
    not_before: Option<String>,
    resource: Option<String>,
    // Raw token string; wrapped in `AccessToken` during conversion.
    access_token: String,
}
/// Parsed token response with timestamps decoded and the access token
/// wrapped in oauth2's redacting [`AccessToken`] type.
#[derive(Debug, Clone)]
pub struct LoginResponse {
    pub token_type: String,
    pub expires_in: u64,
    pub ext_expires_in: u64,
    pub expires_on: Option<DateTime<Utc>>,
    pub not_before: Option<DateTime<Utc>>,
    pub resource: Option<String>,
    pub access_token: AccessToken,
}
impl FromStr for LoginResponse {
type Err = Error;
fn from_str(s: &str) -> Result<Self, Self::Err> {
Ok(serde_json::from_str(s)?)
}
}
impl<'de> Deserialize<'de> for LoginResponse {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
let resp = _LoginResponse::deserialize(deserializer)?;
LoginResponse::from_base_response(resp).map_err(de::Error::custom)
}
}
impl LoginResponse {
pub fn access_token(&self) -> &AccessToken {
&self.access_token
}
fn from_base_response(r: _LoginResponse) -> Result<LoginResponse, Error> {
let expires_on: Option<DateTime<Utc>> = match r.expires_on {
Some(d) => Some(Utc.timestamp(d.parse()?, 0)),
None => None,
};
let not_before: Option<DateTime<Utc>> = match r.not_before {
Some(d) => Some(Utc.timestamp(d.parse()?, 0)),
None => None,
};
Ok(LoginResponse {
token_type: r.token_type,
expires_in: r.expires_in,
ext_expires_in: r.ext_expires_in,
expires_on,
not_before,
resource: r.resource,
access_token: AccessToken::new(r.access_token),
})
}
}
|
use anyhow::{anyhow, bail, Result};
use reqwest::blocking::Client;
use serde::{de::DeserializeOwned, Serialize};
use serde_json::Value;
use std::collections::HashMap;
/// A simple HTTP wrapper for communicating with an ElasticSearch database.
pub struct Database {
    // Base URL of the ElasticSearch instance (no trailing slash).
    url: String,
    // When true, no HTTP requests are made and writes pretend to succeed.
    dry_run: bool,
}
impl Database {
    /// Builds a client for the ElasticSearch instance at `url`.
    ///
    /// When `dry_run` is true, no HTTP requests are issued.
    pub fn new(url: String, dry_run: bool) -> Self {
        Self {
            // Normalize so later `format!` calls can always append "/...".
            // (char pattern instead of single-char &str — same behavior, avoids
            // clippy::single_char_pattern)
            url: url.trim_end_matches('/').to_string(),
            dry_run,
        }
    }

    /// Use the ElasticSearch [Get
    /// API](https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-get.html)
    /// to verify if a document exists in `index` with the given `id`.
    #[allow(dead_code)]
    pub fn exists(&self, index: &str, id: &str) -> Result<bool> {
        let url = format!("{}/{}/_doc/{}", self.url, index, id);
        if self.dry_run {
            // Dry runs optimistically report existence.
            Ok(true)
        } else {
            let client = Client::new();
            // HEAD avoids transferring the document body.
            let response = client.head(url).send()?;
            Ok(response.status().is_success())
        }
    }

    /// Retrieve an object from the database, if it exists.
    ///
    /// # Errors
    /// Fails on transport errors, when the payload does not deserialize to
    /// `T` (e.g. a not-found response), or always in dry-run mode.
    // (unused declared lifetime `'a` removed — clippy::extra_unused_lifetimes)
    pub fn get<T>(&self, index: &str, id: &str) -> Result<T>
    where
        T: DeserializeOwned,
    {
        let url = format!("{}/{}/_doc/{}", self.url, index, id);
        if self.dry_run {
            bail!("could not retrieve object with ID: {}", id);
        } else {
            let client = Client::new();
            let response = client.get(url).send()?;
            let bytes = response.bytes()?;
            Ok(serde_json::from_slice(bytes.as_ref())?)
        }
    }

    /// Create an object in the database, reusing an existing object if
    /// possible. This function handles several cases:
    /// 1. if the ID is not used in the database, create the object
    /// 2. if the ID is used and the existing object matches `object`, simply
    ///    return the ID without creating a new database entry
    /// 3. if the ID is used and the existing object does not match `object`,
    ///    append a `!` to the ID and retry (up to 5 times).
    // (unused declared lifetime `'a` removed — clippy::extra_unused_lifetimes)
    pub fn create_if_not_exists<T>(&self, index: &str, object: &T, id: &str) -> Result<String>
    where
        T: DeserializeOwned + Serialize + PartialEq,
    {
        let mut id = id.to_string();
        for _ in 0..NUM_RETRIES {
            match self.get(index, &id) {
                Ok(stored_object) => {
                    if object == &stored_object {
                        // Case #2: the same object already exists in the
                        // database; simply return the ID.
                        return Ok(id);
                    } else {
                        // Case #3: a different object exists with the same ID;
                        // change the ID and retry.
                        id.push('!');
                    }
                }
                Err(_) => {
                    // Case #1: no object exists with the ID; create it.
                    // NOTE(review): any `get` failure (including transport
                    // errors) is treated as "does not exist" here.
                    return self.create(index, object, Some(&id));
                }
            }
        }
        Err(anyhow!("failed to find a usable ID"))
    }

    /// Use the ElasticSearch [Index
    /// API](https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-index_.html)
    /// to add a document to `index`, optionally with the given `id`.
    ///
    /// Returns the ID assigned by ElasticSearch (or the provided/placeholder
    /// ID in dry-run mode).
    pub fn create<T>(&self, index: &str, object: &T, id: Option<&str>) -> Result<String>
    where
        T: Serialize,
    {
        log::debug!("Creating record in '{}' with ID {:?}", index, id);
        let url = if let Some(id) = id {
            format!("{}/{}/_doc/{}", self.url, index, id)
        } else {
            format!("{}/{}/_doc", self.url, index)
        };
        let body = serde_json::to_string(object)?;
        log::trace!("Record body: {}", &body);
        if self.dry_run {
            Ok(if let Some(id) = id {
                id.to_string()
            } else {
                "dry-run-id".to_string()
            })
        } else {
            let client = Client::new();
            let response = client
                .post(url)
                .header("Content-Type", "application/json")
                .body(body)
                .send()?;
            // Read the status before consuming the body.
            let success = response.status().is_success();
            let content: HashMap<String, Value> = serde_json::from_slice(&response.bytes()?)?;
            log::debug!("ElasticSearch response: {:?}", content);
            if success {
                // A successful index response always carries the assigned "_id".
                let id = content.get("_id").unwrap().as_str().unwrap().to_string();
                Ok(id)
            } else {
                bail!("Failed to create record: {:?}", content)
            }
        }
    }

    /// Use the ElasticSearch [Bulk
    /// API](https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-bulk.html)
    /// to add many objects to `index`. This should be significantly faster than
    /// `create`.
    pub fn create_batched<T>(&self, index: &str, objects: &[T]) -> Result<()>
    where
        T: Serialize,
    {
        log::debug!("Batching up {} records to index '{}'", objects.len(), index);
        let url = format!("{}/{}/_bulk", self.url, index);
        // Capture all of the records as index requests.
        // NDJSON format: an action line followed by the document, per record.
        let mut body = vec![];
        for o in objects {
            body.push("{\"index\": {}}\n".to_string());
            body.push(format!("{}\n", serde_json::to_string(o)?));
        }
        let body = body.concat();
        // Upload the entire batch request to ElasticSearch.
        if self.dry_run {
            log::trace!("Record body: {}", &body);
            Ok(())
        } else {
            let client = Client::new();
            let response = client
                .post(url)
                .header("Content-Type", "application/x-ndjson")
                .body(body)
                .send()?;
            let success = response.status().is_success();
            let content: HashMap<String, Value> = serde_json::from_slice(&response.bytes()?)?;
            log::debug!("ElasticSearch response: {:?}", content);
            if success {
                Ok(())
            } else {
                bail!("Failed to batch-create records: {:?}", content)
            }
        }
    }
}
const NUM_RETRIES: i32 = 5;
|
/*
На олимпиаду по программированию приходит N человек. На регистрации вместо
своего имени они передают организатору шифр — строку, состоящую из букв и
цифр. Гарантируется, что в этой строке содержится единственная
последовательность цифр, образующая целое положительное число M. Если
символы шифра, стоящие на позициях кратных M (нумерация с единицы),
составляют корректное имя участника (регистр не учитывается), он может
пройти на олимпиаду. Желающих попасть на олимпиаду достаточно много,
поэтому участники могут шифровать одно и то же имя несколько раз — в таком
случае проходит только первый из них. Помогите организаторам определить,
кого из пришедших участников нужно пустить на олимпиаду.
*/
use std::collections::HashMap;
pub fn main() {
    // First line: number of registered participants and number of arrivals.
    let mut header = String::new();
    std::io::stdin()
        .read_line(&mut header)
        .expect("Failed to read a line");
    let mut counts = header
        .trim()
        .split(" ")
        .map(|token| token.parse::<usize>().expect("Please, enter valid number"));
    let participants = counts.next().unwrap();
    let comes = counts.next().unwrap();
    // Map of lowercased name -> original-cased name (lookup is case-insensitive).
    let mut names: HashMap<String, String> = HashMap::with_capacity(participants);
    for _ in 0..participants {
        let mut line = String::new();
        std::io::stdin()
            .read_line(&mut line)
            .expect("Failed to read a line");
        let original = line.trim().to_string();
        names.insert(original.to_lowercase(), original);
    }
    let mut admitted: Vec<String> = Vec::new();
    for _ in 0..comes {
        let mut cipher = String::new();
        std::io::stdin()
            .read_line(&mut cipher)
            .expect("Failed to read a line");
        let code = cipher.trim();
        // The unique digit run embedded in the cipher forms the step M.
        let step: usize = code
            .chars()
            .filter(|c| c.is_ascii_digit())
            .collect::<String>()
            .parse()
            .unwrap();
        // Characters at 1-based positions divisible by M spell the name.
        let decoded: String = code
            .chars()
            .skip(step - 1)
            .step_by(step)
            .collect::<String>()
            .to_lowercase();
        // Each name admits only its first successful decoder, so remove it.
        if let Some(original) = names.remove(&decoded) {
            admitted.push(original);
        }
    }
    if admitted.is_empty() {
        println!("NO");
    } else {
        println!("{}", admitted.join("\n"));
    }
}
|
use std::collections::HashMap;
use std::io::{self, BufRead};
/// A single grammar rule from the puzzle input.
#[derive(PartialEq, PartialOrd, Eq, Ord, Clone, Debug)]
enum Rule {
    // Reference to another rule by its id.
    Number(usize),
    // Match a single literal character.
    Char(char),
    // Alternation: any one sub-rule may match.
    Union(Vec<Rule>),
    // Sequence: all sub-rules must match in order.
    Concat(Vec<Rule>),
}
/// Rule id -> rule body.
type Rules = HashMap<usize, Rule>;
/// Parses the right-hand side of a rule line.
///
/// Dispatch order matters: a quoted literal, then an alternation
/// (" | "), then a sequence (space-separated), then a bare rule id.
fn parse_rule_value(value: &str) -> Option<Rule> {
    if value.starts_with('"') {
        // Literal form like `"a"`: take the char right after the quote.
        value.chars().nth(1).map(Rule::Char)
    } else if value.contains(" | ") {
        let branches = value.split(" | ").filter_map(parse_rule_value).collect();
        Some(Rule::Union(branches))
    } else if value.contains(' ') {
        let parts = value.split(' ').filter_map(parse_rule_value).collect();
        Some(Rule::Concat(parts))
    } else {
        value.parse().ok().map(Rule::Number)
    }
}
/// Parse one "<id>: <value>" rule line into its id and parsed body.
fn parse_rule(line: &str) -> Option<(usize, Rule)> {
    let mut parts = line.split(": ");
    let id = parts.next()?.parse().ok()?;
    let rule = parse_rule_value(parts.next()?)?;
    // a well-formed line has exactly one ": " separator
    assert!(parts.next().is_none());
    Some((id, rule))
}
/// Split the puzzle input at the blank line: rules first, messages second.
fn parse(lines: &[String]) -> Option<(Rules, Vec<String>)> {
    let mut sections = lines.split(|line| line.is_empty());
    let rules: Rules = sections.next()?.iter().filter_map(|l| parse_rule(l)).collect();
    let data: Vec<String> = sections.next()?.to_vec();
    Some((rules, data))
}
/// Count how many messages fully match rule 0.
fn solve(rules: &Rules, data: &[String]) -> usize {
    let rule = Rule::Number(0);
    data.iter()
        // an empty remaining suffix means the whole message was consumed
        .filter(|x| do_match(rules, x, &rule).contains(&""))
        .count()
}
/// Match `x` against rule `r`, returning every suffix of `x` that can
/// remain after a successful partial match (an empty string in the result
/// means the whole input was consumed). Handles recursive rules because
/// all alternatives are explored.
fn do_match<'a>(rules: &Rules, x: &'a str, r: &Rule) -> Vec<&'a str> {
    match r {
        // indirection: look the rule up by id (panics on a dangling id)
        Rule::Number(n) => do_match(rules, x, rules.get(&n).unwrap()),
        Rule::Char(c) => {
            if x.starts_with(*c) {
                // consume exactly one character
                vec![&x[c.len_utf8()..]]
            } else {
                vec![]
            }
        }
        // Alternation: results of the first branch plus results of the
        // remaining branches (rebuilt as a smaller Union).
        // NOTE(review): the `to_vec` clones are quadratic in branch count;
        // harmless for this puzzle's small rule bodies.
        Rule::Union(rs) => match &rs[..] {
            [] => vec![],
            [r, rs @ ..] => do_match(rules, x, r)
                .iter()
                .chain(do_match(rules, x, &Rule::Union(rs.to_vec())).iter())
                .cloned()
                .collect(),
        },
        // Sequence: feed every suffix produced by the head rule into the
        // rest of the sequence.
        Rule::Concat(rs) => match &rs[..] {
            [] => vec![x],
            [r, rs @ ..] => do_match(rules, x, r)
                .iter()
                .flat_map(|y| do_match(rules, y, &Rule::Concat(rs.to_vec())))
                .collect(),
        },
    }
}
fn main() {
    // whole puzzle input from stdin
    let lines: Vec<_> = io::stdin().lock().lines().filter_map(|x| x.ok()).collect();
    let (mut rules, data) = parse(&lines).unwrap();
    let result = solve(&rules, &data);
    println!("Part 1: {}", result);
    // Part 2: rules 8 and 11 become self-recursive. `and_modify` only
    // replaces them if they already exist in the parsed table.
    rules
        .entry(8)
        .and_modify(|e| *e = parse_rule_value("42 | 42 8").unwrap());
    rules
        .entry(11)
        .and_modify(|e| *e = parse_rule_value("42 31 | 42 11 31").unwrap());
    let result = solve(&rules, &data);
    println!("Part 2: {:?}", result);
}
|
use std::io::prelude::*;
use std::fs::File;
use std::env;
use std::process;
// # Traits
// Abstract Factory (GoF) interfaces: the products (Item, Link, Tray,
// Page) and the factory that creates one consistent family of them.
trait Item {
    /// Render this item as an HTML fragment.
    fn make_html(&self) -> String;
}
/// A hyperlink product; rendering comes from `Item`.
trait Link : Item {}
/// A container product that holds other items.
trait Tray : Item {
    fn add(&mut self, item: Box<Item>);
}
/// A complete HTML page.
trait Page {
    fn make_html(&self) -> String;
}
/// Factory interface; the associated types select the concrete family.
trait Factory {
    type LinkObject;
    type TrayObject;
    type PageObject;
    fn create_link(&self, caption: String, url: String) -> Box<Self::LinkObject>;
    fn create_tray(&self, caption: String) -> Box<Self::TrayObject>;
    fn create_page(&self, title: String, author: String) -> Box<Self::PageObject>;
}
// # List
// ## ListLink
/// `Link` product of the list-style family.
struct ListLink {
    caption: String,
    url: String,
}
impl ListLink {
    fn new(caption: String, url: String) -> ListLink {
        ListLink {
            caption: caption,
            url: url,
        }
    }
}
impl Item for ListLink {
    /// Render as a list bullet containing an anchor.
    fn make_html(&self) -> String {
        format!("<li><a href=\"{}\">{}</a></li>\n", self.url, self.caption)
    }
}
impl Link for ListLink {}
// ## ListTray
/// `Tray` product of the list-style family: a captioned nested <ul>.
struct ListTray {
    caption: String,
    tray: Vec<Box<Item>>, // child items, rendered in insertion order
}
impl ListTray {
    fn new(caption: String) -> ListTray {
        ListTray {
            caption: caption,
            tray: Vec::new(),
        }
    }
}
impl Item for ListTray {
    /// Render the caption followed by the children in a nested list.
    fn make_html(&self) -> String {
        let mut buffer = "".to_string();
        buffer.push_str("<li>\n");
        buffer.push_str(&self.caption);
        buffer.push_str("\n<ul>\n");
        for item in &self.tray {
            buffer.push_str(&item.make_html());
        }
        buffer.push_str("</ul>\n");
        buffer.push_str("</li>\n");
        buffer
    }
}
impl Tray for ListTray {
    fn add(&mut self, item: Box<Item>) {
        self.tray.push(item);
    }
}
// ## ListPage
/// `Page` product of the list-style family: a full HTML page whose body
/// is one <ul> of items.
struct ListPage {
    title: String,
    author: String,
    content: Vec<Box<Item>>,
}
impl ListPage {
    fn new(title: String, author: String) -> ListPage {
        ListPage {
            title: title,
            author: author,
            content: Vec::new(),
        }
    }
    fn add(&mut self, item: Box<Item>) {
        self.content.push(item);
    }
    /// Render the page and write it to "<title>.html".
    // NOTE(review): the writeln! error is deliberately ignored (see the
    // allow below); consider surfacing write failures instead.
    #[allow(unused_must_use)]
    fn output(&self) {
        let file_name = format!("{}.html", self.title);
        let mut writer = File::create(file_name.clone()).expect("Unable to create file");
        writeln!(writer, "{}", self.make_html());
        println!("{}を作成しました。", file_name);
    }
}
impl Page for ListPage {
    fn make_html(&self) -> String {
        let mut buffer = "".to_string();
        buffer.push_str(&format!("<html><head><title>{}</title></head>\n", self.title));
        buffer.push_str("<body>\n");
        buffer.push_str(&format!("<h1>{}</h1>\n", self.title));
        buffer.push_str("<ul>\n");
        for item in &self.content {
            buffer.push_str(&item.make_html());
        }
        buffer.push_str("</ul>\n");
        buffer.push_str(&format!("<hr><address>{}</address>", self.author));
        buffer.push_str("</body></html>\n");
        buffer
    }
}
// ## ListFactory
/// Concrete factory producing the list-style product family.
struct ListFactory {}
impl Factory for ListFactory {
    type LinkObject = ListLink;
    type TrayObject = ListTray;
    type PageObject = ListPage;
    fn create_link(&self, caption: String, url: String) -> Box<Self::LinkObject> {
        Box::new(ListLink::new(caption, url))
    }
    fn create_tray(&self, caption: String) -> Box<Self::TrayObject> {
        Box::new(ListTray::new(caption))
    }
    fn create_page(&self, title: String, author: String) -> Box<Self::PageObject> {
        Box::new(ListPage::new(title, author))
    }
}
// # Table
// ## TableLink
/// `Link` product of the table-style family.
struct TableLink {
    caption: String,
    url: String,
}
impl TableLink {
    fn new(caption: String, url: String) -> TableLink {
        TableLink {
            caption: caption,
            url: url,
        }
    }
}
impl Item for TableLink {
    /// Render as a table cell containing an anchor.
    fn make_html(&self) -> String {
        format!("<td><a href=\"{}\">{}</a></td>\n", self.url, self.caption)
    }
}
impl Link for TableLink {}
// ## TableTray
/// `Tray` product of the table-style family: a captioned nested table.
struct TableTray {
    caption: String,
    tray: Vec<Box<Item>>, // child items, rendered in insertion order
}
impl TableTray {
    fn new(caption: String) -> TableTray {
        TableTray {
            caption: caption,
            tray: Vec::new(),
        }
    }
}
impl Item for TableTray {
    /// Render this tray as a single-row nested table inside a cell: a
    /// full-width caption row, then one cell per child item.
    fn make_html(&self) -> String {
        let mut buffer = "".to_string();
        buffer.push_str("<td>");
        // Fix: the opening <table ...> tag was missing its closing '>',
        // producing invalid HTML ("border=\"1\"<tr>").
        buffer.push_str("<table width=\"100%\" border=\"1\"><tr>");
        // caption row spans all child columns
        buffer.push_str(&format!(
            "<td bgcolor=\"#cccccc\" align=\"center\" colspan=\"{}\"><b>{}</b></td>",
            self.tray.len(),
            self.caption
        ));
        buffer.push_str("</tr>\n");
        buffer.push_str("<tr>\n");
        for item in &self.tray {
            buffer.push_str(&item.make_html());
        }
        buffer.push_str("</tr></table>");
        buffer.push_str("</td>");
        buffer
    }
}
impl Tray for TableTray {
    /// Append a child item; it renders as one cell of the tray's row.
    fn add(&mut self, item: Box<Item>) {
        self.tray.push(item);
    }
}
// ## TablePage
/// `Page` product of the table-style family.
struct TablePage {
    title: String,
    author: String,
    content: Vec<Box<Item>>,
}
impl TablePage {
    fn new(title: String, author: String) -> TablePage {
        TablePage {
            title: title,
            author: author,
            content: Vec::new(),
        }
    }
    fn add(&mut self, item: Box<Item>) {
        self.content.push(item);
    }
    /// Render the page and write it to "<title>.html".
    // NOTE(review): the writeln! error is deliberately ignored (see the
    // allow below); consider surfacing write failures instead.
    #[allow(unused_must_use)]
    fn output(&self) {
        let file_name = format!("{}.html", self.title);
        let mut writer = File::create(file_name.clone()).expect("Unable to create file");
        writeln!(writer, "{}", self.make_html());
        println!("{}を作成しました。", file_name);
    }
}
impl Page for TablePage {
    /// Render the full page.
    // NOTE(review): the body wraps items in <ul>...</ul> even though the
    // table family renders <td> fragments — this looks copy-pasted from
    // ListPage; confirm the intended markup.
    fn make_html(&self) -> String {
        let mut buffer = "".to_string();
        buffer.push_str(&format!("<html><head><title>{}</title></head>\n", self.title));
        buffer.push_str("<body>\n");
        buffer.push_str(&format!("<h1>{}</h1>\n", self.title));
        buffer.push_str("<ul>\n");
        for item in &self.content {
            buffer.push_str(&item.make_html());
        }
        buffer.push_str("</ul>\n");
        buffer.push_str(&format!("<hr><address>{}</address>", self.author));
        buffer.push_str("</body></html>\n");
        buffer
    }
}
// ## TableFactory
/// Concrete factory producing the table-style product family.
struct TableFactory {}
impl Factory for TableFactory {
    type LinkObject = TableLink;
    type TrayObject = TableTray;
    type PageObject = TablePage;
    fn create_link(&self, caption: String, url: String) -> Box<Self::LinkObject> {
        Box::new(TableLink::new(caption, url))
    }
    fn create_tray(&self, caption: String) -> Box<Self::TrayObject> {
        Box::new(TableTray::new(caption))
    }
    fn create_page(&self, title: String, author: String) -> Box<Self::PageObject> {
        Box::new(TablePage::new(title, author))
    }
}
/// Entry point: `cargo run list` or `cargo run table` selects which
/// concrete factory renders the demo page.
fn main() {
    let args: Vec<String> = env::args().collect();
    // Fix: `args.len() - 1 != 1` underflow-panics if argv is empty;
    // `args.len() != 2` expresses "exactly one argument" safely.
    if args.len() != 2 {
        println!("Usage: cargo run list");
        println!("Usage: cargo run table");
        process::exit(0);
    }
    match args[1].as_str() {
        "list" => create_list(),
        "table" => create_table(),
        _ => process::exit(0),
    };
}
/// Build the demo link page with the list factory and write LinkPage.html.
fn create_list() {
    let factory = ListFactory{};
    // leaf links
    let asahi = factory.create_link("朝日新聞".to_string(), "http://www.asahi.com/".to_string());
    let yomiuri = factory.create_link("読売新聞".to_string(), "http://www.yomiuri.co.jp/".to_string());
    let us_yahoo = factory.create_link("Yahoo!".to_string(), "http://www.yahoo.com/".to_string());
    let jp_yahoo = factory.create_link("Yahoo!Japan".to_string(), "http://www.yahoo.co.jp/".to_string());
    let excite = factory.create_link("Excite".to_string(), "http://www.excite.com/".to_string());
    let google = factory.create_link("Google".to_string(), "http://www.google.com/".to_string());
    // group the links into trays (trays can nest, see tray_search)
    let mut tray_news = factory.create_tray("新聞".to_string());
    tray_news.add(asahi);
    tray_news.add(yomiuri);
    let mut tray_yahoo = factory.create_tray("Yahoo!".to_string());
    tray_yahoo.add(us_yahoo);
    tray_yahoo.add(jp_yahoo);
    let mut tray_search = factory.create_tray("サーチエンジン".to_string());
    tray_search.add(tray_yahoo);
    tray_search.add(excite);
    tray_search.add(google);
    // assemble the page and write "LinkPage.html"
    let mut page = factory.create_page("LinkPage".to_string(), "結城 浩".to_string());
    page.add(tray_news);
    page.add(tray_search);
    page.output();
}
/// Same demo content as `create_list`, rendered via the table factory.
fn create_table() {
    let factory = TableFactory{};
    // leaf links
    let asahi = factory.create_link("朝日新聞".to_string(), "http://www.asahi.com/".to_string());
    let yomiuri = factory.create_link("読売新聞".to_string(), "http://www.yomiuri.co.jp/".to_string());
    let us_yahoo = factory.create_link("Yahoo!".to_string(), "http://www.yahoo.com/".to_string());
    let jp_yahoo = factory.create_link("Yahoo!Japan".to_string(), "http://www.yahoo.co.jp/".to_string());
    let excite = factory.create_link("Excite".to_string(), "http://www.excite.com/".to_string());
    let google = factory.create_link("Google".to_string(), "http://www.google.com/".to_string());
    // group the links into trays (trays can nest, see tray_search)
    let mut tray_news = factory.create_tray("新聞".to_string());
    tray_news.add(asahi);
    tray_news.add(yomiuri);
    let mut tray_yahoo = factory.create_tray("Yahoo!".to_string());
    tray_yahoo.add(us_yahoo);
    tray_yahoo.add(jp_yahoo);
    let mut tray_search = factory.create_tray("サーチエンジン".to_string());
    tray_search.add(tray_yahoo);
    tray_search.add(excite);
    tray_search.add(google);
    // assemble the page and write "LinkPage.html"
    let mut page = factory.create_page("LinkPage".to_string(), "結城 浩".to_string());
    page.add(tray_news);
    page.add(tray_search);
    page.output();
}
|
use crate::boards::Board;
use serde::{Deserialize, Serialize};
use actix_web::{HttpResponse, HttpRequest, Responder, Error};
use futures::future::{ready, Ready};
use sqlx::{postgres::{PgPoolOptions, PgRow, PgDone}, query_as};
use sqlx::query::Query;
use sqlx::{FromRow, Row, Pool, Postgres, query};
use anyhow::Result;
use crate::utils::get_unix_timestamp_ms;
// for user input
/// Payload a client submits when creating a post; `board_id` is
/// required, subject and body may be omitted.
#[derive(Deserialize, Serialize)]
pub struct PostRequest {
    pub subject: Option<String>,
    pub body: Option<String>,
    pub board_id: i32
}
// db representation
/// A row of the `posts` table. A post with `is_oppost == true` starts a
/// thread; replies carry the id of their thread's OP in `oppost_id`.
#[derive(Serialize, FromRow, Clone)]
pub struct Post {
    pub id: i32,
    pub is_oppost: bool,
    pub subject: Option<String>,
    pub body: Option<String>,
    pub created_at: i64, // unix timestamp, milliseconds (see create())
    pub board_id: i32,
    pub oppost_id: Option<i32>, // None for OP posts
    pub is_locked: bool,
}
// implementation of Responder for Post to return Post from action handler
/// Lets handlers return a `Post` directly: serialized to JSON, 200 OK.
impl Responder for Post {
    type Error = Error;
    type Future = Ready<Result<HttpResponse, Error>>;
    fn respond_to(self, req: &HttpRequest) -> Self::Future {
        // `req` is unused; serde serialization of this struct can only
        // fail on broken invariants, hence the unwrap
        let body = serde_json::to_string(&self).unwrap();
        ready(Ok(
            HttpResponse::Ok()
                .content_type("application/json")
                .body(body)
        ))
    }
}
impl Post {
pub async fn find_all_opposts(pool: &Pool<Postgres>) -> Result<Vec<Post>> {
let mut posts: Vec<Post> = vec![];
let records = query_as::<_, Post>(
r#"
SELECT
id, is_oppost, subject, body, subject, body, created_at
FROM posts
WHERE (is_oppost = TRUE)
ORDER BY created_at;
"#
).fetch_all(pool).await?;
for record in records {
// this really need a simpler conversion method
posts.push(Post {
id: record.id,
is_oppost: record.is_oppost,
body: Some(record.body.unwrap()),
subject: Some(record.subject.unwrap()),
created_at: record.created_at,
board_id: record.board_id,
oppost_id: None,
is_locked: record.is_locked,
});
}
Ok(posts)
}
pub async fn find_by_id(pool: &Pool<Postgres>, id: i32) -> Result<Post, sqlx::Error> {
let mut tx = pool.begin().await?; // transaction
let post = sqlx::query_as::<_, Post>(
r#"
SELECT id, is_oppost, subject, body, subject, body, created_at, board_id, is_locked
FROM posts
WHERE (id = $1);
"#
).bind(&id).fetch_one(&mut tx).await?;
Ok(post)
}
// get number of posts in a thread
pub async fn count_by_oppost_id(post: Self, pool: &Pool<Postgres>) -> Result<i64, sqlx::Error> {
let oppost_id: i32 = if post.is_oppost {post.id} else {post.oppost_id.unwrap()};
let count = sqlx::query!(
"SELECT COUNT(*) as count FROM posts WHERE (oppost_id = $1);", oppost_id
).fetch_all(pool).await?;
// holy fuck wtf is this
Ok(count[0].count.unwrap())
}
pub async fn find_by_oppost_id(pool: &Pool<Postgres>, oppost_id: i32) -> Result<Vec<Post>, sqlx::Error> {
let mut posts: Vec<Post> = vec![];
let records = query_as::<_, Post>(
r#"
SELECT id, is_oppost, subject, body, subject, body, created_at, is_locked
FROM posts WHERE (id = $1)
UNION
SELECT id, is_oppost, subject, body, subject, body, created_at, FALSE as is_locked
FROM posts WHERE (oppost_id = $1)
ORDER BY created_at;
"#
).bind(&oppost_id).fetch_all(pool).await?;
for record in records {
// this really need a simpler conversion method
posts.push(Post {
id: record.id,
is_oppost: record.is_oppost,
body: Some(record.body.unwrap()),
subject: Some(record.subject.unwrap()),
created_at: record.created_at,
board_id: record.board_id,
oppost_id: Some(oppost_id),
is_locked: record.is_locked,
});
}
Ok(posts)
}
pub async fn find_opposts_by_board_id(pool: &Pool<Postgres>, board_id: i32) -> Result<Vec<Post>, sqlx::Error> {
let mut posts: Vec<Post> = vec![];
let records = sqlx::query_as::<_, Post>(
r#"
SELECT id, is_oppost, subject, body, subject, body, created_at, board_id, is_locked
FROM posts
WHERE (board_id = $1, oppost = TRUE);
"#
).bind(&board_id).fetch_all(pool).await?;
for record in records {
posts.push(Post {
id: record.id,
is_oppost: record.is_oppost,
body: Some(record.body.unwrap()),
subject: Some(record.subject.unwrap()),
created_at: record.created_at,
board_id: record.board_id,
oppost_id: None,
is_locked: record.is_locked
});
}
Ok(posts)
}
pub async fn create(pool: &Pool<Postgres>, post: PostRequest, oppost_id: Option<i32>) -> Result<Post> {
let mut tx = pool.begin().await?; // transaction
let result: Post;
match oppost_id {
Some(op ) => {
result = sqlx::query_as::<_, Post>(
r#"INSERT INTO posts
(is_oppost, subject, body, created_at, board_id, oppost_id)
VALUES
($1, $2, $3, $4, $5, $6)
RETURNING id, is_oppost, subject, body, created_at, board_id
"#
)
.bind(false) // if oppost_id is supplied - always false
.bind(&post.subject.unwrap())
.bind(&post.body.unwrap())
.bind(&get_unix_timestamp_ms())
.bind(&post.board_id)
.bind(&op)
.fetch_one(&mut tx)
.await?;
},
None => {
result = sqlx::query_as::<_, Post>(
r#"INSERT INTO posts
(is_oppost, subject, body, created_at, board_id)
VALUES
($1, $2, $3, $4, $5)
RETURNING id, is_oppost, subject, body, created_at, board_id
"#
)
.bind(true) // with oppost_id - always true
.bind(&post.subject.unwrap())
.bind(&post.body.unwrap())
.bind(&get_unix_timestamp_ms())
.bind(&post.board_id)
.fetch_one(&mut tx)
.await?;
}
};
tx.commit().await?;
// run verification on whether the thread should be locked
Self::verify_thread(result.clone(), pool).await?;
Ok(result)
}
// check if thread should be locked and lock it if necessary
// this is probably shit that should be rewritten
pub async fn verify_thread(post: Self, pool: &Pool<Postgres>) -> Result<bool, sqlx::Error> {
let copied_post = post.clone();
let oppost_id: i32 = if post.is_oppost {post.id} else {post.oppost_id.unwrap()};
let posts_in_thread: i64 = Post::count_by_oppost_id(post, pool).await?;
let max_posts = Board::max_posts(pool, copied_post.board_id as i64).await?;
if posts_in_thread >= max_posts {
Post::lock_thread(oppost_id.into(), pool).await?;
return Ok(true)
}
Ok(true)
}
pub async fn lock_thread(oppost_id: i64, pool: &Pool<Postgres>) -> sqlx::Result<PgDone> {
let mut tx = pool.begin().await.unwrap(); // transaction
sqlx::query!(
r#"
UPDATE posts
SET is_locked = TRUE
WHERE (oppost_id = $1);
"#, oppost_id as i32
).execute(&mut tx).await //fetch_one(&mut tx).await?;
}
} |
// svd2rust-generated accessor types for the SECCFGR register; regenerate
// from the SVD instead of hand-editing.
#[doc = "Register `SECCFGR` reader"]
pub type R = crate::R<SECCFGR_SPEC>;
#[doc = "Register `SECCFGR` writer"]
pub type W = crate::W<SECCFGR_SPEC>;
#[doc = "Field `C2EWILA` reader - wakeup on CPU2 illegal access interrupt enable"]
pub type C2EWILA_R = crate::BitReader;
#[doc = "Field `C2EWILA` writer - wakeup on CPU2 illegal access interrupt enable"]
pub type C2EWILA_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
impl R {
    #[doc = "Bit 15 - wakeup on CPU2 illegal access interrupt enable"]
    #[inline(always)]
    pub fn c2ewila(&self) -> C2EWILA_R {
        // extract bit 15 of the raw register value
        C2EWILA_R::new(((self.bits >> 15) & 1) != 0)
    }
}
impl W {
    #[doc = "Bit 15 - wakeup on CPU2 illegal access interrupt enable"]
    #[inline(always)]
    #[must_use]
    pub fn c2ewila(&mut self) -> C2EWILA_W<SECCFGR_SPEC, 15> {
        C2EWILA_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    // unsafe per the svd2rust convention: the caller vouches that the raw
    // value is valid for this register
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "Power security configuration register \\[dual core device only\\]\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`seccfgr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`seccfgr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct SECCFGR_SPEC;
impl crate::RegisterSpec for SECCFGR_SPEC {
    type Ux = u32; // 32-bit register
}
#[doc = "`read()` method returns [`seccfgr::R`](R) reader structure"]
impl crate::Readable for SECCFGR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`seccfgr::W`](W) writer structure"]
impl crate::Writable for SECCFGR_SPEC {
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets SECCFGR to value 0x8000"]
impl crate::Resettable for SECCFGR_SPEC {
    const RESET_VALUE: Self::Ux = 0x8000; // bit 15 (C2EWILA) set at reset
}
|
//! Multiwii Serial Protocol (MSP) traffic decoder and structures
//!
//! Incomplete. Includes some structures from Cleanflight and Betaflight.
#![cfg_attr(not(feature="std"), no_std)]
#![cfg_attr(not(feature="std"), feature(alloc))]
#[cfg(not(feature="std"))]
#[macro_use]
extern crate alloc;
extern crate packed_struct;
#[macro_use]
extern crate packed_struct_codegen;
#[macro_use]
extern crate serde_derive;
extern crate serde;
mod prelude;
mod commands;
mod packet;
pub mod structs;
pub use commands::*;
pub use packet::*;
|
#![allow(unused_variables)]
// Demonstrates Rust's borrowing rules: any number of shared borrows XOR
// exactly one mutable borrow, and a move invalidating the old binding.
fn main() {
    let mut s1 = String::from("STRING 01");
    {
        // test 1: multiple shared (immutable) borrows may coexist
        let s2 = &s1;
        let s3 = &s1;
        //let s4 = &mut s1; // <- this will be error
    }
    {
        // test 2: a mutable borrow excludes every other borrow
        let s4 = &mut s1;
        //let s3 = &s1; // <- this will be error
    }
    {
        // test 3: after the move, s4 owns the string and may be borrowed;
        // s1 is no longer usable
        {
            let s4 = s1; // move
            let s2 = &s4;
        }
    }
}
|
/// Minimize sum_i (x_i - p)^2 over integer p.
/// Expanding: sum_i (x_i - p)^2 = N p^2 - 2 (sum x_i) p + sum x_i^2;
/// differentiating with respect to p: 2 N p - 2 sum(x_i) = 0,
/// so p = sum(x_i) / N = the average — the minimizer is the mean of X.
/// Since p must be an integer, a few integers around the (truncated)
/// average are checked as a safety margin and the smallest cost wins.
fn main() {
    proconio::input! {
        n: usize,
        x: [i32; n],
    }
    let sum: i32 = x.iter().sum();
    // truncated integer average (rounds toward zero)
    let p = sum / (n as i32);
    // sentinel larger than any expected cost for the given constraints
    let mut ans: i32 = 1000000000;
    // NOTE(review): -2..4 checks p-2 ..= p+3, slightly asymmetric around
    // p; the extra candidate is harmless given the truncated division.
    for k in -2..4 {
        let q = p + k;
        let mut ave = 0;
        for i in 0..n {
            ave += (x[i] - q) * (x[i] - q);
        }
        if ave < ans {
            ans = ave;
        }
        // println!("{} {}", q, ave);
    }
    println!("{}", ans);
}
use clang::*;
/// Wrapper around a canonicalized libclang `Type` (typedefs resolved).
pub struct CType<'a> {
    clang_type: Type<'a>
}
impl<'a> CType<'a> {
    /// Wrap `clang_type`, canonicalizing it first so comparisons and kind
    /// checks see through typedefs and sugar.
    pub fn new(clang_type: Type<'a>) -> CType<'a> {
        CType {
            clang_type: clang_type.get_canonical_type()
        }
    }
    /// Borrow the canonical libclang type.
    pub fn get_underlying_type(&self) -> &Type<'a> {
        &self.clang_type
    }
    /// True iff the canonical type is `void`.
    pub fn is_void(&self) -> bool {
        self.clang_type.get_kind() == TypeKind::Void
    }
    /// C spelling of this type.
    /// NOTE(review): `name` is accepted for a future declarator form but
    /// is currently unused — the returned string never embeds it.
    pub fn to_string_named(&self, name: &str) -> String {
        let _ = name; // silences the unused-parameter warning for now
        // previously bound through a needless `let mut` temporary
        match self.clang_type.get_kind() {
            TypeKind::Void => "void".to_string(),
            _ => self.clang_type.get_display_name()
        }
    }
    /// Map the C type onto its VSL spelling; exits on unsupported kinds.
    pub fn to_vsl_string(&self) -> String {
        match self.clang_type.get_kind() {
            TypeKind::Void => "Void".to_string(),
            TypeKind::Bool => "Bool".to_string(),
            TypeKind::Nullptr => "Pointer<UInt8>".to_string(),
            TypeKind::ObjCObjectPointer |
            TypeKind::ObjCClass => self.clang_type.get_display_name(),
            // every integer/float-ish kind becomes (U)Int<bits>
            TypeKind::CharS |
            TypeKind::Short |
            TypeKind::SChar |
            TypeKind::CharU |
            TypeKind::UChar |
            TypeKind::UShort |
            TypeKind::Int |
            TypeKind::UInt |
            TypeKind::Long |
            TypeKind::ULong |
            TypeKind::LongLong |
            TypeKind::ULongLong |
            TypeKind::Int128 |
            TypeKind::UInt128 |
            TypeKind::Half |
            TypeKind::Float16 |
            TypeKind::Float |
            TypeKind::Double |
            TypeKind::LongDouble |
            TypeKind::WChar => {
                format!(
                    "{}Int{}",
                    if self.clang_type.is_signed_integer() { "" } else { "U" },
                    clamp_error!(self.clang_type.get_sizeof(), "could not get sizeof {:?}", self.clang_type)
                )
            },
            _ => error_exit!("Unsupported vsl stringify type {:?}", self.clang_type)
        }
    }
}
impl<'a> ToString for CType<'a> {
    // NOTE(review): implementing `Display` would provide `to_string` for
    // free and integrate with format!; kept as-is to avoid churn.
    fn to_string(&self) -> String {
        self.to_string_named("")
    }
}
impl<'a> PartialEq for CType<'a> {
    /// Equal iff the canonical clang types compare equal.
    fn eq(&self, other: &CType) -> bool {
        self.clang_type == other.clang_type
    }
}
|
use crate::error_reporter::error;
use crate::token::Token;
use crate::token_type::TokenType;
use crate::traits::Lexer;
/// Hand-written lexer state over the raw source text.
pub struct Scanner {
    source: String,     // full source being scanned
    tokens: Vec<Token>, // tokens produced so far
    start: u32,         // offset where the current lexeme starts
    current: u32,       // offset of the next unconsumed character
    line: u32,          // current line counter for diagnostics
}
impl Lexer for Scanner {
    /// Scan the whole source and return the token stream, terminated by
    /// an EOF token.
    ///
    /// NOTE(review): the previous version kept scanned tokens in
    /// `self.tokens` but returned a local vector containing only EOF, and
    /// its shadowing locals `start`/`current`/`line` never updated the
    /// scanner's own fields.
    fn scan_tokens(&mut self) -> Vec<Token> {
        while !self.is_at_end() {
            // each token starts where the previous one ended
            self.start = self.current;
            self.scan_token();
        }
        self.tokens.push(Token {
            token: TokenType::Eof,
            lexeme: "".to_string(),
            line: self.line,
            literal: "".to_string(),
        });
        // hand out the accumulated tokens, leaving the scanner empty
        std::mem::take(&mut self.tokens)
    }
    /// Scan one token starting at the current position.
    fn scan_token(&mut self) {
        let character = self.advance();
        match character {
            '(' => self.add_token(TokenType::LeftParen),
            ')' => self.add_token(TokenType::RightParen),
            '{' => self.add_token(TokenType::LeftBrace),
            '}' => self.add_token(TokenType::RightBrace),
            ',' => self.add_token(TokenType::Comma),
            '.' => self.add_token(TokenType::Dot),
            '-' => self.add_token(TokenType::Minus),
            '+' => self.add_token(TokenType::Plus),
            ';' => self.add_token(TokenType::SemiColon),
            '*' => self.add_token(TokenType::Star),
            ' ' | '\r' | '\t' => (), // insignificant whitespace
            '\n' => self.line += 1,
            '/' => {
                if self.advance_if_next('/') {
                    // line comment: consume to end of line, stopping at
                    // EOF too (the old loop spun forever on a trailing
                    // comment because peek() returns '\0' at the end)
                    while self.peek() != '\n' && !self.is_at_end() {
                        self.advance();
                    }
                } else {
                    self.add_token(TokenType::Slash);
                }
            }
            '!' => {
                if self.advance_if_next('=') {
                    self.add_token(TokenType::BangEqual)
                } else {
                    // fix: this branch previously emitted BangEqual too
                    self.add_token(TokenType::Bang)
                }
            }
            '=' => {
                if self.advance_if_next('=') {
                    self.add_token(TokenType::EqualEqual)
                } else {
                    self.add_token(TokenType::Equal)
                }
            }
            '<' => {
                if self.advance_if_next('=') {
                    self.add_token(TokenType::LessEqual)
                } else {
                    self.add_token(TokenType::Less)
                }
            }
            '>' => {
                if self.advance_if_next('=') {
                    self.add_token(TokenType::GreaterEqual)
                } else {
                    self.add_token(TokenType::Greater)
                }
            }
            '"' => self.string(),
            _ => error(self.line, "Unexpected character"),
        }
    }
    /// Push a token with no literal payload.
    fn add_token(&mut self, token: TokenType) {
        self.add_token_literal(token, "");
    }
    /// Push a token whose lexeme is source[start..current].
    ///
    /// NOTE(review): lexeme extraction walks chars while `advance` walks
    /// bytes, so non-ASCII source is not handled consistently — confirm
    /// input is expected to be ASCII. The previous version took
    /// `current` characters instead of `current - start` and hard-coded
    /// line 1.
    fn add_token_literal(&mut self, token: TokenType, literal: &str) {
        let lexeme = self
            .source
            .chars()
            .skip(self.start as usize)
            .take((self.current - self.start) as usize)
            .collect();
        self.tokens.push(Token {
            token,
            lexeme,
            line: self.line,
            literal: literal.to_string(),
        });
    }
    /// Consume and return the character at the current position.
    fn advance(&mut self) -> char {
        // read first, then advance: the old order skipped the first byte
        // and could index one past the end of the source
        let next = self.source.as_bytes()[self.current as usize];
        self.current += 1;
        next as char
    }
    /// Consume the next character only if it equals `expected`.
    fn advance_if_next(&mut self, expected: char) -> bool {
        if self.is_at_end() {
            return false;
        }
        // `current` already points at the next unconsumed character
        let next_char = self.source.as_bytes()[self.current as usize] as char;
        if next_char != expected {
            return false;
        }
        self.current += 1;
        true
    }
    /// Look at the next unconsumed character without consuming it.
    fn peek(&self) -> char {
        if self.is_at_end() {
            return '\0';
        }
        self.source.as_bytes()[self.current as usize] as char
    }
    fn is_at_end(&self) -> bool {
        self.current >= self.source.len() as u32
    }
    /// Scan a double-quoted string; the opening quote is already consumed.
    fn string(&mut self) {
        while self.peek() != '"' && !self.is_at_end() {
            if self.peek() == '\n' {
                self.line += 1; // strings may span lines
            }
            self.advance();
        }
        if self.is_at_end() {
            error(self.line, "Unterminated string.");
            return;
        }
        self.advance(); // consume the closing quote
        // literal value: the text strictly between the two quotes
        let value: String = self
            .source
            .chars()
            .skip((self.start + 1) as usize)
            .take((self.current - self.start - 2) as usize)
            .collect();
        self.add_token_literal(TokenType::STRING, value.as_str());
    }
}
|
use math::Rect;
use draw::{Bounded, CanvasRead, CanvasWrite};
use tool::{Editor, Brush, PreviewContext};
/// Preview paint target: a raw 4-byte-per-pixel buffer plus the drawing
/// bounds and the editor supplying palette/brush state.
pub struct Prev<'a> {
    pub ptr: *mut u8,    // start of the pixel buffer (not owned here)
    pub rect: Rect<i32>, // drawable bounds
    pub editor: &'a Editor,
}
impl<'a> Bounded<i32> for Prev<'a> {
    #[inline(always)]
    // the preview's drawable area is exactly `rect`
    fn bounds(&self) -> Rect<i32> { self.rect }
}
impl<'a> CanvasWrite<u8, i32> for Prev<'a> {
    #[inline(always)]
    // SAFETY (presumed trait contract — confirm against the trait docs):
    // (x, y) must lie inside bounds() and `ptr` must point at a buffer of
    // at least width * height * 4 writable bytes.
    unsafe fn set_unchecked(&mut self, x: i32, y: i32, color: u8) {
        let m = self.editor.image.as_receiver();
        // the transparent palette index maps to 0, everything else to its
        // palette entry, forced little-endian
        let c = if self.editor.transparent() == Some(color) {
            0
        } else {
            m.palette[color].to_le()
        };
        // 4 bytes per pixel, row-major addressing by the image width
        let ptr = self.ptr.add((m.width * y as usize + x as usize) * 4);
        *ptr.add(0) = ( c & 0xFF) as u8;
        *ptr.add(1) = ((c >> 8) & 0xFF) as u8;
        *ptr.add(2) = ((c >> 16) & 0xFF) as u8;
        *ptr.add(3) = ((c >> 24) & 0xFF) as u8;
    }
}
impl<'a> CanvasRead<u8, i32> for Prev<'a> {
    #[inline(always)]
    // reads come from the editor's image, not from the preview buffer
    unsafe fn at_unchecked(&self, x: i32, y: i32) -> u8 {
        self.editor.at_unchecked(x, y)
    }
}
impl<'a> PreviewContext<i32, u8> for Prev<'a> {
    /// Current drawing color (a palette index, see set_unchecked).
    fn color(&self) -> u8 { self.editor.color() }
    /// Current brush and its bounding rect.
    fn brush(&self) -> (Brush, Rect<i32>) { self.editor.brush() }
}
|
use std::io::Error;
use std::fs::File;
use std::io::prelude::*;
/// Read the entire contents of `filename` into a `String`.
fn read_input(filename: &str) -> Result<String, Error> {
    // fs::read_to_string performs the same open + read_to_string dance
    std::fs::read_to_string(filename)
}
fn main() {
    // AoC 2017 day 10 (knot hash); both parts share input.txt
    match read_input("input.txt") {
        Ok(input) => {
            println!("Part 1 answer: {}", answer1(&input));
            // the commented lines are the puzzle's known test vectors
            //println!("Part 2 answer: {}", answer2(""));
            //println!("Part 2 answer: {}", answer2("AoC 2017"));
            //println!("Part 2 answer: {}", answer2("1,2,3"));
            //println!("Part 2 answer: {}", answer2("1,2,4"));
            println!("Part 2 answer: {}", answer2(&input));
        },
        Err(e) => println!("Error: {}", e),
    }
}
/// Part 1: run one round of knot-tying with the comma-separated lengths
/// from `input`, then multiply the first two list values.
fn answer1(input: &str) -> i32 {
    // non-numeric entries are silently skipped, as before
    let lengths: Vec<u8> = input
        .trim()
        .split(',')
        .filter_map(|s| s.parse::<u8>().ok())
        .collect();
    let mut knot_list = KnotList::new();
    knot_list.apply_lengths(&lengths);
    i32::from(knot_list.list[0]) * i32::from(knot_list.list[1])
}
/// Part 2: treat the input bytes as lengths, append the standard suffix,
/// run 64 rounds and return the 32-hex-digit dense hash.
fn answer2(input: &str) -> String {
    let mut lengths: Vec<u8> = input
        .trim()
        .bytes()
        .collect();
    // suffix mandated by the puzzle
    lengths.extend_from_slice(&[17, 31, 73, 47, 23]);
    let mut knot_list = KnotList::new();
    for _ in 0..64 {
        knot_list.apply_lengths(&lengths);
    }
    knot_list.get_hash_string()
}
/// Circular list of the 256 byte values plus the round state. `offset`
/// tracks the virtual start so the list never has to be rotated.
struct KnotList {
    list: Vec<u8>,
    offset: usize,
    skip_size: usize,
}
impl KnotList {
    /// Fresh list 0..=255 with zeroed round state.
    fn new() -> Self {
        KnotList {
            list: (0u8..=255).collect(),
            offset: 0,
            skip_size: 0,
        }
    }
    /// Run one round: one reverse-and-advance per length.
    fn apply_lengths(&mut self, lengths: &[u8]) {
        for &length in lengths {
            self.reverse_length_and_advance(length);
        }
    }
    /// Reverse `length` elements at the current position, then advance by
    /// length + skip_size and bump skip_size (the knot-hash step).
    fn reverse_length_and_advance(&mut self, length: u8) {
        let length = length as usize;
        for i in 0..length / 2 {
            let j = length - i - 1;
            let a = self.get(i);
            let b = self.get(j);
            self.set(i, b);
            self.set(j, a);
        }
        self.offset += length + self.skip_size;
        self.skip_size += 1;
    }
    /// Value at `index` relative to the virtual start, wrapping around.
    fn get(&self, index: usize) -> u8 {
        self.list[(self.offset + index) % self.list.len()]
    }
    /// Store `value` at `index` relative to the virtual start.
    fn set(&mut self, index: usize, value: u8) {
        let len = self.list.len();
        self.list[(self.offset + index) % len] = value;
    }
    /// Dense hash: XOR-fold each 16-byte chunk of the sparse hash.
    fn get_hash(&self) -> Vec<u8> {
        self.list
            .chunks(16)
            .map(|chunk| chunk.iter().fold(0u8, |acc, &x| acc ^ x))
            .collect()
    }
    /// Dense hash rendered as lowercase hex, two digits per byte.
    fn get_hash_string(&self) -> String {
        self.get_hash()
            .iter()
            .map(|b| format!("{:02x}", b))
            .collect()
    }
}
|
use std::process::*;
use std::time::*;
use std::io::Result;
use std::io::Write;
use std::thread::sleep;
// Queued after the station so something plays if the stream drops.
const FALLBACK: &str = "fallback.mp3";
// Stations cycled through by change_url().
// NOTE(review): statics are conventionally SCREAMING_SNAKE_CASE (`URLS`);
// renaming would touch every use site, so it is only flagged here.
static urls: [&str; 1] = [
    "http://direct.franceinter.fr/live/franceinter-hifi.aac"
];
/// Wrapper around a VLC child process driven over its rc (stdin) console.
pub struct Player {
    // show must go on, so if there is a problem we still have
    // a player structure without the process
    process: Option<Child>,
    current: usize, // index into `urls` of the station currently queued
}
impl Player {
    /// Create a player without spawning VLC; `init` (or the first command
    /// routed through `alive`) starts the process lazily.
    pub fn new() -> Self {
        Player { process: None, current: 0 }
    }
    /// Spawn VLC, set an initial volume and queue the current station.
    pub fn init(&mut self) -> Result<()> {
        self.process = Some(Player::spawn()?);
        self.send_command("volume 256")?;
        self.setup()?;
        Ok(())
    }
    /// Switch to the next (`true`) or previous (`false`) station, with
    /// wrap-around, and start playing it.
    pub fn change_url(&mut self, next: bool) -> Result<()> {
        self.alive()?;
        if next {
            self.current += 1;
            if self.current >= urls.len() {
                self.current = 0;
            }
        } else {
            if self.current == 0 {
                self.current = urls.len()-1;
            } else {
                self.current -= 1;
            }
        }
        self.requeue(true)
    }
    pub fn play(&mut self) -> Result<()> {
        self.alive()?;
        self.send_command("play")
    }
    pub fn stop(&mut self) -> Result<()> {
        self.alive()?;
        self.send_command("stop")
    }
    pub fn voldown(&mut self) -> Result<()> {
        self.alive()?;
        self.send_command("voldown")
    }
    pub fn volup(&mut self) -> Result<()> {
        self.alive()?;
        self.send_command("volup")
    }
    /// Ensure a live VLC process: spawn on first use, respawn if the
    /// previous one has exited.
    fn alive(&mut self) -> Result<()> {
        match self.process {
            None => self.init(),
            Some(ref mut p) => match p.try_wait()? {
                Some(_) => self.respawn(),
                None => Ok(()),
            }
        }
    }
    fn respawn(&mut self) -> Result<()> {
        self.process = Some(Player::spawn()?);
        self.setup()
    }
    /// Clear VLC's playlist, queue the current station followed by the
    /// fallback file; `play` starts playback immediately.
    fn requeue(&mut self, play: bool) -> Result<()> {
        self.send_command("clear")?;
        let action = if play { "add" } else { "enqueue" };
        let cmd = format!("{} {}", action, urls[self.current]);
        self.send_command(&cmd)?;
        let cmd = format!("enqueue {}", FALLBACK);
        self.send_command(&cmd)
    }
    fn setup(&mut self) -> Result<()> {
        // give the freshly spawned process time to open its rc console
        sleep(Duration::from_millis(300));
        self.requeue(false)?;
        self.send_command("loop on")
    }
    /// Write one command line to VLC's rc interface.
    // NOTE(review): returning `last_os_error` when no process exists is
    // misleading (errno is unrelated); a dedicated error would be clearer.
    fn send_command(&mut self, command: &str) -> Result<()> {
        let process = match self.process {
            Some(ref mut p) => p,
            None => return Err(std::io::Error::last_os_error()),
        };
        let stdin = process.stdin.as_mut().expect("Failed to open stdin");
        let cmd = format!("{}\n", command);
        println!("Sending {}", command);
        stdin.write_all(cmd.as_bytes())?;
        Ok(())
    }
    /// Start `vlc -Irc` with a piped stdin for the rc interface.
    fn spawn() -> Result<Child> {
        Command::new("vlc")
            .arg("-Irc")
            .stdin(Stdio::piped())
            .spawn()
    }
}
|
// There is a new alien language which uses the latin alphabet. However, the order among letters
// are unknown to you. You receive a list of words from the dictionary, wherewords are sorted
// lexicographically by the rules of this new language. Derive the order of letters in this
// language.
//
// For example,
// Given the following words in dictionary,
//
// [
// "wrt",
// "wrf",
// "er",
// "ett",
// "rftt"
// ]
// The correct order is: "wertf".
//
// Note:
//
// You may assume all letters are in lowercase.
// If the order is invalid, return an empty string.
// There may be multiple valid order of letters, return any one of them is fine.
use std::collections::HashMap;
use std::collections::HashSet;
use std::cmp::max;
/// Derive a letter order consistent with the lexicographically sorted
/// `dict`. Returns one valid order (several may exist).
fn alien_dictionary(dict: &Vec<&str>) -> Vec<char> {
    let graph = build_graph(dict);
    // fix: a leftover debug `println!("{:?}", graph)` was removed here
    topological_sort(graph)
}
/// Build the letter-precedence graph: an edge a -> b means letter `a`
/// sorts before letter `b`. Every letter seen in an adjacent word pair
/// gets a node; the first differing position of each pair gets an edge.
fn build_graph(dict: &[&str]) -> HashMap<char, HashSet<char>> {
    let mut graph: HashMap<char, HashSet<char>> = HashMap::new();
    for pair in dict.windows(2) {
        let first: Vec<char> = pair[0].chars().collect();
        let second: Vec<char> = pair[1].chars().collect();
        let longest = max(first.len(), second.len());
        // only the first mismatch of a pair carries ordering information
        let mut ordered = false;
        for idx in 0..longest {
            let a = first.get(idx);
            let b = second.get(idx);
            // register every character as a node, even past the mismatch
            if let Some(&c) = a {
                graph.entry(c).or_insert_with(HashSet::new);
            }
            if let Some(&c) = b {
                graph.entry(c).or_insert_with(HashSet::new);
            }
            if !ordered {
                if let (Some(&c1), Some(&c2)) = (a, b) {
                    if c1 != c2 {
                        graph.entry(c1).or_default().insert(c2);
                        ordered = true;
                    }
                }
            }
        }
    }
    graph
}
/// Iterative DFS-style topological sort of the precedence graph.
///
/// NOTE(review): iteration starts from `HashMap::keys`, whose order is
/// randomized per process, so when several valid orders exist the result
/// can differ between runs (see the tests below).
/// NOTE(review): there is no cycle detection — a cyclic (invalid)
/// dictionary makes this loop forever instead of returning "".
fn topological_sort(graph: HashMap<char, HashSet<char>>) -> Vec<char> {
    let mut stack: Vec<&char> = graph.keys().collect();
    let mut out = Vec::new();
    while let Some(c) = stack.pop() {
        // already emitted via another path
        if out.contains(c) {
            continue;
        }
        // successors of c that have not been emitted yet
        let edges: Vec<&char> = graph.get(c).unwrap().iter().filter(|edge| !out.contains(*edge)).collect();
        if edges.len() == 0 {
            // all successors emitted: c may be emitted now
            out.push(*c);
        } else {
            // revisit c after its pending successors
            stack.push(c);
            for edge in edges {
                stack.push(&edge);
            }
        }
    }
    // `out` lists successors before predecessors; reverse for the order
    out.into_iter().rev().collect()
}
#[cfg(test)]
mod tests {
    use super::alien_dictionary;
    /// Shared assertion: the dictionary resolves to the expected alphabet.
    fn check(dict: Vec<&str>, expected: Vec<char>) {
        assert_eq!(alien_dictionary(&dict), expected);
    }
    #[test]
    fn simple_dictionary() {
        check(vec!["aaa", "bbb", "ccc"], vec!['a', 'b', 'c']);
    }
    #[test]
    fn dict_1() {
        check(vec!["baa", "abcd", "abca", "cab", "cad"], vec!['b', 'd', 'a', 'c']);
    }
    #[test]
    fn dict_2() {
        check(vec!["caa", "aaa", "aab"], vec!['c', 'a', 'b']);
    }
    #[test]
    fn dict_3() {
        check(vec!["wrt", "wrf", "er", "ett", "rftt"], vec!['w', 'e', 'r', 't', 'f']);
    }
}
|
use std::{collections::HashMap, env, fs};
/// One movement direction parsed from the puzzle input ("U"/"D"/"L"/"R").
enum Direction {
    Up,
    Down,
    Left,
    Right,
}
/// Parsed puzzle input: a list of (direction, step count) moves.
type Input = Vec<(Direction, i32)>;
fn main() {
let args: Vec<String> = env::args().collect();
if args.get(1).is_none() {
panic!("Supply a file to run against");
}
let content = fs::read_to_string(args.get(1).unwrap()).expect("Reading file went wrong");
let lines: Vec<&str> = content.lines().collect();
let chars = lines
.iter()
.map(|line| {
let (dir, num) = line.trim().split_once(' ').unwrap();
let num = num.parse::<i32>().unwrap();
match dir {
"R" => (Direction::Right, num),
"L" => (Direction::Left, num),
"U" => (Direction::Up, num),
"D" => (Direction::Down, num),
_ => panic!("Unknown direction"),
}
})
.collect();
part1(&chars);
part2(&chars);
}
/// Move the head one step in `dir` (x grows to the right, y grows upward).
fn move_head(head: (i32, i32), dir: &Direction) -> (i32, i32) {
    let (dx, dy) = match dir {
        Direction::Up => (0, 1),
        Direction::Down => (0, -1),
        Direction::Left => (-1, 0),
        Direction::Right => (1, 0),
    };
    (head.0 + dx, head.1 + dy)
}
/// Pull `knot` one step toward `other` when the two are no longer touching.
///
/// Knots "touch" while within one cell of each other (diagonals included).
/// Once the gap reaches exactly two on either axis, the knot moves one step
/// along each axis that differs (signum per axis). This reproduces the
/// original 16-arm match table exactly: every arm had a maximum coordinate
/// gap of 2 and moved by the per-axis sign of the difference.
fn move_knot(knot: (i32, i32), other: (i32, i32)) -> (i32, i32) {
    let dx = other.0 - knot.0;
    let dy = other.1 - knot.1;
    if dx.abs().max(dy.abs()) == 2 {
        (knot.0 + dx.signum(), knot.1 + dy.signum())
    } else {
        knot
    }
}
/// Part 1: simulate a two-knot rope and print how many distinct cells the
/// tail visits.
fn part1(n: &Input) {
    let mut head_pos = (0, 0);
    let mut tail_pos = (0, 0);
    // Cells visited by the tail; only the key set matters.
    let mut visited: HashMap<(i32, i32), bool> = HashMap::new();
    for (dir, num) in n {
        for _ in 0..*num {
            head_pos = move_head(head_pos, dir);
            // The tail only moves once the head leaves its 1-cell ring.
            tail_pos = move_knot(tail_pos, head_pos);
            visited.insert(tail_pos, true);
        }
    }
    // Fix: count via len() instead of a loop with an unused binding.
    println!("Part 1: {}", visited.len());
}
/// Part 2: simulate a ten-knot rope and print how many distinct cells the
/// last knot visits.
fn part2(n: &Input) {
    // Ten knots, all starting at the origin; index 0 is the head.
    let mut rope = vec![(0, 0); 10];
    let mut visited: HashMap<(i32, i32), bool> = HashMap::new();
    for (dir, num) in n {
        for _ in 0..*num {
            rope[0] = move_head(rope[0], dir);
            // Each knot chases the one directly in front of it.
            for i in 1..rope.len() {
                rope[i] = move_knot(rope[i], rope[i - 1]);
            }
            visited.insert(rope[rope.len() - 1], true);
        }
    }
    // Fix: count via len() instead of a loop with an unused binding.
    println!("Part 2: {}", visited.len());
}
|
// Parser implementation backed by the `pest` crate; re-exported for
// crate-internal use only.
mod pest_parser_impl;
pub(crate) use pest_parser_impl::PestParserImpl;
|
extern crate libpasta;
extern crate ring;
use ring::hkdf;
#[test]
fn test_hmac() {
    // Inner password hash: scrypt with its default parameters.
    let inner = libpasta::primitives::Scrypt::default();
    let mut config = libpasta::Config::with_primitive(inner);
    // Register a (toy) key with the config; real code would load a secret.
    let key = b"yellow submarine";
    let key_id = config.add_key(key);
    // Wrap the inner hash in an HMAC keyed by that registered key.
    let outer = libpasta::primitives::Hmac::with_key_id(hkdf::HKDF_SHA256, &key_id);
    config.set_keyed_hash(outer);
    let hash = config.hash_password("hunter2");
    println!("Computed hash: {:?}", hash);
    // Expected shape:
    // "$!$hmac$key_id=LNMhDy...,h=SHA256$$scrypt$ln=14,r=8,p=1$ZJ5EY...$grlNA...."
    assert!(hash.starts_with("$!$hmac"));
    assert!(hash.contains("scrypt"));
}
|
use crate::errors::*;
use crate::types::*;
use uuid::Uuid;
/// Describes the address of UDP reflectors
/// Describes the address of UDP reflectors
///
/// `@type` and `@extra` are TDLib envelope fields, renamed on
/// (de)serialization; the remaining fields mirror the TDLib
/// `callConnection` object.
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
pub struct CallConnection {
    #[doc(hidden)]
    #[serde(rename(serialize = "@type", deserialize = "@type"))]
    td_name: String,
    #[doc(hidden)]
    #[serde(rename(serialize = "@extra", deserialize = "@extra"))]
    extra: Option<String>,
    /// Reflector identifier
    id: isize,
    /// IPv4 reflector address
    ip: String,
    /// IPv6 reflector address
    ipv6: String,
    /// Reflector port number
    port: i64,
    /// Connection peer tag
    peer_tag: String,
}
impl RObject for CallConnection {
    #[doc(hidden)]
    fn td_name(&self) -> &'static str {
        "callConnection"
    }
    #[doc(hidden)]
    fn extra(&self) -> Option<String> {
        self.extra.clone()
    }
    /// Serialize to the TDLib JSON wire format.
    fn to_json(&self) -> RTDResult<String> {
        Ok(serde_json::to_string(self)?)
    }
}
impl CallConnection {
pub fn from_json<S: AsRef<str>>(json: S) -> RTDResult<Self> {
Ok(serde_json::from_str(json.as_ref())?)
}
pub fn builder() -> RTDCallConnectionBuilder {
let mut inner = CallConnection::default();
inner.td_name = "callConnection".to_string();
inner.extra = Some(Uuid::new_v4().to_string());
RTDCallConnectionBuilder { inner }
}
pub fn id(&self) -> isize {
self.id
}
pub fn ip(&self) -> &String {
&self.ip
}
pub fn ipv6(&self) -> &String {
&self.ipv6
}
pub fn port(&self) -> i64 {
self.port
}
pub fn peer_tag(&self) -> &String {
&self.peer_tag
}
}
/// Builder for [`CallConnection`]; obtain via `CallConnection::builder()`.
#[doc(hidden)]
pub struct RTDCallConnectionBuilder {
    inner: CallConnection,
}
impl RTDCallConnectionBuilder {
    /// Finish building, cloning out the accumulated value so the builder
    /// remains usable.
    pub fn build(&self) -> CallConnection {
        self.inner.clone()
    }
    /// Set the reflector identifier.
    pub fn id(&mut self, id: isize) -> &mut Self {
        self.inner.id = id;
        self
    }
    /// Set the IPv4 reflector address.
    pub fn ip<T: AsRef<str>>(&mut self, ip: T) -> &mut Self {
        self.inner.ip = ip.as_ref().to_owned();
        self
    }
    /// Set the IPv6 reflector address.
    pub fn ipv6<T: AsRef<str>>(&mut self, ipv6: T) -> &mut Self {
        self.inner.ipv6 = ipv6.as_ref().to_owned();
        self
    }
    /// Set the reflector port number.
    pub fn port(&mut self, port: i64) -> &mut Self {
        self.inner.port = port;
        self
    }
    /// Set the connection peer tag.
    pub fn peer_tag<T: AsRef<str>>(&mut self, peer_tag: T) -> &mut Self {
        self.inner.peer_tag = peer_tag.as_ref().to_owned();
        self
    }
}
// Allow APIs taking `impl AsRef<CallConnection>` to accept either the
// finished value or the builder.
impl AsRef<CallConnection> for CallConnection {
    fn as_ref(&self) -> &CallConnection {
        self
    }
}
impl AsRef<CallConnection> for RTDCallConnectionBuilder {
    fn as_ref(&self) -> &CallConnection {
        &self.inner
    }
}
|
use std::{env, panic};
use scicalc_rs::parser::eval;
/// Print the command-line usage string.
fn show_usage() {
    println!("Usage: scicalc-rs [expression]");
}
/// Entry point: evaluate the single expression given on the command line.
fn main() {
    // Silence the default panic printout; parse failures inside `eval`
    // panic and are reported as a friendly error below.
    //TODO: Add proper error handling
    panic::set_hook(Box::new(|_info| {}));
    let args: Vec<String> = env::args().collect();
    match args.len() {
        0 | 1 => {
            println!("Error: Not enough args.");
            show_usage();
        }
        2 => {
            // Catch the parser's panics and translate them to an error line.
            let outcome = panic::catch_unwind(|| eval(args[1].as_str()));
            match outcome {
                Ok(value) => println!("{}", value),
                Err(_) => println!("Error: could not parse expression."),
            }
        }
        _ => {
            println!("Error: too many args.");
            show_usage();
        }
    }
}
|
use std::collections::BinaryHeap;
/// Streaming median via two heaps.
///
/// `lo` is a max-heap holding the lower half of the numbers; `hi` holds the
/// upper half *negated*, so `BinaryHeap`'s max-ordering acts as a min-heap
/// (its peek is the negation of the upper half's minimum).
/// Invariant: `lo.len()` equals `hi.len()` or `hi.len() + 1`.
struct MedianFinder {
    hi: BinaryHeap<i32>,
    lo: BinaryHeap<i32>,
}
impl MedianFinder {
    /// Create an empty finder.
    fn new() -> Self {
        Self {
            hi: BinaryHeap::new(),
            lo: BinaryHeap::new(),
        }
    }
    /// Insert a number, keeping the halves balanced.
    fn add_num(&mut self, num: i32) {
        // Route through `lo`: after the push, its max (the largest of the
        // lower half) migrates, negated, into the min-heap `hi`.
        self.lo.push(num);
        self.hi.push(-self.lo.pop().unwrap());
        // Restore the size invariant: `lo` may hold at most one extra.
        if self.hi.len() > self.lo.len() {
            self.lo.push(-self.hi.pop().unwrap());
        }
    }
    /// Current median. Panics (unwrap on empty heap) if nothing was added.
    fn find_median(&self) -> f64 {
        if self.hi.len() == self.lo.len() {
            // Even count: average the two middle values. `hi.peek()` is the
            // *negated* minimum of the upper half, hence the subtraction.
            (self.lo.peek().unwrap() - self.hi.peek().unwrap()) as f64 / 2.0
        } else {
            // Odd count: `lo` holds the extra element; its max is the median.
            *self.lo.peek().unwrap() as f64
        }
    }
}
use super::content::PositionDetails;
use std::error::Error;
use std::fmt;
/// Error raised while parsing, carrying pre-rendered display text plus the
/// source position it was raised at.
#[derive(Debug, Clone)]
pub struct ParsingError {
    // Rendered eagerly in `new`; emitted verbatim by the Display impl.
    display_text: String,
    message: String,
    filename: String,
    // Stored zero-based; `fmt_position_line` renders them one-based (+1).
    line_number: usize,
    column: usize,
    byte_offset: usize,
}
impl ParsingError {
    /// Create an error whose Display text ("Error: <message>\n at ...")
    /// is rendered eagerly from the supplied position.
    pub fn new(position: &PositionDetails, message: String) -> ParsingError {
        let location = ParsingError::fmt_position_line(position);
        ParsingError {
            display_text: format!("Error: {}\n{}\n", message, location),
            message,
            filename: String::from(position.file_content.filename()),
            line_number: position.line_number,
            column: position.column,
            byte_offset: position.byte_offset,
        }
    }
    /// Render a human-readable ` at "<file>" line:col` suffix (one-based).
    pub fn fmt_position_line(details: &PositionDetails) -> String {
        format!(
            " at \"{}\" {}:{}",
            details.file_content.filename(),
            details.line_number + 1,
            details.column + 1
        )
    }
}
impl Error for ParsingError {
    /// No underlying cause; this is always a leaf error.
    fn source(&self) -> Option<&(dyn Error + 'static)> {
        None
    }
}
impl fmt::Display for ParsingError {
    // Emits the text pre-rendered in `new` ("Error: <msg>\n at ...").
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{}", self.display_text)
    }
}
|
#[doc = "Reader of register NEXT_SUP_TO_STATUS"]
pub type R = crate::R<u32, super::NEXT_SUP_TO_STATUS>;
#[doc = "Reader of field `NEXT_SUP_TO`"]
pub type NEXT_SUP_TO_R = crate::R<u16, u16>;
impl R {
#[doc = "Bits 0:15 - HW updates this register for the SuperVision timeout next instant, granularity is 625us"]
#[inline(always)]
pub fn next_sup_to(&self) -> NEXT_SUP_TO_R {
NEXT_SUP_TO_R::new((self.bits & 0xffff) as u16)
}
}
|
extern crate serde_json;
use currency::api::ServiceApi;
use currency::api::error::ApiError;
use currency::offers::HistoryOffers;
use currency::offers::history;
use exonum::api::Api;
use exonum::blockchain::Blockchain;
use exonum::crypto::Hash;
use exonum::encoding::serialize::FromHex;
use hyper::header::ContentType;
use iron::headers::AccessControlAllowOrigin;
use iron::prelude::*;
use iron::status;
use prometheus::IntCounter;
use router::Router;
use std::collections::HashMap;
// NOTE(review): appears unused in this module — confirm before removing.
const MAX_BLOCKS_PER_REQUEST: u64 = 1000;
/// HTTP API handler exposing historical offers stored in the blockchain.
#[derive(Clone)]
pub struct HistoryOffersApi {
    pub blockchain: Blockchain,
}
/// Result of fetching one offer history by transaction hash.
pub type HistoryOfferResult = Result<Option<HistoryOffers>, ApiError>;
/// Result of a paginated listing request.
pub type HistoryOffersResponse = Result<HistoryOffersInfo, ApiError>;
/// Paginated listing: `total` entries scanned, `count` entries returned.
#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)]
pub struct HistoryOffersInfo {
    pub total: u64,
    pub count: u64,
    pub offer_info: HashMap<Hash, HistoryOfferInfo>,
}
/// Summary for one offer history entry.
#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)]
pub struct HistoryOfferInfo {
    pub tx_amount: u64,
}
impl HistoryOfferInfo {
    /// Summarize an offer history as the number of transactions it holds.
    pub fn from(open_offers: &HistoryOffers) -> Self {
        let tx_amount = open_offers.history().len() as u64;
        HistoryOfferInfo { tx_amount }
    }
}
impl HistoryOffersApi {
    /// Scan the history-offers index and return the page
    /// `[offset, offset + limit)` plus `(total scanned, count returned)`.
    ///
    /// NOTE(review): walks the whole index even past the requested page to
    /// compute `total` — O(index size) per request.
    fn pagination_history_offers(
        &self,
        offset: u64,
        limit: u64,
    ) -> (HashMap<Hash, HistoryOfferInfo>, u64, u64) {
        let view = &mut self.blockchain.fork();
        let idx = history::Schema(view).index();
        let mut total: u64 = 0;
        let mut count: u64 = 0;
        let mut result = HashMap::new();
        for v in idx.iter() {
            // Entries outside the requested window are skipped but counted.
            if total < offset || total >= offset + limit {
                total += 1;
                continue;
            }
            let hoi = HistoryOfferInfo::from(&v.1);
            result.insert(v.0, hoi);
            count += 1;
            total += 1;
        }
        (result, total, count)
    }
}
// Prometheus request/response counters for the two endpoints wired below.
lazy_static! {
    static ref LIST_REQUESTS: IntCounter = register_int_counter!(
        "dmbc_history_offers_api_list_requests_total",
        "OpenOffer list requests."
    ).unwrap();
    static ref LIST_RESPONSES: IntCounter = register_int_counter!(
        "dmbc_history_offers_api_list_responses_total",
        "OpenOffer list responses."
    ).unwrap();
    static ref INFO_REQUESTS: IntCounter = register_int_counter!(
        "dmbc_history_offers_api_info_requests_total",
        "OpenOffer info requests."
    ).unwrap();
    static ref INFO_RESPONSES: IntCounter = register_int_counter!(
        "dmbc_history_offers_api_info_responses_total",
        "OpenOffer info responses."
    ).unwrap();
}
impl Api for HistoryOffersApi {
    /// Mount the two read-only endpoints onto the router.
    fn wire(&self, router: &mut Router) {
        // Gets status of the wallet corresponding to the public key.
        let _self = self.clone();
        // GET /v1/history/offers — paginated listing of offer histories.
        let history_offers_info = move |req: &mut Request| -> IronResult<Response> {
            LIST_REQUESTS.inc();
            let (offset, limit) = ServiceApi::pagination_params(req);
            let (offer_info, total, count) = _self.pagination_history_offers(offset, limit);
            let result: HistoryOffersResponse = Ok(HistoryOffersInfo {
                total,
                count,
                offer_info,
            });
            let mut res =
                Response::with((status::Ok, serde_json::to_string_pretty(&result).unwrap()));
            res.headers.set(ContentType::json());
            res.headers.set(AccessControlAllowOrigin::Any);
            LIST_RESPONSES.inc();
            Ok(res)
        };
        let _self = self.clone();
        // GET /v1/history/offers/:tx_hash — one offer history by tx hash.
        let bids_asks = move |req: &mut Request| -> IronResult<Response> {
            INFO_REQUESTS.inc();
            let params = req.extensions.get::<Router>().unwrap();
            let result: HistoryOfferResult = match params.find("tx_hash") {
                Some(tx_hash_str) =>
                    match Hash::from_hex(tx_hash_str) {
                        Ok(tx_hash) => {
                            let view = &mut _self.blockchain.fork();
                            Ok(Some(history::Schema(view).fetch(&tx_hash)))
                        },
                        Err(_) => Err(ApiError::TransactionHashInvalid)
                    },
                None => Err(ApiError::IncorrectRequest),
            };
            // Errors map onto HTTP status codes via ApiError::to_status.
            let status_code = match result {
                Ok(_) => status::Ok,
                Err(e) => e.to_status(),
            };
            let body = serde_json::to_string_pretty(&result).unwrap();
            let mut res = Response::with((status_code, body));
            res.headers.set(ContentType::json());
            res.headers.set(AccessControlAllowOrigin::Any);
            INFO_RESPONSES.inc();
            Ok(res)
        };
        router.get("/v1/history/offers", history_offers_info, "history_offers_info");
        router.get("/v1/history/offers/:tx_hash", bids_asks, "offer_history");
    }
}
|
use serde::{de, Deserialize, Serialize};
/// A user exposed over the API; `name` is validated on deserialization
/// through `Name`'s custom Deserialize impl.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct User {
    id: u64,
    name: Name,
}
impl User {
    /// Numeric user identifier.
    pub fn id(&self) -> u64 {
        self.id
    }
}
/// Type representing a validated name.
///
/// `Deserialize` is implemented by hand below so that every deserialized
/// value passes through [`Name::new`] validation.
#[derive(Clone, Debug, Serialize)]
struct Name(String);
impl Name {
    /// Validate `name` and wrap it in a `Name`.
    ///
    /// Rules: between 1 and 10 characters, ASCII letters (A-Z, a-z) only.
    pub fn new(name: &str) -> Result<Self, String> {
        let size = name.chars().count();
        if !(1..=10).contains(&size) {
            return Err("名前は10文字以内です".to_string());
        }
        if !name.chars().all(|c| c.is_ascii_alphabetic()) {
            return Err("名前が使用できる文字種はA-Z, a-zです".to_string());
        }
        Ok(Name(name.to_string()))
    }
}
impl<'de> de::Deserialize<'de> for Name {
    /// Deserialize as a plain string, then run [`Name::new`] validation so
    /// invalid names are rejected at the serde boundary.
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: de::Deserializer<'de>,
    {
        let s = String::deserialize(deserializer)?;
        Name::new(&s).map_err(de::Error::custom)
    }
}
/// Conversion from a string slice.
/// This trait implementation is required by the `warp::path::params()` function.
impl std::str::FromStr for Name {
    type Err = String;
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        Name::new(s)
    }
}
/// Required so handlers can format a `Name`.
impl std::fmt::Display for Name {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}", self.0)
    }
}
#[test]
fn test_name() {
    // 1..=10 ASCII letters are accepted.
    for &ok in ["Nrskt", "N", "NrsktNrskt"].iter() {
        assert!(Name::new(ok).is_ok());
    }
    // Digits, empty strings, and names longer than 10 chars are rejected.
    for &ng in ["0", "", "NrsktNrsktN"].iter() {
        assert!(Name::new(ng).is_err());
    }
}
|
//! Representation of a category in a Gentoo repository
use crate::{
repository::{Ebuild, Package},
util::optfilter::OptFilter,
};
use std::{io::Error, path::PathBuf, result::Result};
/// Represents a discrete Gentoo category
/// Represents a discrete Gentoo category
pub struct Category {
    // Repository root; the category path is resolved as `root/category`.
    root: PathBuf,
    // Category name, e.g. "dev-lang".
    category: String,
}
impl Category {
    /// Construct a new [`Category`] explicitly
    pub fn new(root: PathBuf, category: String) -> Category {
        Category { root, category }
    }
    /// Return the path to the category
    pub fn path(&self) -> PathBuf { self.root.join(&self.category) }
    /// Return the name of the category
    pub fn name(&self) -> String { self.category.to_owned() }
    /// Return an iterator over all packages in this category
    ///
    /// Each directory entry under the category path is mapped to a
    /// [`Package`]; errors opening the directory surface immediately, while
    /// per-entry errors are yielded through the iterator.
    pub fn packages(
        &self,
    ) -> Result<Box<dyn Iterator<Item = Result<Package, Error>>>, Error> {
        // Owned copies so the boxed iterator does not borrow `self`.
        let root = self.root.to_owned();
        let category = self.category.to_owned();
        Ok(Box::new(root.join(&category).read_dir()?.map_oks(move |ent| {
            let ent_fn = ent.file_name();
            Ok(Package::new(
                root.to_owned(),
                category.to_owned(),
                ent_fn
                    .to_str()
                    .expect("Could not decode filename as UTF8")
                    .to_owned(),
            ))
        })))
    }
    /// Return an iterator over all ebuilds in this category
    ///
    /// A package that fails to enumerate its ebuilds contributes a single
    /// `Err` item instead of aborting the whole iteration.
    pub fn ebuilds(
        &self,
    ) -> Result<Box<dyn Iterator<Item = Result<Ebuild, Error>>>, Error> {
        self.packages().map(|pkg_it| {
            Box::new(pkg_it.filter_oks(Package::is_legal).flat_map(
                |pkg_res| match pkg_res {
                    Ok(pkg) => match pkg.ebuilds() {
                        Ok(ebuild_iter) => ebuild_iter,
                        Err(e) => Box::new(vec![Err(e)].into_iter()),
                    },
                    Err(e) => Box::new(vec![Err(e)].into_iter()),
                },
            )) as Box<dyn Iterator<Item = _>>
        })
    }
    /// Get a package within this category
    pub fn get_package(&self, name: &str) -> Package {
        Package::new(
            self.root.to_owned(),
            self.category.to_owned(),
            name.to_string(),
        )
    }
    /// Returns whether this category's path exists on disk
    pub fn exists(&self) -> bool { self.path().exists() }
    /// Determines if a category has a legal name or not
    ///
    /// Reserved repository directories (metadata, profiles, ...) share the
    /// top level with categories and must be excluded.
    pub fn has_legal_name(&self) -> bool {
        match self.category.as_str() {
            "metadata" | "profiles" | "eclass" | ".git" | "distfiles"
            | "packages" | "scripts" => false,
            _ => true,
        }
    }
    /// Returns if a category is "legal" or not
    ///
    /// This means the category has both a legal name, and its path is a
    /// directory
    pub fn is_legal(&self) -> bool {
        self.has_legal_name() && self.path().is_dir()
    }
    /// Determines if a category has children
    ///
    /// This is a performance hit because it has to invoke readdir on the
    /// category and begins package discovery, but returns true as soon as
    /// readdir yields a package
    pub fn is_non_empty(&self) -> bool {
        self.packages().unwrap().any(|x| x.is_ok())
    }
}
impl std::fmt::Debug for Category {
    // Compact form: just the category name, prefixed with "cat:".
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "cat: {}", &self.category)
    }
}
impl crate::util::repoobject::RepoObject for Category {
fn name(&self) -> String { self.category.to_owned() }
fn path(&self) -> PathBuf { self.root.join(&self.category) }
fn ident(&self) -> String { (self.category.to_owned() + "/") }
fn components(&self) -> String { format!("cat={}", &self.category) }
}
|
/// Trial-division primality test.
///
/// Fix: values below 2 (including 0 and 1) previously fell through the
/// empty trial loop and were reported prime; they are rejected explicitly
/// now.
#[allow(non_snake_case)]
fn isPrime(n: i32) -> bool {
    if n < 2 {
        return false;
    }
    // Only divisors up to (and including) sqrt(n) need checking.
    let sn = ((n as f64).sqrt() as i32) + 1;
    for i in 2..sn {
        if n % i == 0 {
            return false;
        }
    }
    true
}
/// True when the decimal representation of `n` contains a `0` digit.
/// For `n <= 0` the loop never runs and the result is `false`; callers in
/// this file only pass positive values.
#[allow(non_snake_case)]
fn hasZero(n: i32) -> bool {
    let mut rest = n;
    while rest > 0 {
        if rest % 10 == 0 {
            return true;
        }
        rest /= 10;
    }
    false
}
/// Collect all decimal rotations of `n`, starting with `n` itself.
///
/// A rotation moves the last digit to the front: 197 -> 719 -> 971 -> 197.
/// NOTE(review): inputs containing a 0 digit can shrink when rotated and
/// may never cycle back to `n`, looping forever; callers must filter those
/// out first (main does, via hasZero).
#[allow(non_snake_case)]
fn rotateNumber(n: i32) -> Vec<i32> {
    // Power of ten that places a digit in the leading position of `m`.
    let shift = |m: i32| (10.0 as f64).powi((m as f64).log(10.0) as i32) as i32;
    let rotate = |m: i32| (m % 10) * shift(m) + m / 10;
    let mut rotations = vec![n];
    let mut m = rotate(n);
    while m != n {
        rotations.push(m);
        m = rotate(m);
    }
    rotations
}
fn main() {
let mut count = 0;
'out: for i in 2..1000001 {
if hasZero(i) {
continue;
}
let v = rotateNumber(i);
for x in v {
if !isPrime(x) {
continue 'out;
}
}
count += 1;
}
println!("{}", count);
}
|
extern crate civil;
use civil::units::conversions;
const PRECISION: f64 = 0.1;
/// Smoke test: proves the test harness itself runs.
#[test]
fn we_can_test() {
    assert!(true)
}
/// Looking up a unit pair that was never registered yields `None`.
#[test]
fn nonexistant_keys_return_none() {
    let my_table = conversions::Table::new();
    let my_bad_key = ("apple", "orange");
    assert!(my_table.convert.get(&my_bad_key).is_none());
}
/// A known unit pair maps to its conversion factor (feet -> meters).
#[test]
fn valid_key_gets_the_right_value() {
    let my_table = conversions::Table::new();
    let my_key = ("foot (English Imperial)", "meter");
    // Fix: dropped the leftover dbg!() debugging artifact.
    let my_val = my_table.convert.get(&my_key).unwrap();
    assert!((0.3048 - my_val).abs() < PRECISION)
}
|
// Node-type bitmask constants (presumably mirroring the Arnold SDK's
// AiNodeEntry type flags — confirm against ai_node_entry.h). Values are
// single bits so they can be OR-ed together; AI_NODE_ALL matches any type.
pub const AI_NODE_UNDEFINED: i32 = 0;
pub const AI_NODE_OPTIONS: i32 = 0x0001;
pub const AI_NODE_CAMERA: i32 = 0x0002;
pub const AI_NODE_LIGHT: i32 = 0x0004;
pub const AI_NODE_SHAPE: i32 = 0x0008;
pub const AI_NODE_SHADER: i32 = 0x0010;
pub const AI_NODE_OVERRIDE: i32 = 0x0020;
pub const AI_NODE_DRIVER: i32 = 0x0040;
pub const AI_NODE_FILTER: i32 = 0x0080;
pub const AI_NODE_COLOR_MANAGER: i32 = 0x0800;
pub const AI_NODE_ALL: i32 = 0xFFFF;
// Sub-flags refining AI_NODE_SHAPE:
pub const AI_NODE_SHAPE_PROCEDURAL: i32 = 0x0100;
pub const AI_NODE_SHAPE_VOLUME: i32 = 0x0200;
pub const AI_NODE_SHAPE_IMPLICIT: i32 = 0x0400;
extern crate tar;
extern crate flate2;
use std::io::{self, Read};
use std::fs::{self, File};
use std::path::{Path, PathBuf};
use std::env;
use std::process;
fn decompress(tarpath: PathBuf, extract_path: PathBuf) -> io::Result<()> {
use flate2::read::GzDecoder;
use tar::Archive;
let tarball = fs::File::open(tarpath)?;
let decompressed = GzDecoder::new(tarball);
let mut archive = Archive::new(decompressed);
fs::create_dir_all(&extract_path)?;
archive.unpack(&extract_path)?;
Ok(())
}
/// Tar + gzip `input_file` into `output_file`.
///
/// Fix: finish the archive and the encoder explicitly so write/flush errors
/// surface as `Err` instead of being silently swallowed by the implicit
/// flush in `Drop`.
fn compress(input_file: &str, output_file: PathBuf) -> io::Result<()> {
    use flate2::write::GzEncoder;
    use flate2::Compression;
    use tar::Builder;
    let file = File::create(&output_file)?;
    let mut encoder = GzEncoder::new(file, Compression::default());
    {
        let mut builder = Builder::new(&mut encoder);
        builder.append_path(input_file)?;
        // Write the archive terminator and flush the tar stream.
        builder.finish()?;
    }
    // Flush the gzip trailer.
    encoder.finish()?;
    Ok(())
}
/// Print the error and abort the process when `res` is an Err; no-op on Ok.
fn verify(res: io::Result<()>) {
    if let Err(e) = res {
        println!("error: {}", e);
        process::exit(1);
    }
}
/// Round-trip demo: tar+gzip `data.txt`, unpack the archive, and verify
/// that the extracted contents match.
fn main() {
    let pwd = env::current_dir().unwrap();
    let tarpath = Path::new(&pwd).join("data.tar.gz");
    let extractpath = Path::new(&pwd).join("output");
    verify(compress("./data.txt", tarpath.clone()));
    println!("Compressed data");
    verify(decompress(tarpath, extractpath));
    println!("Decompressed data");
    let extracted = Path::new(&pwd).join("output").join("data.txt");
    let mut text = String::new();
    File::open(extracted)
        .unwrap()
        .read_to_string(&mut text)
        .unwrap();
    assert_eq!(&text, "hi\n");
    println!("Verified data");
}
|
pub struct Solution;
impl Solution {
    /// Count palindromic substrings of `s` (LeetCode 647) via O(n^2) DP.
    ///
    /// Cleanups: the three near-identical branches are merged into the
    /// single palindrome recurrence, `== true` and the trailing `return`
    /// are gone. Behavior is unchanged, including for the empty string.
    pub fn count_substrings(s: String) -> i32 {
        let s = s.into_bytes();
        let n = s.len();
        let mut counter: i32 = 0;
        // dp[i][j] is true iff s[i..=j] is a palindrome.
        let mut dp = vec![vec![false; n]; n];
        for j in 0..n {
            for i in 0..=j {
                // A span is a palindrome when its end bytes match and the
                // interior (if it has one, i.e. length > 2) is a palindrome.
                if s[i] == s[j] && (j - i < 2 || dp[i + 1][j - 1]) {
                    dp[i][j] = true;
                    counter += 1;
                }
            }
        }
        counter
    }
}
#[cfg(test)]
mod test {
    use super::Solution;
    #[test]
    fn test() {
        // Distinct characters: only the three single-char palindromes.
        assert_eq!(Solution::count_substrings("abc".to_string()), 3);
        // Added coverage: "a" x3, "aa" x2, "aaa" -> 6 palindromes.
        assert_eq!(Solution::count_substrings("aaa".to_string()), 6);
    }
}
use crate::{
backend::SchemaBuilder, prepare::*, types::*, SchemaStatementBuilder, TableForeignKey,
};
/// Drop a foreign key constraint for an existing table
///
/// # Examples
///
/// ```
/// use sea_query::{*, tests_cfg::*};
///
/// let foreign_key = ForeignKey::drop()
///     .name("FK_character_font")
///     .table(Char::Table)
///     .to_owned();
///
/// assert_eq!(
///     foreign_key.to_string(MysqlQueryBuilder),
///     r#"ALTER TABLE `character` DROP FOREIGN KEY `FK_character_font`"#
/// );
/// assert_eq!(
///     foreign_key.to_string(PostgresQueryBuilder),
///     r#"ALTER TABLE "character" DROP CONSTRAINT "FK_character_font""#
/// );
/// // Sqlite does not support modification of foreign key constraints to existing tables
/// ```
#[derive(Debug, Clone)]
pub struct ForeignKeyDropStatement {
    // The key to drop; only its name matters for a DROP statement.
    pub(crate) foreign_key: TableForeignKey,
    // The table the constraint lives on.
    pub(crate) table: Option<DynIden>,
}
impl Default for ForeignKeyDropStatement {
    // Delegates to new() so Default and new() stay in sync.
    fn default() -> Self {
        Self::new()
    }
}
impl ForeignKeyDropStatement {
    /// Construct a new [`ForeignKeyDropStatement`]
    pub fn new() -> Self {
        Self {
            foreign_key: Default::default(),
            table: None,
        }
    }
    /// Set foreign key name
    pub fn name(mut self, name: &str) -> Self {
        self.foreign_key.name(name);
        self
    }
    /// Set key table and referencing table
    pub fn table<T: 'static>(mut self, table: T) -> Self
    where
        T: Iden,
    {
        self.table = Some(SeaRc::new(table));
        self
    }
}
impl SchemaStatementBuilder for ForeignKeyDropStatement {
    // Statically-dispatched build: the backend writes its dialect-specific
    // DROP statement into the SQL writer.
    fn build<T: SchemaBuilder>(&self, schema_builder: T) -> String {
        let mut sql = SqlWriter::new();
        schema_builder.prepare_foreign_key_drop_statement(self, &mut sql);
        sql.result()
    }
    // Dynamically-dispatched variant; produces the same SQL as `build`.
    fn build_any(&self, schema_builder: &dyn SchemaBuilder) -> String {
        let mut sql = SqlWriter::new();
        schema_builder.prepare_foreign_key_drop_statement(self, &mut sql);
        sql.result()
    }
}
|
#![allow(unused_variables)]
#[macro_use] extern crate enum_primitive;
extern crate libc;
extern crate num;
extern crate vibi;
// extern crate bismit;
pub use vibi::bismit;
// use c_void;
use libc::c_void;
use std::fmt::Debug;
use std::thread::{self, JoinHandle};
use std::sync::{Arc, Mutex};
use std::sync::mpsc::{self, Sender, SyncSender, Receiver, TryRecvError};
use num::ToPrimitive;
use enum_primitive::FromPrimitive;
use bismit::{LayerMapSchemeList, AreaSchemeList, TypeId};
use bismit::flywheel::{Flywheel, Command, Request, Response, SensoryFrame, MotorFrame,
PathwayConfig};
pub mod config;
/// Embedding-friendly handle around a bismit `Flywheel` running on its own
/// thread, plus the channel endpoints used to drive it.
#[allow(dead_code)]
pub struct Tyro {
    // Accumulated external reward signal (see add_reward).
    reward: f32,
    // Join handle for the flywheel thread; joined in Drop.
    th_flywheel: JoinHandle<()>,
    command_tx: Sender<Command>,
    request_tx: Sender<Request>,
    response_rx: Receiver<Response>,
    // Bounded channel carrying sensory frames into the flywheel.
    sensory_tx: SyncSender<SensoryFrame>,
    motor_rx: Receiver<MotorFrame>,
}
impl Tyro {
    /// Spin up the flywheel and vibi window on their own threads and wire
    /// up all command/request/sensory/motor channels between them.
    pub fn new(lm_schemes: LayerMapSchemeList, a_schemes: AreaSchemeList) -> Tyro {
        let (command_tx, command_rx) = mpsc::channel();
        let (request_tx, request_rx) = mpsc::channel();
        let (response_tx, response_rx) = mpsc::channel();
        // Bounded (depth 1) so producers block instead of piling up frames.
        let (sensory_tx, sensory_rx) = mpsc::sync_channel(1);
        let (motor_tx, motor_rx) = mpsc::sync_channel(1);
        let command_tx_vibi = command_tx.clone();
        let (request_tx_vibi, request_rx_vibi) = mpsc::channel();
        let (response_tx_vibi, response_rx_vibi) = mpsc::channel();
        let th_flywheel = thread::Builder::new().name("flywheel".to_string()).spawn(move || {
            let mut flywheel = Flywheel::from_blueprint(lm_schemes,
                a_schemes, None, command_rx, "v1".into());
            flywheel.add_req_res_pair(request_rx, response_tx);
            flywheel.add_req_res_pair(request_rx_vibi, response_tx_vibi);
            // flywheel.add_sen_mot_pair(sensory_rx, motor_tx);
            flywheel.add_sensory_rx(sensory_rx, "v0b".to_owned());
            flywheel.add_motor_tx(motor_tx);
            flywheel.spin();
        }).expect("Error creating 'flywheel' thread");
        let th_win = thread::Builder::new().name("vibi".to_string()).spawn(move || {
            vibi::window::Window::open(command_tx_vibi, request_tx_vibi, response_rx_vibi);
        }).expect("Error creating 'vibi' thread");
        // Wait for the flywheel to initialize bismit:
        request_tx.send(Request::CurrentIter).unwrap();
        match response_rx.recv().unwrap() {
            Response::CurrentIter(_) => (),
            _ => panic!("Tyro::new(): Error initializing flywheel."),
        }
        Tyro {
            reward: 0.0,
            th_flywheel: th_flywheel,
            command_tx: command_tx,
            request_tx: request_tx,
            response_rx: response_rx,
            sensory_tx: sensory_tx,
            motor_rx: motor_rx,
        }
    }
    /// Construct with the schemes from the bundled `config` module.
    /// Note: this inherent fn shadows `<Tyro as Default>::default()` in
    /// `Tyro::default()` call syntax (inherent items take precedence).
    #[inline]
    pub fn default() -> Tyro {
        Tyro::new(config::define_lm_schemes(), config::define_a_schemes())
    }
    /// Drain any pending responses, then kick off one flywheel iteration.
    pub fn cycle(&self) {
        loop {
            match self.response_rx.try_recv() {
                Ok(r) => {
                    // Responses are currently discarded.
                    match r {
                        _ => (),
                    }
                }
                Err(e) => match e {
                    TryRecvError::Empty => break,
                    TryRecvError::Disconnected => panic!("Tyro::cycle(): \
                        Flywheel disconnected."),
                },
            }
        }
        self.command_tx.send(Command::Iterate(1)).unwrap()
    }
    /// Accumulate an external reward signal; returns the running total.
    pub fn add_reward(&mut self, reward: f32) -> f32 {
        self.reward += reward;
        self.reward
    }
    /// Current accumulated reward.
    pub fn reward(&self) -> f32 {
        self.reward
    }
    /// Convert a raw 2-D array (element type selected at runtime by
    /// `type_id`) into a fixed 16-slot f32 frame and send it on.
    pub fn push_vec_frame(&self, ptr: *const c_void, type_id: i32, dims: &[i64; 2]) {
        let len = (dims[0] * dims[1]) as usize;
        let f32_array16 = match TypeId::from_i32(type_id).expect("print_array(): Invalid type_id.") {
            TypeId::Float32 => to_f32_arr(ptr as *const f32, len),
            TypeId::Float64 => to_f32_arr(ptr as *const f64, len),
            TypeId::Int32 => to_f32_arr(ptr as *const i32, len),
            TypeId::Int64 => to_f32_arr(ptr as *const i64, len),
        };
        self.sensory_tx.send(SensoryFrame::F32Array16(f32_array16)).unwrap();
    }
    /// Send `(lo, hi)` encoder range pairs, read from raw C arrays of
    /// length `len`, to the flywheel's pathway configuration.
    pub fn set_encoder_ranges(&self, lo_ptr: *const f64, hi_ptr: *const f64, len: usize) {
        let mut ranges = Vec::with_capacity(len);
        // SAFETY: the FFI caller guarantees both pointers address `len` f64s.
        let lo = unsafe { std::slice::from_raw_parts(lo_ptr, len) };
        let hi = unsafe { std::slice::from_raw_parts(hi_ptr, len) };
        for i in 0..len {
            ranges.push((lo[i] as f32, hi[i] as f32));
        }
        // let ranges_am = Arc::new(Mutex::new(ranges));
        self.sensory_tx.send(SensoryFrame::PathwayConfig(PathwayConfig::EncoderRanges(
            ranges))).unwrap();
        self.command_tx.send(Command::None).unwrap();
    }
}
impl Default for Tyro {
fn default() -> Tyro { Tyro::default() }
}
impl Drop for Tyro {
    fn drop(&mut self) {
        // Ask the flywheel to shut down, then join its thread. `JoinHandle`
        // cannot be joined through `&mut self`, so swap in a dummy handle.
        self.command_tx.send(Command::Exit).unwrap();
        let th_flywheel = std::mem::replace(&mut self.th_flywheel,
            thread::Builder::new().spawn(|| ()).unwrap());
        if let Err(e) = th_flywheel.join() { println!("th_flywheel.join(): Error: '{:?}'", e); }
        // if let Err(e) = self.th_vis.join() { println!("th_vin.join(): Error: '{:?}'", e); }
    }
}
// ##########################################
// ############# MISC STUFF #################
// ##########################################
/// Copy up to 16 numeric values starting at `ptr` into a fixed f32 frame;
/// unfilled slots stay 0.0 and unconvertible values become 0.0.
///
/// Fix: inputs longer than 16 elements previously indexed past the array
/// and panicked; the copy is now clamped to the frame size.
///
/// # Safety
/// Relies on the FFI caller passing a pointer to at least `len` valid `T`s.
fn to_f32_arr<T: ToPrimitive>(ptr: *const T, len: usize) -> [f32; 16] {
    let slice = unsafe { std::slice::from_raw_parts(ptr, len) };
    let mut f32_array16 = [0.0f32; 16];
    // zip + take(16) bounds the copy to the fixed frame size.
    for (dst, val) in f32_array16.iter_mut().zip(slice.iter().take(16)) {
        *dst = val.to_f32().unwrap_or(0.0);
    }
    f32_array16
}
/// Debug helper: print `len` elements starting at `ptr`.
///
/// # Safety
/// Relies on the FFI caller passing a pointer to at least `len` valid `T`s.
fn print_something<T: Debug>(ptr: *const T, len: usize) {
    let slice = unsafe { std::slice::from_raw_parts(ptr, len) };
    println!("Array Value: {:?}", slice);
}
// ##########################################
// ############## FFI STUFF #################
// ##########################################
/// FFI: forward encoder range arrays to the running Tyro instance.
#[no_mangle]
pub extern "C" fn set_encoder_ranges(tyro: &Tyro, lo_ptr: *const f64, hi_ptr: *const f64, len: i32) {
    tyro.set_encoder_ranges(lo_ptr, hi_ptr, len as usize);
}
/// FFI: push one sensory frame (see `Tyro::push_vec_frame`).
#[no_mangle]
pub extern "C" fn push_vec_frame(tyro: &Tyro, ptr: *const c_void, type_id: i32,
        dims: &[i64; 2]) {
    tyro.push_vec_frame(ptr, type_id, dims);
}
/// FFI: drain pending responses and trigger one flywheel iteration.
#[no_mangle]
pub extern "C" fn cycle(tyro: &Tyro) {
    tyro.cycle();
}
/// FFI debug helper: print a 2-D array of `dims[0] * dims[1]` elements,
/// whose element type is selected at runtime by `type_id`.
///
/// # Panics
/// Panics when `type_id` does not map to a known `TypeId`.
#[no_mangle]
pub extern "C" fn print_array(ptr: *const c_void, type_id: i32, dims: &[i64; 2]) {
    let len = (dims[0] * dims[1]) as usize;
    // Fix: dropped the pointless `let ptr_typed = ...` binding — every
    // match arm returns (), so the binding never held anything.
    match TypeId::from_i32(type_id).expect("print_array(): Invalid type_id.") {
        TypeId::Float32 => print_something(ptr as *const f32, len),
        TypeId::Float64 => print_something(ptr as *const f64, len),
        TypeId::Int32 => print_something(ptr as *const i32, len),
        TypeId::Int64 => print_something(ptr as *const i64, len),
    }
}
/// FFI: allocate a Tyro on the heap and hand ownership to the caller.
#[no_mangle]
pub extern "C" fn new_tyro() -> Box<Tyro> {
    Box::new(Tyro::default())
}
/// FFI: placeholder — not implemented yet.
#[no_mangle]
pub extern "C" fn send_input(ptr: *const c_void, dims: [i32; 2], type_id: i32) {
}
/// FFI: accumulate reward and return the running total.
#[no_mangle]
pub extern "C" fn add_reward(tyro: &mut Tyro, reward: f32) -> f32 {
    // println!("Adding reward: {}", reward);
    tyro.add_reward(reward)
}
/// FFI: read the accumulated reward.
#[no_mangle]
pub extern "C" fn get_reward(tyro: &Tyro) -> f32 {
    tyro.reward()
}
/// FFI: take ownership back from the caller and drop the Tyro.
#[no_mangle]
pub extern "C" fn drop_tyro(_: Box<Tyro>) {
}
// ##########################################
// ############ BULLSHIT STUFF ##############
// ##########################################
/// FFI smoke test: print an integer received across the boundary.
#[no_mangle]
pub extern "C" fn print_int(a: i32) {
    println!("Integer Value: {}", a);
}
/// FFI smoke test: print `len` f64s starting at `p`.
/// Safety relies on the caller passing a valid pointer/length pair.
#[no_mangle]
pub extern "C" fn print_array_f64(p: *const f64, len: i32) {
    // println!("Array Value: {:?}", *array);
    let slice = unsafe { std::slice::from_raw_parts(p, len as usize) };
    println!("Array Value: {:?}", slice);
}
/// FFI smoke test: returns `a + 100`; the `tyro` handle is unused.
#[no_mangle]
pub extern "C" fn add_100(tyro: &Tyro, a: i32) -> i32 {
    a + 100
}
|
use std::ffi::OsStr;
use std::process::exit;
use crate::cli::SudoOptions;
use crate::common::{resolve::expand_tilde_in_path, Context, Environment, Error};
use crate::env::environment;
use crate::exec::{ExecOutput, ExitReason};
use crate::log::{auth_info, auth_warn};
use crate::sudo::Duration;
use crate::sudoers::{Authorization, AuthorizationAllowed, DirChange, Policy, PreJudgementPolicy};
use crate::system::interface::UserId;
use crate::system::term::current_tty_name;
use crate::system::timestamp::{RecordScope, SessionRecordFile, TouchResult};
use crate::system::{escape_os_str_lossy, Process};
mod list;
/// Source of sudo policy: `init` loads the configuration, `judge`
/// evaluates it against a concrete invocation context.
pub trait PolicyPlugin {
    type PreJudgementPolicy: PreJudgementPolicy;
    type Policy: Policy;
    /// Load the policy before any invocation context is known.
    fn init(&mut self) -> Result<Self::PreJudgementPolicy, Error>;
    /// Evaluate the loaded policy for one invocation.
    fn judge(
        &mut self,
        pre: Self::PreJudgementPolicy,
        context: &Context,
    ) -> Result<Self::Policy, Error>;
}
/// User-authentication backend: session setup, credential checking,
/// environment contributions, and teardown.
pub trait AuthPlugin {
    fn init(&mut self, context: &Context) -> Result<(), Error>;
    fn authenticate(&mut self, non_interactive: bool, max_tries: u16) -> Result<(), Error>;
    /// Runs after successful authentication; may contribute extra
    /// environment variables for the target command.
    fn pre_exec(&mut self, target_user: &str) -> Result<Environment, Error>;
    fn cleanup(&mut self);
}
/// The sudo execution pipeline, parameterized over the policy source and
/// the authentication backend.
pub struct Pipeline<Policy: PolicyPlugin, Auth: AuthPlugin> {
    pub policy: Policy,
    pub authenticator: Auth,
}
impl<Policy: PolicyPlugin, Auth: AuthPlugin> Pipeline<Policy, Auth> {
    /// Full pipeline: load policy, judge the request, authenticate, build
    /// the target environment, and execute the command.
    pub fn run(mut self, cmd_opts: SudoOptions) -> Result<(), Error> {
        let pre = self.policy.init()?;
        let mut context = build_context(cmd_opts, &pre)?;
        let policy = self.policy.judge(pre, &context)?;
        let authorization = policy.authorization();
        match authorization {
            Authorization::Forbidden => {
                return Err(Error::auth(&format!(
                    "I'm sorry {}. I'm afraid I can't do that",
                    context.current_user.name
                )));
            }
            Authorization::Allowed(auth) => {
                self.apply_policy_to_context(&mut context, &policy)?;
                self.auth_and_update_record_file(&context, auth)?;
            }
        }
        // Extra environment the authenticator wants to contribute.
        let additional_env = self.authenticator.pre_exec(&context.target_user.name)?;
        // build environment
        let current_env = std::env::vars_os().collect();
        let target_env =
            environment::get_target_environment(current_env, additional_env, &context, &policy);
        let pid = context.process.pid;
        // run command and return corresponding exit code
        let exec_result = if context.command.resolved {
            log_command_execution(&context);
            crate::exec::run_command(&context, target_env)
                .map_err(|io_error| Error::IoError(Some(context.command.command), io_error))
        } else {
            Err(Error::CommandNotFound(context.command.command))
        };
        // Cleanup runs regardless of whether the command could be started;
        // the `?` on exec_result below happens only afterwards.
        self.authenticator.cleanup();
        let ExecOutput {
            command_exit_reason,
            restore_signal_handlers,
        } = exec_result?;
        // Run any clean-up code before this line.
        restore_signal_handlers();
        // Mirror the child's termination: plain exit code, or re-deliver
        // the fatal signal so our caller observes the same outcome.
        match command_exit_reason {
            ExitReason::Code(code) => exit(code),
            ExitReason::Signal(signal) => {
                crate::system::kill(pid, signal)?;
            }
        }
        Ok(())
    }
pub fn run_validate(mut self, cmd_opts: SudoOptions) -> Result<(), Error> {
let pre = self.policy.init()?;
let context = build_context(cmd_opts, &pre)?;
match pre.validate_authorization() {
Authorization::Forbidden => {
return Err(Error::auth(&format!(
"I'm sorry {}. I'm afraid I can't do that",
context.current_user.name
)));
}
Authorization::Allowed(auth) => {
self.auth_and_update_record_file(&context, auth)?;
}
}
Ok(())
}
fn auth_and_update_record_file(
&mut self,
context: &Context,
AuthorizationAllowed {
must_authenticate,
prior_validity,
allowed_attempts,
}: AuthorizationAllowed,
) -> Result<(), Error> {
let scope = RecordScope::for_process(&Process::new());
let mut auth_status = determine_auth_status(
must_authenticate,
context.use_session_records,
scope,
context.current_user.uid,
&context.current_user.name,
prior_validity,
);
self.authenticator.init(context)?;
if auth_status.must_authenticate {
self.authenticator
.authenticate(context.non_interactive, allowed_attempts)?;
if let (Some(record_file), Some(scope)) = (&mut auth_status.record_file, scope) {
match record_file.create(scope, context.current_user.uid) {
Ok(_) => (),
Err(e) => {
auth_warn!("Could not update session record file with new record: {e}");
}
}
}
}
Ok(())
}
fn apply_policy_to_context(
&mut self,
context: &mut Context,
policy: &<Policy as PolicyPlugin>::Policy,
) -> Result<(), crate::common::Error> {
// see if the chdir flag is permitted
match policy.chdir() {
DirChange::Any => {}
DirChange::Strict(optdir) => {
if context.chdir.is_some() {
return Err(Error::ChDirNotAllowed {
chdir: context.chdir.clone().unwrap(),
command: context.command.command.clone(),
});
} else {
context.chdir = optdir.map(std::path::PathBuf::from)
}
}
}
// expand tildes in the path with the users home directory
if let Some(dir) = context.chdir.take() {
context.chdir = Some(expand_tilde_in_path(&context.target_user.name, dir)?)
}
// override the default pty behaviour if indicated
if !policy.use_pty() {
context.use_pty = false
}
Ok(())
}
}
/// Builds the execution `Context` from the CLI options, resolving the command
/// against the policy's `secure_path` when one is configured, otherwise the
/// caller's `PATH` (empty string if neither is available).
fn build_context(cmd_opts: SudoOptions, pre: &dyn PreJudgementPolicy) -> Result<Context, Error> {
    let secure_path: String = pre
        .secure_path()
        .unwrap_or_else(|| std::env::var("PATH").unwrap_or_default());
    Context::build_from_options(cmd_opts, secure_path)
}
/// This should determine what the authentication status for the given record
/// match limit and origin/target user from the context is.
///
/// When session records are enabled and a record scope is available, a fresh
/// record lets the user skip re-authentication; any error while opening or
/// reading the record file is logged and treated as "must authenticate".
fn determine_auth_status(
    must_policy_authenticate: bool,
    use_session_records: bool,
    record_for: Option<RecordScope>,
    auth_uid: UserId,
    current_user: &str,
    prior_validity: Duration,
) -> AuthStatus {
    if !must_policy_authenticate {
        AuthStatus::new(false, None)
    } else if let (true, Some(record_for)) = (use_session_records, record_for) {
        match SessionRecordFile::open_for_user(current_user, prior_validity) {
            Ok(mut sr) => {
                match sr.touch(record_for, auth_uid) {
                    // if a record was found and updated within the timeout, we do not need to authenticate
                    Ok(TouchResult::Updated { .. }) => AuthStatus::new(false, Some(sr)),
                    Ok(TouchResult::NotFound | TouchResult::Outdated { .. }) => {
                        AuthStatus::new(true, Some(sr))
                    }
                    Err(e) => {
                        auth_warn!("Unexpected error while reading session information: {e}");
                        AuthStatus::new(true, None)
                    }
                }
            }
            // if we cannot open the session record file we just assume there is none and continue as normal
            Err(e) => {
                auth_warn!("Could not use session information: {e}");
                AuthStatus::new(true, None)
            }
        }
    } else {
        AuthStatus::new(true, None)
    }
}
/// Outcome of [`determine_auth_status`]: whether the user still has to
/// authenticate, plus the opened session record file (if any) so that a
/// successful authentication can be recorded in it afterwards.
struct AuthStatus<'a> {
    must_authenticate: bool,
    record_file: Option<SessionRecordFile<'a>>,
}
impl<'a> AuthStatus<'a> {
    fn new(must_authenticate: bool, record_file: Option<SessionRecordFile<'a>>) -> AuthStatus<'a> {
        AuthStatus {
            must_authenticate,
            record_file,
        }
    }
}
/// Logs the command about to be executed (controlling tty, working directory,
/// target user and command line) through the auth logging facility.
fn log_command_execution(context: &Context) {
    // Omit the TTY field entirely when no controlling terminal is available.
    let tty_info = if let Ok(tty_name) = current_tty_name() {
        format!("TTY={} ;", escape_os_str_lossy(&tty_name))
    } else {
        String::from("")
    };
    let pwd = escape_os_str_lossy(
        std::env::current_dir()
            .as_ref()
            .map(|s| s.as_os_str())
            .unwrap_or_else(|_| OsStr::new("unknown")),
    );
    // escape_debug makes control characters in the user name visible in logs.
    let user = context.target_user.name.escape_debug().collect::<String>();
    auth_info!(
        "{} : {} PWD={} ; USER={} ; COMMAND={}",
        &context.current_user.name,
        tty_info,
        pwd,
        user,
        &context.command
    );
}
|
use crate::{
error::{RunnerError, TestResult},
util::{get_db, test_existing_directory},
TestFamily, TestID, TestToken,
};
use glob::Pattern;
use hashbrown::HashMap;
use parking_lot::Mutex;
use std::path::{Path, PathBuf};
use valis::{
source::{File as ValisFile, Package, PackageBuilder, SourceDatabase},
ValisDatabase,
};
use valis_ds::salsa::Snapshot;
#[derive(Debug)]
/// Test family running HIR name-resolution tests over a package of `*.va`
/// input files.
pub struct HirNameResTestFamily {
    // Root directory holding this family's test data.
    data_root: PathBuf,
    // Display prefix for test names (e.g. "hir/name-res/pass").
    name_prefix: String,
    // The std package that all test inputs are resolved against.
    std_package: Package,
    // Maps each test token to its source file.
    tests: HashMap<TestToken, ValisFile>,
    // NOTE(review): `INIT_PASS` constructs this field with `Mutex::new(db)`,
    // which does not match this `Snapshot<ValisDatabase>` type — one of the
    // two is stale; confirm the intended type against the rest of the crate.
    db: Snapshot<ValisDatabase>,
}
// Directory (under the data root) and display name of the "pass" family.
const PASS_FAMILY_PATH: &str = "hir-name-res-pass";
const PASS_FAMILY_NAME: &str = "hir/name-res/pass";
#[allow(non_snake_case)]
/// Initializes the "pass" test family: loads the std package from
/// `<root>/hir-name-res-pass/std` and registers one test per `*.va` file
/// found in it.
///
/// # Errors
/// Fails if the data or std directories are missing or the std package
/// cannot be built.
pub fn INIT_PASS(root_path: &Path) -> Result<Box<dyn TestFamily>, RunnerError> {
    let data_root = root_path.join(PASS_FAMILY_PATH);
    test_existing_directory(&data_root)?;
    let mut std_package_root = data_root.clone();
    std_package_root.push("std");
    test_existing_directory(&std_package_root)?;
    let mut db = get_db();
    let input_pattern = Pattern::new("*.va").unwrap();
    let mut package_builder = PackageBuilder::new(&mut db);
    package_builder.walk_directory(&std_package_root, |path| input_pattern.matches_path(path))?;
    let std_package = package_builder.finish()?;
    let package_data = db.package_data(std_package);
    // Assign sequential ids (starting at 1) to every file in the package.
    // (The original also computed an unused `name_prefix` local here, shadowed
    // by the `.into()` in the struct literal below; removed as dead code.)
    let tests = package_data
        .file_data
        .keys()
        .zip(1..)
        .map(|(file, id)| {
            (
                TestToken {
                    id: TestID::from_usize(id),
                    family_name: PASS_FAMILY_NAME,
                },
                *file,
            )
        })
        .collect();
    let family = HirNameResTestFamily {
        data_root,
        name_prefix: PASS_FAMILY_NAME.into(),
        std_package,
        tests,
        db: Mutex::new(db),
    };
    Ok(Box::new(family))
}
fn run_hir_pass_test(db: &mut ValisDatabase, input_path: &Path, input_source: &str) -> String {}
impl TestFamily for HirNameResTestFamily {
    /// Static display name of this family.
    fn name(&self) -> &'static str {
        PASS_FAMILY_NAME
    }
    /// Returns a token for every registered test file.
    fn list(&self) -> Vec<TestToken> {
        self.tests.keys().cloned().collect()
    }
    // NOTE(review): the four methods below had empty bodies in the original,
    // which cannot type-check against their declared return types; they are
    // stubbed with `todo!` so the trait impl is well-formed until real
    // implementations land.
    fn update_single_test(&self, token: TestToken) -> Result<(), RunnerError> {
        let _ = token;
        todo!("update the expected output of a single test")
    }
    fn generate_single_test(&self, token: TestToken) -> Result<(), RunnerError> {
        let _ = token;
        todo!("generate the expected output of a single test")
    }
    fn run_single_test(&self, token: TestToken) -> TestResult {
        let _ = token;
        todo!("run a single test")
    }
    fn test_name(&self, token: TestToken) -> Result<String, RunnerError> {
        let _ = token;
        todo!("derive a display name for the test")
    }
}
|
/// Demo entry point: prints a greeting, then the extension of "main.rs" as
/// computed by both implementations (they must agree).
fn main() {
    println!("Hello, world!");
    let name = "main.rs";
    println!("{}", find_file_extension(name).unwrap());
    println!("{}", find_file_extension2(name).unwrap());
}
/// Returns everything after the *first* '.' in `file`, or `None` when there
/// is no dot. Note that for "a.tar.gz" this yields "tar.gz", not "gz".
fn find_file_extension(file: &str) -> Option<&str> {
    if let Some(dot) = file.find('.') {
        Some(&file[dot + 1..])
    } else {
        None
    }
}
/// Combinator-free variant of [`find_file_extension`]: everything after the
/// first '.' via early return with `?`.
fn find_file_extension2(file: &str) -> Option<&str> {
    let dot = file.find('.')?;
    Some(&file[dot + 1..])
}
|
#![allow(non_snake_case)]
#![allow(unused)]
mod parser;
use parser::*;
fn main() {
let mut tokenizer = Tokenizer::new();
let v = tokenizer.tokenize("if (a + b == 10) { print(\"123\") }");
println!("{:?}", v);
let mut tokenizer = Tokenizer::new();
let v = tokenizer.tokenize("fn print(a: String) { c.printf(\"%s\", a); }");
println!("{:?}", v);
}
|
extern crate phrases;
use phrases::japanese;
use phrases::english::{greetings as en_greetings,farewells as en_farewells};
/// Demo entry point: prints greetings and farewells from the English and
/// Japanese sub-modules of the `phrases` crate.
fn main() {
    let en_hello = en_greetings::hello();
    let en_goodbye = en_farewells::goodbye();
    println!("Hello in English: {}", en_hello);
    println!("GoodBye in English: {}", en_goodbye);
    println!("Hello in Japanese: {}", japanese::hello());
    println!("Goodbye in Japanese: {}", japanese::goodbye());
}
|
extern crate find_folder;
extern crate hound;
extern crate sample;
extern crate rand;
use sample::{signal, Signal};
use rand::{Rng, SeedableRng, StdRng};
pub const NUM_CHANNELS: usize = 2;
pub type Frame = [i16; NUM_CHANNELS];
const SAMPLE_RATE: f64 = 44_100.0;
#[macro_use]
extern crate clap;
use clap::{App, Arg};
/// CLI entry point: parses arguments, seeds the RNG (explicit `-s` seed, else
/// a fixed seed in debug builds for reproducibility, else the unix time) and
/// blends the given WAV files together.
fn main() {
    let matches = App::new("markov_jukebox")
        .version(crate_version!())
        .arg(
            Arg::with_name("filenames")
                .multiple(true)
                .help("the path(s) of the file(s) you wish to blend together.")
                .takes_value(true)
                .required(true),
        )
        .arg(
            Arg::with_name("keep")
                .short("k")
                .help("keep used samples available for repeat uses."),
        )
        .arg(
            Arg::with_name("seed")
                .short("s")
                .takes_value(true)
                .help("the seed for the pseudo-random generator"),
        )
        .arg(Arg::with_name("debug").short("d").help(
            "just write files directly to output. overrides all other flags",
        ))
        .get_matches();
    //there used to be a `play` option. That's why this is a struct instead of a bool
    let settings = Settings {
        blend: !matches.is_present("debug"),
        remove: !matches.is_present("keep"),
    };
    // Seed priority: explicit `-s` bytes > fixed 42 (debug builds) > unix time.
    let seed: Vec<usize> = if let Some(passed_seed) = matches.value_of("seed") {
        passed_seed.as_bytes().iter().map(|&b| b as usize).collect()
    } else {
        if cfg!(debug_assertions) {
            vec![42]
        } else {
            let timestamp = std::time::SystemTime::now()
                .duration_since(std::time::UNIX_EPOCH)
                .map(|dur| dur.as_secs())
                .unwrap_or(42);
            vec![timestamp as usize]
        }
    };
    let mut rng: StdRng = SeedableRng::from_seed(seed.as_slice());
    if let Some(filenames) = matches.values_of("filenames") {
        run(filenames.collect(), settings, &mut rng).unwrap();
    } else {
        // fix: "recieved" -> "received" (typo in a user-facing message)
        println!("No filenames received");
    }
}
/// Runtime flags parsed from the CLI.
struct Settings {
    /// Consume Markov transitions once used (`-k` keeps them available).
    pub remove: bool,
    /// Blend frames through the Markov chain; `false` (`-d`) writes the input
    /// frames out unchanged.
    pub blend: bool,
}
/// Reads every input file, concatenates their frames, and writes either a
/// Markov-blended version or a straight copy depending on `settings`.
fn run<R: Rng>(filenames: Vec<&str>, settings: Settings, rng: &mut R) -> Result<(), ()> {
    let frames: Vec<Frame> = {
        let mut frames = Vec::new();
        for filename in filenames.iter() {
            let current_frames: Vec<Frame> = read_frames(filename);
            // fix: this line was mojibake'd to `frames.extend(¤t_frames);`
            // — an HTML-entity mangling of `&current` — which does not compile.
            frames.extend(&current_frames);
        }
        frames
    };
    if settings.blend {
        let blended_frames = blend_frames(&frames, rng, settings.remove);
        write_frames(&blended_frames, None);
    } else {
        write_frames(&frames, None);
    }
    Ok(())
}
// Given the file name, produces a Vec of `Frame`s which may be played back.
// Given the file name, produces a Vec of `Frame`s which may be played back.
//
// Decodes the WAV file, normalizes the samples to interleaved 16-bit stereo
// (duplicating a mono channel, keeping only the first two of >2 channels) and
// resamples the result to the fixed 44.1 kHz output rate.
fn read_frames(file_name: &str) -> Vec<Frame> {
    println!("Loading {}", file_name);
    let mut reader = hound::WavReader::open(file_name).unwrap();
    let spec = reader.spec();
    assert!(spec.channels > 0, "{} says it has 0 channels?!", file_name);
    let duration = reader.duration();
    // Frame count after resampling to SAMPLE_RATE.
    let new_duration = (duration as f64 * (SAMPLE_RATE as f64 / spec.sample_rate as f64)) as usize;
    // Normalize every source format to an i16 sample stream; floating-point
    // sources are scaled from [-1.0, 1.0] to the i16 range.
    let samples: Box<Iterator<Item = i16>> = if spec.bits_per_sample <= 16 {
        Box::new(reader.samples().map(|s| s.unwrap()))
    } else {
        Box::new(reader.samples::<f32>().map(|s| {
            let f: f32 = s.unwrap();
            (f * 32768.0) as i16
        }))
    };
    // Force exactly two interleaved channels.
    let adjusted_samples: Vec<_> = if spec.channels == 2 {
        samples.collect()
    } else if spec.channels <= 1 {
        // mono: duplicate each sample into left and right
        samples.flat_map(|s| vec![s, s]).collect()
    } else {
        // more than two channels: keep only the first two of each frame
        samples
            .enumerate()
            .filter(|&(i, _)| i % (spec.channels as usize) < 2)
            .map(|(_, s)| s)
            .collect()
    };
    let signal = signal::from_interleaved_samples::<_, Frame>(adjusted_samples.iter().cloned());
    signal
        .from_hz_to_hz(spec.sample_rate as f64, SAMPLE_RATE as f64)
        .take(new_duration)
        .collect()
}
const SILENCE: Frame = [0; NUM_CHANNELS];
/// Generates a new stream of frames by walking the second-order Markov chain
/// built from `frames`. When `remove` is set, transitions are consumed as
/// they are used (and only 3/4 of the input length is produced).
fn blend_frames<R: Rng>(frames: &Vec<Frame>, rng: &mut R, remove: bool) -> Vec<Frame> {
    let len = frames.len();
    if len == 0 {
        return Vec::new();
    }
    println!("get_next_frames");
    let mut next_frames = get_next_frames(frames);
    println!("done get_next_frames");
    let mut result = Vec::with_capacity(len);
    // Fallback emitted when the chain reaches a pair with no successors.
    let default = vec![SILENCE];
    // Markov state: the last two emitted frames, seeded from the input start.
    let mut previous = (frames[0], frames[1]);
    for i in 0..2 {
        result.push(frames[i]);
    }
    // Sort before shuffling so the random restart order is deterministic for
    // a given RNG seed, independent of HashMap iteration order.
    let mut keys: Vec<(Frame, Frame)> = next_frames.keys().map(|&k| k).collect();
    println!("sorting {}", keys.len());
    keys.sort();
    keys.reverse();
    println!("shuffling");
    rng.shuffle(&mut keys);
    let mut count = 0;
    // Consecutive inaudible picks; used to detect silent dead ends.
    let mut missed_count = 0;
    let enough = if remove { (len * 3) / 4 } else { len };
    while count < enough {
        let choices = if remove {
            next_frames
                .remove(&previous)
                .and_then(|c| if c.len() > 0 { Some(c) } else { None })
                .unwrap_or_else(|| {
                    if cfg!(debug_assertions) {
                        println!("default at {}", count);
                    }
                    default.clone()
                })
        } else {
            (*next_frames
                .get(&previous)
                .and_then(|c| if c.len() > 0 { Some(c) } else { None })
                .unwrap_or_else(|| {
                    if cfg!(debug_assertions) {
                        println!("default at {}", count);
                    }
                    &default
                })).clone()
        };
        let next = *rng.choose(&choices).unwrap();
        // Only emit the frame if its local neighbourhood is audible; long
        // runs of silence are skipped rather than emitted.
        if is_audible(&next) || is_audible(&previous.0) || is_audible(&previous.1) {
            result.push(next);
            missed_count = 0;
        } else {
            missed_count += 1;
        }
        if missed_count > 16 {
            // Too much silence in a row: jump to a random state to escape.
            previous = *rng.choose(&keys).unwrap();
            println!("rng.choose => {:?}", previous);
        } else {
            previous = (previous.1, next);
        }
        count += 1;
    }
    result
}
use std::collections::HashMap;
/// Second-order Markov table: maps a pair of consecutive frames to every
/// frame that was observed to follow that pair in the source material.
type NextFrames = HashMap<(Frame, Frame), Vec<Frame>>;
/// Builds the second-order Markov transition table: every overlapping window
/// of three frames contributes one transition
/// `(frame[i], frame[i+1]) -> frame[i+2]`.
fn get_next_frames(frames: &Vec<Frame>) -> NextFrames {
    let mut result = HashMap::new();
    for window in frames.windows(3) {
        // `or_insert_with` over `or_insert(Vec::new())`: build the default
        // lazily, only when the key is actually vacant.
        result
            .entry((window[0], window[1]))
            .or_insert_with(Vec::new)
            .push(window[2]);
    }
    result
}
/// True when the frame's loudest channel exceeds the silence threshold (128).
fn is_audible(frame: &Frame) -> bool {
    let peak = magnitude(frame);
    128 < peak
}
use std::cmp::max;
/// Peak absolute amplitude across the frame's channels.
///
/// Uses `saturating_abs` because `i16::MIN.abs()` does not fit in `i16`
/// (it panics in debug builds and wraps to `i16::MIN` in release); a sample
/// of `i16::MIN` now clamps to `i16::MAX` instead.
fn magnitude(frame: &Frame) -> i16 {
    frame
        .iter()
        .fold(0, |acc, channel_value| max(acc, channel_value.saturating_abs()))
}
/// Writes frames to "output/<name>.wav". When no name is given, the current
/// unix timestamp in seconds is used (falling back to "0" if the system clock
/// is before the epoch).
fn write_frames(frames: &Vec<Frame>, optional_name: Option<&str>) {
    match optional_name {
        Some(name) => write_frames_with_name(frames, name),
        None => {
            let secs = std::time::SystemTime::now()
                .duration_since(std::time::UNIX_EPOCH)
                .map(|d| d.as_secs())
                .unwrap_or(0);
            write_frames_with_name(frames, &secs.to_string())
        }
    }
}
/// Writes the frames as a 16-bit integer 44.1 kHz WAV file at
/// "output/<name>.wav". Panics if the file cannot be created or written
/// (e.g. the "output" directory does not exist).
fn write_frames_with_name(frames: &Vec<Frame>, name: &str) {
    let mut path = std::path::PathBuf::new();
    path.push("output");
    path.push(name);
    path.set_extension("wav");
    println!("Writing to {:?}", path.to_str().unwrap());
    let spec = hound::WavSpec {
        channels: NUM_CHANNELS as _,
        sample_rate: 44100,
        bits_per_sample: 16,
        sample_format: hound::SampleFormat::Int,
    };
    let mut writer = hound::WavWriter::create(path, spec).unwrap();
    // Samples are written interleaved: all channels of each frame in order.
    for frame in frames.iter() {
        for channel in 0..NUM_CHANNELS {
            writer.write_sample(frame[channel]).unwrap();
        }
    }
    writer.finalize().unwrap();
}
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
/*!
* Low-level bindings to the libuv library.
*
* This module contains a set of direct, 'bare-metal' wrappers around
* the libuv C-API.
*
* We're not bothering yet to redefine uv's structs as Rust structs
* because they are quite large and change often between versions.
* The maintenance burden is just too high. Instead we use the uv's
* `uv_handle_size` and `uv_req_size` to find the correct size of the
* structs and allocate them on the heap. This can be revisited later.
*
* There are also a collection of helper functions to ease interacting
* with the low-level API.
*
* As new functionality, existent in uv.h, is added to the rust stdlib,
* the mappings should be added in this module.
*/
#![allow(non_camel_case_types)] // C types
use libc::{size_t, c_int, c_uint, c_void, c_char, c_double};
use libc::{ssize_t, sockaddr, addrinfo};
use libc;
#[cfg(test)]
use libc::uintptr_t;
pub use self::errors::{EACCES, ECONNREFUSED, ECONNRESET, EPIPE, ECONNABORTED,
ECANCELED, EBADF, ENOTCONN, ENOENT, EADDRNOTAVAIL,
EADDRINUSE, EPERM, EINVAL};
pub use self::uv_membership::{UV_JOIN_GROUP, UV_LEAVE_GROUP};
pub use self::uv_handle_type::{UV_UNKNOWN_HANDLE, UV_ASYNC, UV_CHECK};
pub use self::uv_handle_type::{UV_FS_EVENT, UV_FS_POLL, UV_HANDLE, UV_IDLE};
pub use self::uv_handle_type::{UV_NAMED_PIPE, UV_POLL, UV_PREPARE, UV_PROCESS};
pub use self::uv_handle_type::{UV_STREAM, UV_TCP, UV_TIMER, UV_TTY, UV_UDP};
pub use self::uv_handle_type::{UV_SIGNAL, UV_FILE, UV_HANDLE_TYPE_MAX};
pub use self::uv_req_type::{UV_UNKNOWN_REQ, UV_REQ, UV_CONNECT, UV_WRITE};
pub use self::uv_req_type::{UV_SHUTDOWN, UV_UDP_SEND, UV_FS, UV_WORK};
pub use self::uv_req_type::{UV_GETADDRINFO, UV_GETNAMEINFO, UV_REQ_TYPE_MAX};
pub use self::uv_run_mode::{RUN_DEFAULT, RUN_ONCE, RUN_NOWAIT};
pub use self::uv_poll_event::{UV_READABLE, UV_WRITABLE};
/// libuv success return code.
pub static OK: c_int = 0;
/// End-of-file, encoded by libuv as a negative errno-style value.
pub static EOF: c_int = -4095;
/// Catch-all for errors libuv could not classify.
pub static UNKNOWN: c_int = -4094;
// uv-errno.h redefines error codes for windows, but not for unix...
// https://github.com/joyent/libuv/blob/master/include/uv-errno.h
#[cfg(windows)]
pub mod errors {
    //! Windows: libuv defines its own errno-style constants (uv-errno.h).
    use libc::c_int;
    pub static EACCES: c_int = -4092;
    pub static ECONNREFUSED: c_int = -4078;
    pub static ECONNRESET: c_int = -4077;
    pub static ENOENT: c_int = -4058;
    pub static ENOTCONN: c_int = -4053;
    pub static EPIPE: c_int = -4047;
    pub static ECONNABORTED: c_int = -4079;
    pub static ECANCELED: c_int = -4081;
    pub static EBADF: c_int = -4083;
    pub static EADDRNOTAVAIL: c_int = -4090;
    pub static EADDRINUSE: c_int = -4091;
    pub static EPERM: c_int = -4048;
    pub static EINVAL: c_int = -4071;
}
#[cfg(not(windows))]
pub mod errors {
    //! Unix: libuv reuses the platform's errno values, negated.
    use libc;
    use libc::c_int;
    pub static EACCES: c_int = -libc::EACCES;
    pub static ECONNREFUSED: c_int = -libc::ECONNREFUSED;
    pub static ECONNRESET: c_int = -libc::ECONNRESET;
    pub static ENOENT: c_int = -libc::ENOENT;
    pub static ENOTCONN: c_int = -libc::ENOTCONN;
    pub static EPIPE: c_int = -libc::EPIPE;
    pub static ECONNABORTED: c_int = -libc::ECONNABORTED;
    pub static ECANCELED : c_int = -libc::ECANCELED;
    pub static EBADF : c_int = -libc::EBADF;
    pub static EADDRNOTAVAIL : c_int = -libc::EADDRNOTAVAIL;
    pub static EADDRINUSE : c_int = -libc::EADDRINUSE;
    pub static EPERM: c_int = -libc::EPERM;
    pub static EINVAL: c_int = -libc::EINVAL;
}
// Bit flags for uv_process_options_t.flags (uv_process_flags in uv.h).
pub static PROCESS_SETUID: c_int = 1 << 0;
pub static PROCESS_SETGID: c_int = 1 << 1;
pub static PROCESS_WINDOWS_VERBATIM_ARGUMENTS: c_int = 1 << 2;
pub static PROCESS_DETACHED: c_int = 1 << 3;
pub static PROCESS_WINDOWS_HIDE: c_int = 1 << 4;
// Flags describing how each stdio slot of a spawned process is set up
// (uv_stdio_flags in uv.h).
pub static STDIO_IGNORE: c_int = 0x00;
pub static STDIO_CREATE_PIPE: c_int = 0x01;
pub static STDIO_INHERIT_FD: c_int = 0x02;
pub static STDIO_INHERIT_STREAM: c_int = 0x04;
pub static STDIO_READABLE_PIPE: c_int = 0x10;
pub static STDIO_WRITABLE_PIPE: c_int = 0x20;
#[cfg(unix)]
pub type uv_buf_len_t = libc::size_t;
#[cfg(windows)]
pub type uv_buf_len_t = libc::c_ulong;
// see libuv/include/uv-unix.h
// NOTE: the field order of uv_buf_t differs between unix (base, len) and
// windows (len, base); each mirrors the platform's C definition exactly.
#[repr(C)]
#[cfg(unix)]
pub struct uv_buf_t {
    pub base: *mut u8,
    pub len: uv_buf_len_t,
}
#[cfg(unix)]
pub type uv_os_socket_t = c_int;
// see libuv/include/uv-win.h
#[cfg(windows)]
#[repr(C)]
pub struct uv_buf_t {
    pub len: uv_buf_len_t,
    pub base: *mut u8,
}
#[cfg(windows)]
pub type uv_os_socket_t = libc::SOCKET;
/// How long uv_run should drive the loop (mirrors uv.h's uv_run_mode).
#[repr(C)]
pub enum uv_run_mode {
    RUN_DEFAULT = 0,
    RUN_ONCE,
    RUN_NOWAIT,
}
/// Event mask used by uv_poll callbacks.
#[repr(C)]
pub enum uv_poll_event {
    UV_READABLE = 1,
    UV_WRITABLE = 2,
}
/// Mirror of C's uv_process_options_t, passed to uv_spawn.
#[repr(C)]
pub struct uv_process_options_t {
    pub exit_cb: uv_exit_cb,
    pub file: *const libc::c_char,
    pub args: *const *const libc::c_char,
    pub env: *const *const libc::c_char,
    pub cwd: *const libc::c_char,
    pub flags: libc::c_uint,
    pub stdio_count: libc::c_int,
    pub stdio: *mut uv_stdio_container_t,
    pub uid: uv_uid_t,
    pub gid: uv_gid_t,
}
// These fields are private because they must be interfaced with through the
// functions below.
#[repr(C)]
pub struct uv_stdio_container_t {
    flags: libc::c_int,
    stream: *mut uv_stream_t,
}
// handles
// Opaque types: libuv's structs are never mirrored in Rust (see the module
// docs); they are sized via uv_handle_size/uv_req_size and heap-allocated,
// so empty enums are used as unconstructible placeholder types.
pub enum uv_async_t {}
pub enum uv_handle_t {}
pub enum uv_idle_t {}
pub enum uv_pipe_t {}
pub enum uv_poll_t {}
pub enum uv_process_t {}
pub enum uv_signal_t {}
pub enum uv_stream_t {}
pub enum uv_tcp_t {}
pub enum uv_timer_t {}
pub enum uv_tty_t {}
pub enum uv_udp_send_t {}
pub enum uv_udp_t {}
// reqs
pub enum uv_connect_t {}
pub enum uv_fs_t {}
pub enum uv_getaddrinfo_t {}
pub enum uv_req_t {}
pub enum uv_shutdown_t {}
pub enum uv_write_t {}
// misc
pub enum uv_loop_t {}
pub enum uv_connection_t {}
/// Mirror of C's uv_timespec_t (seconds + nanoseconds).
#[repr(C)]
pub struct uv_timespec_t {
    pub tv_sec: libc::c_long,
    pub tv_nsec: libc::c_long
}
/// Mirror of C's uv_stat_t; filled in by rust_uv_populate_uv_stat.
#[repr(C)]
pub struct uv_stat_t {
    pub st_dev: libc::uint64_t,
    pub st_mode: libc::uint64_t,
    pub st_nlink: libc::uint64_t,
    pub st_uid: libc::uint64_t,
    pub st_gid: libc::uint64_t,
    pub st_rdev: libc::uint64_t,
    pub st_ino: libc::uint64_t,
    pub st_size: libc::uint64_t,
    pub st_blksize: libc::uint64_t,
    pub st_blocks: libc::uint64_t,
    pub st_flags: libc::uint64_t,
    pub st_gen: libc::uint64_t,
    pub st_atim: uv_timespec_t,
    pub st_mtim: uv_timespec_t,
    pub st_ctim: uv_timespec_t,
    pub st_birthtim: uv_timespec_t
}
impl uv_stat_t {
    /// Returns a zeroed stat record, ready to be populated by the C shim.
    pub fn new() -> uv_stat_t {
        uv_stat_t {
            st_dev: 0,
            st_mode: 0,
            st_nlink: 0,
            st_uid: 0,
            st_gid: 0,
            st_rdev: 0,
            st_ino: 0,
            st_size: 0,
            st_blksize: 0,
            st_blocks: 0,
            st_flags: 0,
            st_gen: 0,
            st_atim: uv_timespec_t { tv_sec: 0, tv_nsec: 0 },
            st_mtim: uv_timespec_t { tv_sec: 0, tv_nsec: 0 },
            st_ctim: uv_timespec_t { tv_sec: 0, tv_nsec: 0 },
            st_birthtim: uv_timespec_t { tv_sec: 0, tv_nsec: 0 }
        }
    }
    /// True when the mode's file-type bits denote a regular file (S_IFREG).
    pub fn is_file(&self) -> bool {
        ((self.st_mode) & libc::S_IFMT as libc::uint64_t) == libc::S_IFREG as libc::uint64_t
    }
    /// True when the mode's file-type bits denote a directory (S_IFDIR).
    pub fn is_dir(&self) -> bool {
        ((self.st_mode) & libc::S_IFMT as libc::uint64_t) == libc::S_IFDIR as libc::uint64_t
    }
}
// C callback signatures, matching the corresponding typedefs in uv.h.
pub type uv_idle_cb = extern "C" fn(handle: *mut uv_idle_t);
pub type uv_alloc_cb = extern "C" fn(stream: *mut uv_handle_t,
                                     suggested_size: size_t,
                                     buf: *mut uv_buf_t);
pub type uv_read_cb = extern "C" fn(stream: *mut uv_stream_t,
                                    nread: ssize_t,
                                    buf: *const uv_buf_t);
pub type uv_udp_send_cb = extern "C" fn(req: *mut uv_udp_send_t,
                                        status: c_int);
pub type uv_udp_recv_cb = extern "C" fn(handle: *mut uv_udp_t,
                                        nread: ssize_t,
                                        buf: *const uv_buf_t,
                                        addr: *const sockaddr,
                                        flags: c_uint);
pub type uv_close_cb = extern "C" fn(handle: *mut uv_handle_t);
pub type uv_poll_cb = extern "C" fn(handle: *mut uv_poll_t,
                                    status: c_int,
                                    events: c_int);
pub type uv_walk_cb = extern "C" fn(handle: *mut uv_handle_t,
                                    arg: *mut c_void);
pub type uv_async_cb = extern "C" fn(handle: *mut uv_async_t);
pub type uv_connect_cb = extern "C" fn(handle: *mut uv_connect_t,
                                       status: c_int);
pub type uv_connection_cb = extern "C" fn(handle: *mut uv_stream_t,
                                          status: c_int);
pub type uv_timer_cb = extern "C" fn(handle: *mut uv_timer_t);
pub type uv_write_cb = extern "C" fn(handle: *mut uv_write_t,
                                     status: c_int);
pub type uv_getaddrinfo_cb = extern "C" fn(req: *mut uv_getaddrinfo_t,
                                           status: c_int,
                                           res: *const addrinfo);
pub type uv_exit_cb = extern "C" fn(handle: *mut uv_process_t,
                                    exit_status: i64,
                                    term_signal: c_int);
pub type uv_signal_cb = extern "C" fn(handle: *mut uv_signal_t,
                                      signum: c_int);
pub type uv_fs_cb = extern "C" fn(req: *mut uv_fs_t);
pub type uv_shutdown_cb = extern "C" fn(req: *mut uv_shutdown_t, status: c_int);
// Platform-specific uid/gid types used in uv_process_options_t.
#[cfg(unix)] pub type uv_uid_t = libc::types::os::arch::posix88::uid_t;
#[cfg(unix)] pub type uv_gid_t = libc::types::os::arch::posix88::gid_t;
#[cfg(windows)] pub type uv_uid_t = libc::c_uchar;
#[cfg(windows)] pub type uv_gid_t = libc::c_uchar;
// NOTE(review): `#[deriving(...)]` (rather than `derive`) indicates this file
// targets pre-1.0 Rust; kept as-is to match the rest of the module.
/// Mirror of C's uv_handle_type; must stay in sync with uv.h (checked by
/// handle_sanity_check below).
#[repr(C)]
#[deriving(PartialEq)]
pub enum uv_handle_type {
    UV_UNKNOWN_HANDLE,
    UV_ASYNC,
    UV_CHECK,
    UV_FS_EVENT,
    UV_FS_POLL,
    UV_HANDLE,
    UV_IDLE,
    UV_NAMED_PIPE,
    UV_POLL,
    UV_PREPARE,
    UV_PROCESS,
    UV_STREAM,
    UV_TCP,
    UV_TIMER,
    UV_TTY,
    UV_UDP,
    UV_SIGNAL,
    UV_FILE,
    UV_HANDLE_TYPE_MAX
}
/// Mirror of C's uv_req_type on unix (checked by request_sanity_check below).
#[repr(C)]
#[cfg(unix)]
#[deriving(PartialEq)]
pub enum uv_req_type {
    UV_UNKNOWN_REQ,
    UV_REQ,
    UV_CONNECT,
    UV_WRITE,
    UV_SHUTDOWN,
    UV_UDP_SEND,
    UV_FS,
    UV_WORK,
    UV_GETADDRINFO,
    UV_GETNAMEINFO,
    UV_REQ_TYPE_MAX
}
// uv_req_type may have additional fields defined by UV_REQ_TYPE_PRIVATE.
// See UV_REQ_TYPE_PRIVATE at libuv/include/uv-win.h
#[repr(C)]
#[cfg(windows)]
#[deriving(PartialEq)]
pub enum uv_req_type {
    UV_UNKNOWN_REQ,
    UV_REQ,
    UV_CONNECT,
    UV_WRITE,
    UV_SHUTDOWN,
    UV_UDP_SEND,
    UV_FS,
    UV_WORK,
    UV_GETNAMEINFO,
    UV_GETADDRINFO,
    UV_ACCEPT,
    UV_FS_EVENT_REQ,
    UV_POLL_REQ,
    UV_PROCESS_EXIT,
    UV_READ,
    UV_UDP_RECV,
    UV_WAKEUP,
    UV_SIGNAL_REQ,
    UV_REQ_TYPE_MAX
}
/// Mirror of C's uv_membership (multicast join/leave for uv_udp_set_membership).
#[repr(C)]
#[deriving(PartialEq)]
pub enum uv_membership {
    UV_LEAVE_GROUP,
    UV_JOIN_GROUP
}
// pub unsafe fn malloc_req(req: uv_req_type) -> *mut c_void {
// assert!(req != UV_UNKNOWN_REQ && req != UV_REQ_TYPE_MAX);
// let size = uv_req_size(req);
// malloc_raw(size as uint) as *mut c_void
// }
//
// pub unsafe fn free_req(v: *mut c_void) {
// free(v as *mut c_void)
// }
// Sanity checks that the Rust enums stay in sync with the C definitions: the
// *_MAX discriminants must equal the values reported by the rust_uv C shim.
#[test]
fn handle_sanity_check() {
    unsafe {
        assert_eq!(UV_HANDLE_TYPE_MAX as libc::uintptr_t, rust_uv_handle_type_max());
    }
}
#[test]
fn request_sanity_check() {
    unsafe {
        assert_eq!(UV_REQ_TYPE_MAX as libc::uintptr_t, rust_uv_req_type_max());
    }
}
// uv_support is the result of compiling rust_uv.c
#[link(name = "uv_support", kind = "static")]
#[link(name = "uv", kind = "static")]
extern {
pub fn uv_loop_size() -> size_t;
pub fn uv_loop_init(l: *mut uv_loop_t) -> c_int;
pub fn uv_loop_close(l: *mut uv_loop_t) -> c_int;
#[cfg(test)]
fn rust_uv_handle_type_max() -> uintptr_t;
#[cfg(test)]
fn rust_uv_req_type_max() -> uintptr_t;
pub fn rust_uv_get_udp_handle_from_send_req(req: *mut uv_udp_send_t)
-> *mut uv_udp_t;
pub fn rust_uv_populate_uv_stat(req_in: *mut uv_fs_t,
stat_out: *mut uv_stat_t);
pub fn rust_uv_get_result_from_fs_req(req: *mut uv_fs_t) -> ssize_t;
pub fn rust_uv_get_ptr_from_fs_req(req: *mut uv_fs_t) -> *mut libc::c_void;
pub fn rust_uv_get_path_from_fs_req(req: *mut uv_fs_t) -> *const c_char;
pub fn rust_uv_get_loop_from_fs_req(req: *mut uv_fs_t) -> *mut uv_loop_t;
pub fn rust_uv_get_loop_from_getaddrinfo_req(req: *mut uv_fs_t)
-> *mut uv_loop_t;
pub fn rust_uv_get_stream_handle_from_connect_req(req: *mut uv_connect_t)
-> *mut uv_stream_t;
pub fn rust_uv_get_stream_handle_from_write_req(req: *mut uv_write_t)
-> *mut uv_stream_t;
pub fn rust_uv_get_data_for_uv_loop(l: *mut uv_loop_t) -> *mut c_void;
pub fn rust_uv_set_data_for_uv_loop(l: *mut uv_loop_t, data: *mut c_void);
pub fn rust_uv_get_loop_for_uv_handle(handle: *mut uv_handle_t)
-> *mut uv_loop_t;
pub fn rust_uv_get_data_for_uv_handle(handle: *mut uv_handle_t)
-> *mut c_void;
pub fn rust_uv_set_data_for_uv_handle(handle: *mut uv_handle_t,
data: *mut c_void);
pub fn rust_uv_get_data_for_req(req: *mut c_void) -> *mut c_void;
pub fn rust_uv_set_data_for_req(req: *mut c_void, data: *mut c_void);
pub fn rust_set_stdio_container_flags(c: *mut uv_stdio_container_t, flags: c_int);
pub fn rust_set_stdio_container_fd(c: *mut uv_stdio_container_t, fd: c_int);
pub fn rust_set_stdio_container_stream(c: *mut uv_stdio_container_t,
stream: *mut uv_stream_t);
pub fn rust_uv_process_pid(p: *mut uv_process_t) -> c_int;
pub fn rust_uv_guess_handle(fd: c_int) -> c_int;
// generic uv functions
pub fn uv_loop_delete(l: *mut uv_loop_t);
pub fn uv_ref(t: *mut uv_handle_t);
pub fn uv_unref(t: *mut uv_handle_t);
pub fn uv_handle_size(ty: uv_handle_type) -> size_t;
pub fn uv_req_size(ty: uv_req_type) -> size_t;
pub fn uv_run(l: *mut uv_loop_t, mode: uv_run_mode) -> c_int;
pub fn uv_close(h: *mut uv_handle_t, cb: Option<uv_close_cb>);
pub fn uv_walk(l: *mut uv_loop_t, cb: uv_walk_cb, arg: *mut c_void);
pub fn uv_buf_init(base: *mut c_char, len: c_uint) -> uv_buf_t;
pub fn uv_strerror(err: c_int) -> *const c_char;
pub fn uv_err_name(err: c_int) -> *const c_char;
pub fn uv_listen(s: *mut uv_stream_t, backlog: c_int,
cb: uv_connection_cb) -> c_int;
pub fn uv_accept(server: *mut uv_stream_t, client: *mut uv_stream_t) -> c_int;
pub fn uv_read_start(stream: *mut uv_stream_t,
on_alloc: uv_alloc_cb,
on_read: uv_read_cb) -> c_int;
pub fn uv_read_stop(stream: *mut uv_stream_t) -> c_int;
pub fn uv_shutdown(req: *mut uv_shutdown_t, handle: *mut uv_stream_t,
cb: uv_shutdown_cb) -> c_int;
pub fn uv_write(req: *mut uv_write_t, stream: *mut uv_stream_t,
buf_in: *const uv_buf_t, buf_cnt: c_int,
cb: uv_write_cb) -> c_int;
pub fn uv_cancel(req: *mut uv_req_t) -> c_int;
// idle bindings
pub fn uv_idle_init(l: *mut uv_loop_t, i: *mut uv_idle_t) -> c_int;
pub fn uv_idle_start(i: *mut uv_idle_t, cb: uv_idle_cb) -> c_int;
pub fn uv_idle_stop(i: *mut uv_idle_t) -> c_int;
// async bindings
pub fn uv_async_init(l: *mut uv_loop_t, a: *mut uv_async_t,
cb: uv_async_cb) -> c_int;
pub fn uv_async_send(a: *mut uv_async_t);
// tcp bindings
pub fn uv_tcp_init(l: *mut uv_loop_t, h: *mut uv_tcp_t) -> c_int;
pub fn uv_tcp_open(h: *mut uv_tcp_t, sock: uv_os_socket_t) -> c_int;
pub fn uv_tcp_connect(c: *mut uv_connect_t, h: *mut uv_tcp_t,
addr: *const sockaddr, cb: uv_connect_cb) -> c_int;
pub fn uv_tcp_bind(t: *mut uv_tcp_t,
addr: *const sockaddr,
flags: c_uint) -> c_int;
pub fn uv_tcp_nodelay(h: *mut uv_tcp_t, enable: c_int) -> c_int;
pub fn uv_tcp_keepalive(h: *mut uv_tcp_t, enable: c_int,
delay: c_uint) -> c_int;
pub fn uv_tcp_simultaneous_accepts(h: *mut uv_tcp_t, enable: c_int) -> c_int;
pub fn uv_tcp_getsockname(h: *const uv_tcp_t, name: *mut sockaddr,
len: *mut c_int) -> c_int;
pub fn uv_tcp_getpeername(h: *const uv_tcp_t, name: *mut sockaddr,
len: *mut c_int) -> c_int;
// udp bindings
pub fn uv_udp_init(l: *mut uv_loop_t, h: *mut uv_udp_t) -> c_int;
pub fn uv_udp_open(h: *mut uv_udp_t, sock: uv_os_socket_t) -> c_int;
pub fn uv_udp_bind(h: *mut uv_udp_t, addr: *const sockaddr,
flags: c_uint) -> c_int;
pub fn uv_udp_recv_start(server: *mut uv_udp_t,
on_alloc: uv_alloc_cb,
on_recv: uv_udp_recv_cb) -> c_int;
pub fn uv_udp_set_membership(handle: *mut uv_udp_t,
multicast_addr: *const c_char,
interface_addr: *const c_char,
membership: uv_membership) -> c_int;
pub fn uv_udp_recv_stop(server: *mut uv_udp_t) -> c_int;
pub fn uv_udp_set_multicast_loop(handle: *mut uv_udp_t, on: c_int) -> c_int;
pub fn uv_udp_set_multicast_ttl(handle: *mut uv_udp_t, ttl: c_int) -> c_int;
pub fn uv_udp_set_ttl(handle: *mut uv_udp_t, ttl: c_int) -> c_int;
pub fn uv_udp_set_broadcast(handle: *mut uv_udp_t, on: c_int) -> c_int;
pub fn uv_udp_getsockname(h: *const uv_udp_t, name: *mut sockaddr,
len: *mut c_int) -> c_int;
pub fn uv_udp_send(req: *mut uv_udp_send_t, stream: *mut uv_stream_t,
buf_in: *const uv_buf_t, buf_cnt: c_int,
addr: *const sockaddr,
cb: uv_udp_send_cb) -> c_int;
pub fn uv_udp_try_send(handle: *mut uv_udp_t,
bufs: *const uv_buf_t,
nbufs: c_uint,
addr: *const sockaddr) -> c_int;
// timer bindings
pub fn uv_timer_init(l: *mut uv_loop_t, t: *mut uv_timer_t) -> c_int;
pub fn uv_timer_start(t: *mut uv_timer_t, cb: uv_timer_cb,
timeout: libc::uint64_t,
repeat: libc::uint64_t) -> c_int;
pub fn uv_timer_stop(handle: *mut uv_timer_t) -> c_int;
pub fn uv_timer_set_repeat(handle: *mut uv_timer_t, repeat: u64);
pub fn uv_timer_get_repeat(handle: *const uv_timer_t) -> u64;
pub fn uv_timer_again(handle: *mut uv_timer_t) -> c_int;
// fs operations
pub fn uv_fs_open(loop_ptr: *mut uv_loop_t, req: *mut uv_fs_t,
path: *const c_char, flags: c_int, mode: c_int,
cb: uv_fs_cb) -> c_int;
pub fn uv_fs_unlink(loop_ptr: *mut uv_loop_t, req: *mut uv_fs_t,
path: *const c_char, cb: uv_fs_cb) -> c_int;
pub fn uv_fs_write(l: *mut uv_loop_t, req: *mut uv_fs_t, fd: c_int,
bufs: *const uv_buf_t, nbufs: c_uint,
offset: i64, cb: uv_fs_cb) -> c_int;
pub fn uv_fs_read(l: *mut uv_loop_t, req: *mut uv_fs_t, fd: c_int,
bufs: *const uv_buf_t, nbufs: c_uint,
offset: i64, cb: uv_fs_cb) -> c_int;
pub fn uv_fs_close(l: *mut uv_loop_t, req: *mut uv_fs_t, fd: c_int,
cb: uv_fs_cb) -> c_int;
pub fn uv_fs_stat(l: *mut uv_loop_t, req: *mut uv_fs_t, path: *const c_char,
cb: uv_fs_cb) -> c_int;
pub fn uv_fs_fstat(l: *mut uv_loop_t, req: *mut uv_fs_t, fd: c_int,
cb: uv_fs_cb) -> c_int;
pub fn uv_fs_mkdir(l: *mut uv_loop_t, req: *mut uv_fs_t, path: *const c_char,
mode: c_int, cb: uv_fs_cb) -> c_int;
pub fn uv_fs_rmdir(l: *mut uv_loop_t, req: *mut uv_fs_t, path: *const c_char,
cb: uv_fs_cb) -> c_int;
pub fn uv_fs_readdir(l: *mut uv_loop_t, req: *mut uv_fs_t,
path: *const c_char, flags: c_int,
cb: uv_fs_cb) -> c_int;
pub fn uv_fs_req_cleanup(req: *mut uv_fs_t);
pub fn uv_fs_fsync(handle: *mut uv_loop_t, req: *mut uv_fs_t, file: c_int,
cb: uv_fs_cb) -> c_int;
pub fn uv_fs_fdatasync(handle: *mut uv_loop_t, req: *mut uv_fs_t, file: c_int,
cb: uv_fs_cb) -> c_int;
pub fn uv_fs_ftruncate(handle: *mut uv_loop_t, req: *mut uv_fs_t, file: c_int,
offset: i64, cb: uv_fs_cb) -> c_int;
pub fn uv_fs_readlink(handle: *mut uv_loop_t, req: *mut uv_fs_t,
file: *const c_char, cb: uv_fs_cb) -> c_int;
pub fn uv_fs_symlink(handle: *mut uv_loop_t, req: *mut uv_fs_t,
src: *const c_char, dst: *const c_char, flags: c_int,
cb: uv_fs_cb) -> c_int;
pub fn uv_fs_rename(handle: *mut uv_loop_t, req: *mut uv_fs_t,
src: *const c_char, dst: *const c_char,
cb: uv_fs_cb) -> c_int;
pub fn uv_fs_utime(handle: *mut uv_loop_t, req: *mut uv_fs_t,
path: *const c_char, atime: c_double, mtime: c_double,
cb: uv_fs_cb) -> c_int;
pub fn uv_fs_link(handle: *mut uv_loop_t, req: *mut uv_fs_t,
src: *const c_char, dst: *const c_char,
cb: uv_fs_cb) -> c_int;
pub fn uv_fs_chown(handle: *mut uv_loop_t, req: *mut uv_fs_t, src: *const c_char,
uid: uv_uid_t, gid: uv_gid_t, cb: uv_fs_cb) -> c_int;
pub fn uv_fs_chmod(handle: *mut uv_loop_t, req: *mut uv_fs_t,
path: *const c_char, mode: c_int, cb: uv_fs_cb) -> c_int;
pub fn uv_fs_lstat(handle: *mut uv_loop_t, req: *mut uv_fs_t,
file: *const c_char, cb: uv_fs_cb) -> c_int;
// poll bindings
pub fn uv_poll_init_socket(l: *mut uv_loop_t, h: *mut uv_poll_t, s: uv_os_socket_t) -> c_int;
pub fn uv_poll_start(h: *mut uv_poll_t, events: c_int, cb: uv_poll_cb) -> c_int;
pub fn uv_poll_stop(h: *mut uv_poll_t) -> c_int;
// getaddrinfo
pub fn uv_getaddrinfo(loop_: *mut uv_loop_t, req: *mut uv_getaddrinfo_t,
getaddrinfo_cb: uv_getaddrinfo_cb,
node: *const c_char, service: *const c_char,
hints: *const addrinfo) -> c_int;
pub fn uv_freeaddrinfo(ai: *mut addrinfo);
// process spawning
pub fn uv_spawn(loop_ptr: *mut uv_loop_t, outptr: *mut uv_process_t,
options: *mut uv_process_options_t) -> c_int;
pub fn uv_process_kill(p: *mut uv_process_t, signum: c_int) -> c_int;
pub fn uv_kill(pid: c_int, signum: c_int) -> c_int;
// pipes
pub fn uv_pipe_init(l: *mut uv_loop_t, p: *mut uv_pipe_t,
ipc: c_int) -> c_int;
pub fn uv_pipe_open(pipe: *mut uv_pipe_t, file: c_int) -> c_int;
pub fn uv_pipe_bind(pipe: *mut uv_pipe_t, name: *const c_char) -> c_int;
pub fn uv_pipe_connect(req: *mut uv_connect_t, handle: *mut uv_pipe_t,
name: *const c_char, cb: uv_connect_cb);
// tty
pub fn uv_tty_init(l: *mut uv_loop_t, tty: *mut uv_tty_t, fd: c_int,
readable: c_int) -> c_int;
pub fn uv_tty_set_mode(tty: *mut uv_tty_t, mode: c_int) -> c_int;
pub fn uv_tty_get_winsize(tty: *mut uv_tty_t,
width: *mut c_int,
height: *mut c_int) -> c_int;
pub fn uv_tty_reset_mode() -> c_int;
// signals
pub fn uv_signal_init(loop_: *mut uv_loop_t,
handle: *mut uv_signal_t) -> c_int;
pub fn uv_signal_start(h: *mut uv_signal_t, cb: uv_signal_cb,
signum: c_int) -> c_int;
pub fn uv_signal_stop(handle: *mut uv_signal_t) -> c_int;
}
// libuv requires other native libraries on various platforms. These are all
// listed here (for each platform). The empty `extern {}` blocks exist purely
// to carry the `#[link]` attributes.
// libuv doesn't use pthread on windows;
// android libc (bionic) provides pthread, so no additional link is required.
#[cfg(all(not(windows), not(target_os = "android")))]
#[link(name = "pthread")]
extern {}
// NOTE(review): presumably for clock/timer symbols that live in librt on
// these platforms — confirm against libuv's own build configuration.
#[cfg(any(target_os = "linux",
          target_os = "dragonfly"))]
#[link(name = "rt")]
extern {}
// Winsock, process-status, and IP-helper system libraries on Windows.
#[cfg(windows)]
#[link(name = "ws2_32")]
#[link(name = "psapi")]
#[link(name = "iphlpapi")]
extern {}
// NOTE(review): inferred from the library name (kernel memory interface on
// the BSDs) — verify which libuv feature requires it.
#[cfg(any(target_os = "freebsd",
          target_os = "dragonfly"))]
#[link(name = "kvm")]
extern {}
|
use std::future::Future;
use std::sync::Arc;
use std::{fmt, io};
use once_cell::sync::Lazy;
pub(crate) mod time;
mod local_worker;
pub(crate) use local_worker::LocalHandle;
use local_worker::LocalWorker;
/// Returns the default number of local workers: one per logical CPU.
pub(crate) fn get_default_runtime_size() -> usize {
    // We use num_cpus as std::thread::available_parallelism() does not take
    // system resource constraint (e.g.: cgroups) into consideration.
    num_cpus::get()
}
#[inline(always)]
pub(super) fn spawn_local<F>(f: F)
where
    F: Future<Output = ()> + 'static,
{
    // When running inside a Yew runtime, spawn through its local handle so
    // the worker's local task count is updated; otherwise defer to tokio's
    // plain local spawner.
    if let Some(handle) = LocalHandle::try_current() {
        handle.spawn_local(f);
    } else {
        tokio::task::spawn_local(f);
    }
}
/// A pool of `LocalWorker`s onto which `!Send` futures can be pinned.
///
/// Cloning is cheap: the worker list is shared behind an `Arc`.
#[derive(Clone)]
pub(crate) struct Runtime {
    // Shared, immutable set of workers created at construction time.
    workers: Arc<Vec<LocalWorker>>,
}
impl fmt::Debug for Runtime {
    /// Manual `Debug` implementation; the workers field is rendered as a
    /// fixed placeholder string (presumably because `LocalWorker` itself
    /// does not implement `Debug` — confirm against its definition).
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("Runtime")
            .field("workers", &"Vec<LocalWorker>")
            .finish()
    }
}
impl Default for Runtime {
    /// Returns a handle to the process-wide shared runtime, lazily created
    /// on first use with `get_default_runtime_size()` workers.
    fn default() -> Self {
        // `Lazy` guarantees the runtime is constructed exactly once; every
        // `default()` call after that just clones the shared handle.
        static DEFAULT_RT: Lazy<Runtime> = Lazy::new(|| {
            Runtime::new(get_default_runtime_size()).expect("failed to create runtime.")
        });
        DEFAULT_RT.clone()
    }
}
impl Runtime {
    /// Creates a runtime backed by `size` local workers.
    ///
    /// # Errors
    ///
    /// Propagates any I/O error from `LocalWorker::new`.
    ///
    /// # Panics
    ///
    /// Panics if `size` is zero.
    pub fn new(size: usize) -> io::Result<Self> {
        // Fixed message: the check accepts a single worker (size >= 1); the
        // original text claimed "more than 1 worker", contradicting the code.
        assert!(size > 0, "must have at least 1 worker.");
        let mut workers = Vec::with_capacity(size);
        for _ in 0..size {
            let worker = LocalWorker::new()?;
            workers.push(worker);
        }
        Ok(Self {
            workers: workers.into(),
        })
    }

    /// Returns the worker with the fewest currently scheduled tasks.
    fn find_least_busy_local_worker(&self) -> &LocalWorker {
        let mut workers = self.workers.iter();
        // `new` guarantees at least one worker exists.
        let mut worker = workers.next().expect("must have at least 1 worker.");
        let mut task_count = worker.task_count();
        for current_worker in workers {
            if task_count == 0 {
                // An idle worker cannot be beaten; stop searching early.
                break;
            }
            let current_worker_task_count = current_worker.task_count();
            if current_worker_task_count < task_count {
                task_count = current_worker_task_count;
                worker = current_worker;
            }
        }
        worker
    }

    /// Spawns the `!Send` future produced by `create_task` on the least
    /// busy worker; `create_task` itself must be `Send` so it can cross to
    /// the worker's thread before being invoked.
    pub fn spawn_pinned<F, Fut>(&self, create_task: F)
    where
        F: FnOnce() -> Fut,
        F: Send + 'static,
        Fut: Future<Output = ()> + 'static,
    {
        let worker = self.find_least_busy_local_worker();
        worker.spawn_pinned(create_task);
    }
}
#[cfg(test)]
mod tests {
    use std::time::Duration;
    use futures::channel::oneshot;
    use tokio::test;
    use tokio::time::timeout;
    use super::*;

    /// Two tasks spawned back-to-back on a fresh two-worker runtime are
    /// expected to land on different threads (each task reports the id of
    /// the thread it actually ran on).
    #[test]
    async fn test_spawn_pinned_least_busy() {
        let runtime = Runtime::new(2).expect("failed to create runtime.");
        let (tx1, rx1) = oneshot::channel();
        let (tx2, rx2) = oneshot::channel();
        runtime.spawn_pinned(move || async move {
            tx1.send(std::thread::current().id())
                .expect("failed to send!");
        });
        runtime.spawn_pinned(move || async move {
            tx2.send(std::thread::current().id())
                .expect("failed to send!");
        });
        // Bounded waits so a scheduling bug fails the test instead of hanging it.
        let result1 = timeout(Duration::from_secs(5), rx1)
            .await
            .expect("task timed out")
            .expect("failed to receive");
        let result2 = timeout(Duration::from_secs(5), rx2)
            .await
            .expect("task timed out")
            .expect("failed to receive");
        // first task and second task are not on the same thread.
        assert_ne!(result1, result2);
    }

    /// `spawn_local` must work from inside a `Send` tokio task, provided a
    /// runtime worker is the current context.
    #[test]
    async fn test_spawn_local_within_send() {
        let runtime = Runtime::new(1).expect("failed to create runtime.");
        let (tx, rx) = oneshot::channel();
        runtime.spawn_pinned(move || async move {
            tokio::task::spawn(async move {
                // tokio::task::spawn_local cannot spawn tasks outside of a local context.
                //
                // yew::platform::spawn_local can spawn tasks within a Send task as long as running
                // under a Yew Runtime.
                spawn_local(async move {
                    tx.send(()).expect("failed to send!");
                })
            });
        });
        timeout(Duration::from_secs(5), rx)
            .await
            .expect("task timed out")
            .expect("failed to receive");
    }
}
|
// Copyright 2018-2019 Mozilla
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may not use
// this file except in compliance with the License. You may obtain a copy of the
// License at http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software distributed
// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
// CONDITIONS OF ANY KIND, either express or implied. See the License for the
// specific language governing permissions and limitations under the License.
use std::{fmt, io, path::PathBuf};
use crate::{backend::traits::BackendError, error::StoreError};
/// Backend-level errors for the LMDB store implementation.
#[derive(Debug)]
pub enum ErrorImpl {
    /// Error surfaced by the underlying LMDB library.
    LmdbError(lmdb::Error),
    /// The provided environment path cannot be used.
    UnsuitableEnvironmentPath(PathBuf),
    /// Underlying I/O failure.
    IoError(io::Error),
}
// Marker impl: allows this type to serve as the backend's error type.
impl BackendError for ErrorImpl {}
impl fmt::Display for ErrorImpl {
    /// Delegates to the wrapped error's own display text; path errors are
    /// rendered as a fixed tag.
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        match self {
            ErrorImpl::LmdbError(e) => write!(fmt, "{}", e),
            ErrorImpl::IoError(e) => write!(fmt, "{}", e),
            ErrorImpl::UnsuitableEnvironmentPath(_) => write!(fmt, "UnsuitableEnvironmentPath"),
        }
    }
}
impl Into<StoreError> for ErrorImpl {
fn into(self) -> StoreError {
match self {
ErrorImpl::LmdbError(lmdb::Error::Corrupted) => StoreError::DatabaseCorrupted,
ErrorImpl::LmdbError(lmdb::Error::NotFound) => StoreError::KeyValuePairNotFound,
ErrorImpl::LmdbError(lmdb::Error::BadValSize) => StoreError::KeyValuePairBadSize,
ErrorImpl::LmdbError(lmdb::Error::Invalid) => StoreError::FileInvalid,
ErrorImpl::LmdbError(lmdb::Error::MapFull) => StoreError::MapFull,
ErrorImpl::LmdbError(lmdb::Error::DbsFull) => StoreError::DbsFull,
ErrorImpl::LmdbError(lmdb::Error::ReadersFull) => StoreError::ReadersFull,
ErrorImpl::LmdbError(error) => StoreError::LmdbError(error),
ErrorImpl::UnsuitableEnvironmentPath(path) => {
StoreError::UnsuitableEnvironmentPath(path)
}
ErrorImpl::IoError(error) => StoreError::IoError(error),
}
}
}
impl From<io::Error> for ErrorImpl {
fn from(e: io::Error) -> ErrorImpl {
ErrorImpl::IoError(e)
}
}
|
extern crate nom_locate;
extern crate safe_lua;
use nom_locate::LocatedSpan;
use safe_lua::compile::Sourcemap;
use safe_lua::compile::SourcemapLoc;
/// Writing a span into a fresh sourcemap at offset 0 and reading it back
/// should yield a location for line 1 (nom_locate spans start on line 1).
#[test]
fn test_sourcemaps() {
    let loc_0 = LocatedSpan::new("Location Zero");
    let mut sm = Sourcemap::new("loc");
    sm.write_map(0, loc_0);
    assert_eq!(sm.get_location(0).unwrap(), SourcemapLoc::new(1));
}
|
use std::collections::VecDeque;
/// Advent of Code 2020, day 22 ("Crab Combat").
///
/// Part 1 plays ordinary Combat directly on the decks; part 2 delegates to
/// `game_init` (Recursive Combat). Both answers are printed to stdout.
fn main() -> std::io::Result<()> {
    let input = std::fs::read_to_string("examples/22/input.txt")?;
    // One deck per blank-line-separated section; skip(1) drops each
    // section's "Player N:" header line. Deck fronts are the top cards.
    let initial_decks: Vec<VecDeque<_>> = input
        .split("\n\n")
        .map(|x| {
            x.lines()
                .skip(1)
                .map(|line| line.parse::<u64>().unwrap())
                .collect()
        })
        .collect();
    let mut decks = initial_decks.clone();
    let total_cards: usize = decks.iter().map(|x| x.len()).sum();
    // Part 1: each round every player draws their top card; the highest
    // card wins the round and takes all drawn cards, its own card first.
    // The game ends once one deck holds every card.
    while !decks.iter().any(|x| x.len() == total_cards) {
        let mut max = (0, 0);
        let mut cards = VecDeque::new();
        for (i, deck) in decks.iter_mut().enumerate() {
            let n = deck.pop_front().unwrap();
            cards.push_back(n);
            if n > max.1 {
                max = (i, n);
            }
        }
        // Swap the winning card to the front so the winner receives it first.
        let i = cards.iter().position(|x| x == &max.1).unwrap();
        cards.swap(0, i);
        decks[max.0].append(&mut cards);
    }
    // Score the (single) non-empty deck: bottom card counts 1, the next
    // counts 2, ..., the top card counts the deck length.
    let score = |decks: &Vec<Deck>| {
        decks
            .iter()
            .find(|x| !x.is_empty())
            .unwrap()
            .iter()
            .rev()
            .enumerate()
            .fold(0, |acc, (i, x)| acc + (i as u64 + 1) * x)
    };
    println!("{:?}", score(&decks));
    // Part 2
    let (_, decks) = game_init(initial_decks);
    println!("{:?}", score(&decks));
    Ok(())
}
/// A single player's deck; the front element is the top card.
type Deck = VecDeque<u64>;
/// All players' decks at the start of a round.
type Round = Vec<Deck>;
/// Index (into the deck list) of a winning player.
type Winner = usize;

/// Plays a full game of Recursive Combat from `decks` with a fresh history.
/// Returns the winner's index and the final decks.
fn game_init(decks: Round) -> (Winner, Round) {
    game(decks, &mut Vec::new())
}

/// Plays rounds until one player holds every card, or until a configuration
/// already seen in this game repeats (in which case player 0 wins, per the
/// Recursive Combat infinite-game rule).
fn game(decks: Round, prev_rounds: &mut Vec<Round>) -> (Winner, Round) {
    let mut decks = decks;
    let total_cards: usize = decks.iter().map(|x| x.len()).sum();
    let mut winner = 0;
    while !decks.iter().any(|x| x.len() == total_cards) {
        if prev_rounds.iter().any(|x| deep_equal(x, &decks)) {
            // Repeat detected: player 0 wins the game immediately.
            winner = 0;
            break;
        }
        prev_rounds.push(decks.clone());
        let (winner_round, next_decks) = round(decks);
        decks = next_decks;
        winner = winner_round;
    }
    (winner, decks)
}

// This should use a hash function and a HashSet for O(1) repeat detection.
/// Whether two round configurations are exactly equal.
///
/// Bug fix: the original zipped decks and cards, comparing only the common
/// prefix, so e.g. `[[1], [2]]` and `[[1, 2], []]` compared equal and could
/// falsely trigger the repeat rule. Structural `==` on `Vec`/`VecDeque`
/// compares lengths as well as contents.
fn deep_equal(a: &Round, b: &Round) -> bool {
    a == b
}

/// Plays one round: determines the winner (recursing into a sub-game when
/// every player has enough remaining cards), then moves the drawn cards —
/// winner's card first — to the bottom of the winning deck.
/// (Also drops the original's redundant `decks.clone()`: the argument is
/// already owned.)
fn round(mut decks: Vec<Deck>) -> (Winner, Round) {
    let winner_round = {
        // Recurse when every player's remaining cards after drawing are at
        // least the value of the drawn card: len - 1 >= deck[0].
        if decks.iter().all(|deck| deck.len() > deck[0] as usize) {
            let (winner, _) = game(
                decks
                    .iter()
                    .map(|x| x.iter().skip(1).take(x[0] as usize).cloned().collect())
                    .collect(),
                &mut Vec::new(),
            );
            winner
        } else {
            // Highest top card wins; lexicographic deck comparison is decided
            // by the first element because all cards are distinct.
            decks.iter().enumerate().max_by_key(|x| x.1).unwrap().0
        }
    };
    let mut cards = Deck::new();
    // Rotate the winner's drawn card to the back of their own deck first...
    decks[winner_round].rotate_left(1);
    // ...then append every other player's drawn card after it.
    for (i, deck) in decks.iter_mut().enumerate() {
        if i != winner_round {
            cards.push_back(deck.pop_front().unwrap());
        }
    }
    decks[winner_round].append(&mut cards);
    (winner_round, decks)
}
|
use libsdp::*;
/// An `SdpTiming` renders as "<start> <stop>" via its `Display` impl.
#[test]
fn write() {
    let timing = SdpTiming::new(2345, 2345);
    let output = "2345 2345".to_string();
    assert_eq!(output, format!("{}", timing));
}
/// `parse_timing` stops at the CR, which comes back in the remainder slice
/// alongside the parsed value (nom-style `(rest, value)` result).
#[test]
fn parse() {
    let remains = vec![b'\r'];
    let output = SdpTiming::new(2345, 2345);
    let timing = "2345 2345\r";
    assert_eq!(Ok((remains.as_ref(), output)), parse_timing(timing.as_ref()));
}
|
use bytes::{Buf, BufMut, Bytes, BytesMut};
use super::{Body, Frame};
use crate::error::RSocketError;
use crate::utils::Writeable;
/// Body of a RESUME_OK frame: acknowledges resumption with the last
/// received implied position.
#[derive(Debug, Eq, PartialEq)]
pub struct ResumeOK {
    // The implied position carried by the frame.
    position: u64,
}
/// Builder for [`ResumeOK`] frames.
pub struct ResumeOKBuilder {
    stream_id: u32,
    flag: u16,
    value: ResumeOK,
}
impl ResumeOKBuilder {
    /// Creates a builder whose position starts at zero.
    fn new(stream_id: u32, flag: u16) -> ResumeOKBuilder {
        let value = ResumeOK { position: 0 };
        ResumeOKBuilder {
            stream_id,
            flag,
            value,
        }
    }

    /// Sets the position carried by the frame being built.
    pub fn set_position(mut self, position: u64) -> Self {
        self.value.position = position;
        self
    }

    /// Consumes the builder and produces the complete frame.
    pub fn build(self) -> Frame {
        let ResumeOKBuilder {
            stream_id,
            flag,
            value,
        } = self;
        Frame::new(stream_id, Body::ResumeOK(value), flag)
    }
}
impl ResumeOK {
    /// Decodes a RESUME_OK body: a single u64 position. Fails when fewer
    /// than 8 bytes remain in the buffer.
    pub(crate) fn decode(flag: u16, bf: &mut BytesMut) -> crate::Result<ResumeOK> {
        if bf.len() < 8 {
            return Err(RSocketError::InCompleteFrame.into());
        }
        let position = bf.get_u64();
        Ok(ResumeOK { position })
    }

    /// Starts building a RESUME_OK frame.
    pub fn builder(stream_id: u32, flag: u16) -> ResumeOKBuilder {
        ResumeOKBuilder::new(stream_id, flag)
    }

    /// Returns the implied position carried by this frame.
    pub fn get_position(&self) -> u64 {
        self.position
    }
}
impl Writeable for ResumeOK {
    /// Serializes the body: just the position as a u64.
    fn write_to(&self, bf: &mut BytesMut) {
        bf.put_u64(self.get_position())
    }
    /// Encoded length in bytes (a single u64).
    fn len(&self) -> usize {
        8
    }
}
|
/// Visible LCD height in pixels.
pub const DISPLAY_HEIGHT_PX: u8 = 144;
/// Visible LCD width in pixels.
pub const DISPLAY_WIDTH_PX: u8 = 160;
/// Size of one tile's pixel data in bytes.
pub const TILE_SIZE_BYTES: usize = 16;
// VRAM/OAM base addresses below follow the standard Game Boy memory map
// (NOTE(review): semantics inferred from the names — cross-check Pan Docs).
pub const TILE_DATA_TABLE_0_ADDR_START: u16 = 0x8800;
pub const TILE_DATA_TABLE_1_ADDR_START: u16 = 0x8000;
pub const SPRITE_PATTERN_TABLE_ADDR_START: u16 = 0x8000;
/// OAM (sprite attribute) memory base.
pub const SPRITE_ATTRIBUTE_TABLE: u16 = 0xFE00;
pub const BG_NORMAL_ADDR_START: u16 = 0x9800;
pub const BG_WINDOW_ADDR_START: u16 = 0x9C00;
/// Greenish 4-shade palette used to render DMG colors, lightest first.
pub const DMG_PALETTE: [(u8, u8, u8); 4] =
    [(137, 143, 110), (87, 92, 72), (35, 40, 34), (16, 21, 21)];
|
use crate::ui::components::button_state_image::button_state_image;
use raui_core::prelude::*;
use serde::{Deserialize, Serialize};
/// Props shared by the image button and its state image content.
#[derive(Debug, Default, Clone, Serialize, Deserialize)]
pub struct ImageButtonProps {
    /// Identifier for the button widget.
    #[serde(default)]
    pub id: String,
    /// Horizontal alignment factor of the button content.
    #[serde(default)]
    pub horizontal_alignment: Scalar,
}
// Registers the props type with RAUI's props system under a stable name.
implement_props_data!(ImageButtonProps, "ImageButtonProps");
// A button whose content is a state-driven image; the same props are passed
// to both the button behavior and the image content.
widget_component! {
    pub image_button(key, props) {
        widget! {
            (#{key} button: {props.clone()} {
                content = (button_state_image: {props.clone()})
            })
        }
    }
}
|
use std::fmt::Arguments;
use std::io::Write;
use std::fmt::Debug;
use std::io;
/// Method of writing the data log.
///
/// A `LogShape` defines how each class of log record is rendered into a
/// writer. All methods are associated functions (no `self`), so a shape is
/// a purely static formatting policy.
pub trait LogShape: Debug {
    /// Writes a record for an unrecognized level; `name` is the level label.
    //[UNK] - unknown
    fn unknown<'s, W: Write>(write: W, name: &'static str, display: Arguments<'s>) -> io::Result<()>;
    /// Writes a trace record with source location metadata.
    //[TRACE][src/main.rs][38:29] - trace
    fn trace<'s, W: Write>(write: W, line: u32, pos: u32, file: &'static str, args: Arguments<'s>) -> io::Result<()>;
    /// Writes a warning record.
    //[WAR] - warning value
    fn warning<'s, W: Write>(write: W, display: Arguments<'s>) -> io::Result<()>;
    /// Writes an info record.
    //[INF] - info value
    fn info<'s, W: Write>(write: W, display: Arguments<'s>) -> io::Result<()>;
    /// Writes an error record.
    //[ERR] - err value
    fn error<'s, W: Write>(write: W, display: Arguments<'s>) -> io::Result<()>;
    /// Writes a panic record, emitted when the program panics.
    //[PANIC] - panic program
    fn panic<'s, W: Write>(write: W, display: Arguments<'s>) -> io::Result<()>;
    /// Writes a plain print record. NOTE(review): the original comment here
    /// said "[ERR]", which looks like a copy-paste slip — the actual tag is
    /// up to the implementor.
    fn print<'s, W: Write>(write: W, display: Arguments<'s>) -> io::Result<()>;
    /// Writes a plain eprint record (same copy-paste caveat as `print`).
    fn eprint<'s, W: Write>(write: W, display: Arguments<'s>) -> io::Result<()>;
}
// A shared reference to a shape is itself a shape: every method simply
// forwards to the underlying implementation `A`. Methods are listed in the
// same order as the trait declaration.
impl<'a, A: LogShape> LogShape for &'a A {
    #[inline(always)]
    fn unknown<'s, W: Write>(write: W, name: &'static str, display: Arguments<'s>) -> io::Result<()> {
        A::unknown(write, name, display)
    }
    #[inline(always)]
    fn trace<'s, W: Write>(write: W, line: u32, pos: u32, file: &'static str, args: Arguments<'s>) -> io::Result<()> {
        A::trace(write, line, pos, file, args)
    }
    #[inline(always)]
    fn warning<'s, W: Write>(write: W, display: Arguments<'s>) -> io::Result<()> {
        A::warning(write, display)
    }
    #[inline(always)]
    fn info<'s, W: Write>(write: W, display: Arguments<'s>) -> io::Result<()> {
        A::info(write, display)
    }
    #[inline(always)]
    fn error<'s, W: Write>(write: W, display: Arguments<'s>) -> io::Result<()> {
        A::error(write, display)
    }
    #[inline(always)]
    fn panic<'s, W: Write>(write: W, display: Arguments<'s>) -> io::Result<()> {
        A::panic(write, display)
    }
    #[inline(always)]
    fn print<'s, W: Write>(write: W, display: Arguments<'s>) -> io::Result<()> {
        A::print(write, display)
    }
    #[inline(always)]
    fn eprint<'s, W: Write>(write: W, display: Arguments<'s>) -> io::Result<()> {
        A::eprint(write, display)
    }
}
// A mutable reference to a shape is itself a shape: pure forwarding to `A`,
// mirroring the `&'a A` impl, with methods in trait-declaration order.
impl<'a, A: LogShape> LogShape for &'a mut A {
    #[inline(always)]
    fn unknown<'s, W: Write>(write: W, name: &'static str, display: Arguments<'s>) -> io::Result<()> {
        A::unknown(write, name, display)
    }
    #[inline(always)]
    fn trace<'s, W: Write>(write: W, line: u32, pos: u32, file: &'static str, args: Arguments<'s>) -> io::Result<()> {
        A::trace(write, line, pos, file, args)
    }
    #[inline(always)]
    fn warning<'s, W: Write>(write: W, display: Arguments<'s>) -> io::Result<()> {
        A::warning(write, display)
    }
    #[inline(always)]
    fn info<'s, W: Write>(write: W, display: Arguments<'s>) -> io::Result<()> {
        A::info(write, display)
    }
    #[inline(always)]
    fn error<'s, W: Write>(write: W, display: Arguments<'s>) -> io::Result<()> {
        A::error(write, display)
    }
    #[inline(always)]
    fn panic<'s, W: Write>(write: W, display: Arguments<'s>) -> io::Result<()> {
        A::panic(write, display)
    }
    #[inline(always)]
    fn print<'s, W: Write>(write: W, display: Arguments<'s>) -> io::Result<()> {
        A::print(write, display)
    }
    #[inline(always)]
    fn eprint<'s, W: Write>(write: W, display: Arguments<'s>) -> io::Result<()> {
        A::eprint(write, display)
    }
}
|
use proconio::{fastout, input};
use std::collections::VecDeque;
/// Competitive-programming solution: reads `n` and `3*n` values via
/// proconio, then runs a DP over pairs of "held" values.
#[fastout]
fn main() {
    input! {
        n: usize,
        a_vec: [i64; 3*n],
    };
    // Shift inputs to 0-based; index `n` serves as a wildcard/"don't care"
    // slot in the DP table.
    let a_vec: Vec<i64> = a_vec.iter().map(|a| a - 1).collect();
    // dp[i][j]: best count achievable while holding values i and j
    // (NOTE(review): interpretation inferred from the update pattern —
    // confirm against the original problem statement).
    let mut dp: Vec<Vec<i64>> = vec![vec![i64::MIN; n + 1]; n + 1];
    // Relax dp at (i, j), symmetrically, and at all wildcard projections.
    let upd = |dp: &mut Vec<Vec<i64>>, i: usize, j: usize, x: i64| {
        dp[i][j] = dp[i][j].max(x);
        dp[j][i] = dp[j][i].max(x);
        dp[i][n] = dp[i][n].max(x);
        dp[n][i] = dp[n][i].max(x);
        dp[j][n] = dp[j][n].max(x);
        dp[n][j] = dp[n][j].max(x);
        dp[n][n] = dp[n][n].max(x);
    };
    upd(&mut dp, a_vec[0] as usize, a_vec[1] as usize, 0);
    let mut base = 0;
    for i in 0..n - 1 {
        let i1 = 2 + 3 * i;
        let mut x = a_vec[i1];
        let mut y = a_vec[i1 + 1];
        let mut z = a_vec[i1 + 2];
        // An all-equal triple always scores, independent of the DP state.
        if x == y && y == z {
            base += 1;
            continue;
        }
        // Collect candidate transitions for all three rotations of
        // (x, y, z); they are applied only after the loop so updates from
        // one rotation cannot feed into another within the same step.
        let mut q: VecDeque<(usize, usize, i64)> = VecDeque::new();
        for _ in 0..3 {
            for j in 0..n + 1 {
                let mut now = dp[j][n];
                if y == z {
                    now = now.max(dp[j][y as usize] + 1);
                }
                q.push_back((j, x as usize, now));
            }
            let now = std::cmp::max(dp[n][n], dp[z as usize][z as usize] + 1);
            q.push_back((x as usize, y as usize, now));
            // Rotate (x, y, z) -> (y, z, x).
            std::mem::swap(&mut x, &mut y);
            std::mem::swap(&mut y, &mut z);
        }
        while let Some(t) = q.pop_front() {
            let (i, j, x) = t;
            upd(&mut dp, i, j, x);
        }
    }
    let mut ans = dp[n][n];
    // The final input value can complete one more group if held twice.
    let l = a_vec[n * 3 - 1] as usize;
    ans = ans.max(dp[l][l] + 1);
    ans += base;
    println!("{}", ans);
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.