text stringlengths 8 4.13M |
|---|
#[doc = "Reader of register ENABLED0"]
pub type R = crate::R<u32, super::ENABLED0>;
#[doc = "Reader of field `clk_sys_sram3`"]
pub type CLK_SYS_SRAM3_R = crate::R<bool, bool>;
#[doc = "Reader of field `clk_sys_sram2`"]
pub type CLK_SYS_SRAM2_R = crate::R<bool, bool>;
#[doc = "Reader of field `clk_sys_sram1`"]
pub type CLK_SYS_SRAM1_R = crate::R<bool, bool>;
#[doc = "Reader of field `clk_sys_sram0`"]
pub type CLK_SYS_SRAM0_R = crate::R<bool, bool>;
#[doc = "Reader of field `clk_sys_spi1`"]
pub type CLK_SYS_SPI1_R = crate::R<bool, bool>;
#[doc = "Reader of field `clk_peri_spi1`"]
pub type CLK_PERI_SPI1_R = crate::R<bool, bool>;
#[doc = "Reader of field `clk_sys_spi0`"]
pub type CLK_SYS_SPI0_R = crate::R<bool, bool>;
#[doc = "Reader of field `clk_peri_spi0`"]
pub type CLK_PERI_SPI0_R = crate::R<bool, bool>;
#[doc = "Reader of field `clk_sys_sio`"]
pub type CLK_SYS_SIO_R = crate::R<bool, bool>;
#[doc = "Reader of field `clk_sys_rtc`"]
pub type CLK_SYS_RTC_R = crate::R<bool, bool>;
#[doc = "Reader of field `clk_rtc_rtc`"]
pub type CLK_RTC_RTC_R = crate::R<bool, bool>;
#[doc = "Reader of field `clk_sys_rosc`"]
pub type CLK_SYS_ROSC_R = crate::R<bool, bool>;
#[doc = "Reader of field `clk_sys_rom`"]
pub type CLK_SYS_ROM_R = crate::R<bool, bool>;
#[doc = "Reader of field `clk_sys_resets`"]
pub type CLK_SYS_RESETS_R = crate::R<bool, bool>;
#[doc = "Reader of field `clk_sys_pwm`"]
pub type CLK_SYS_PWM_R = crate::R<bool, bool>;
#[doc = "Reader of field `clk_sys_psm`"]
pub type CLK_SYS_PSM_R = crate::R<bool, bool>;
#[doc = "Reader of field `clk_sys_pll_usb`"]
pub type CLK_SYS_PLL_USB_R = crate::R<bool, bool>;
#[doc = "Reader of field `clk_sys_pll_sys`"]
pub type CLK_SYS_PLL_SYS_R = crate::R<bool, bool>;
#[doc = "Reader of field `clk_sys_pio1`"]
pub type CLK_SYS_PIO1_R = crate::R<bool, bool>;
#[doc = "Reader of field `clk_sys_pio0`"]
pub type CLK_SYS_PIO0_R = crate::R<bool, bool>;
#[doc = "Reader of field `clk_sys_pads`"]
pub type CLK_SYS_PADS_R = crate::R<bool, bool>;
#[doc = "Reader of field `clk_sys_vreg_and_chip_reset`"]
pub type CLK_SYS_VREG_AND_CHIP_RESET_R = crate::R<bool, bool>;
#[doc = "Reader of field `clk_sys_jtag`"]
pub type CLK_SYS_JTAG_R = crate::R<bool, bool>;
#[doc = "Reader of field `clk_sys_io`"]
pub type CLK_SYS_IO_R = crate::R<bool, bool>;
#[doc = "Reader of field `clk_sys_i2c1`"]
pub type CLK_SYS_I2C1_R = crate::R<bool, bool>;
#[doc = "Reader of field `clk_sys_i2c0`"]
pub type CLK_SYS_I2C0_R = crate::R<bool, bool>;
#[doc = "Reader of field `clk_sys_dma`"]
pub type CLK_SYS_DMA_R = crate::R<bool, bool>;
#[doc = "Reader of field `clk_sys_busfabric`"]
pub type CLK_SYS_BUSFABRIC_R = crate::R<bool, bool>;
#[doc = "Reader of field `clk_sys_busctrl`"]
pub type CLK_SYS_BUSCTRL_R = crate::R<bool, bool>;
#[doc = "Reader of field `clk_sys_adc`"]
pub type CLK_SYS_ADC_R = crate::R<bool, bool>;
#[doc = "Reader of field `clk_adc_adc`"]
pub type CLK_ADC_ADC_R = crate::R<bool, bool>;
#[doc = "Reader of field `clk_sys_clocks`"]
pub type CLK_SYS_CLOCKS_R = crate::R<bool, bool>;
impl R {
#[doc = "Bit 31"]
#[inline(always)]
pub fn clk_sys_sram3(&self) -> CLK_SYS_SRAM3_R {
CLK_SYS_SRAM3_R::new(((self.bits >> 31) & 0x01) != 0)
}
#[doc = "Bit 30"]
#[inline(always)]
pub fn clk_sys_sram2(&self) -> CLK_SYS_SRAM2_R {
CLK_SYS_SRAM2_R::new(((self.bits >> 30) & 0x01) != 0)
}
#[doc = "Bit 29"]
#[inline(always)]
pub fn clk_sys_sram1(&self) -> CLK_SYS_SRAM1_R {
CLK_SYS_SRAM1_R::new(((self.bits >> 29) & 0x01) != 0)
}
#[doc = "Bit 28"]
#[inline(always)]
pub fn clk_sys_sram0(&self) -> CLK_SYS_SRAM0_R {
CLK_SYS_SRAM0_R::new(((self.bits >> 28) & 0x01) != 0)
}
#[doc = "Bit 27"]
#[inline(always)]
pub fn clk_sys_spi1(&self) -> CLK_SYS_SPI1_R {
CLK_SYS_SPI1_R::new(((self.bits >> 27) & 0x01) != 0)
}
#[doc = "Bit 26"]
#[inline(always)]
pub fn clk_peri_spi1(&self) -> CLK_PERI_SPI1_R {
CLK_PERI_SPI1_R::new(((self.bits >> 26) & 0x01) != 0)
}
#[doc = "Bit 25"]
#[inline(always)]
pub fn clk_sys_spi0(&self) -> CLK_SYS_SPI0_R {
CLK_SYS_SPI0_R::new(((self.bits >> 25) & 0x01) != 0)
}
#[doc = "Bit 24"]
#[inline(always)]
pub fn clk_peri_spi0(&self) -> CLK_PERI_SPI0_R {
CLK_PERI_SPI0_R::new(((self.bits >> 24) & 0x01) != 0)
}
#[doc = "Bit 23"]
#[inline(always)]
pub fn clk_sys_sio(&self) -> CLK_SYS_SIO_R {
CLK_SYS_SIO_R::new(((self.bits >> 23) & 0x01) != 0)
}
#[doc = "Bit 22"]
#[inline(always)]
pub fn clk_sys_rtc(&self) -> CLK_SYS_RTC_R {
CLK_SYS_RTC_R::new(((self.bits >> 22) & 0x01) != 0)
}
#[doc = "Bit 21"]
#[inline(always)]
pub fn clk_rtc_rtc(&self) -> CLK_RTC_RTC_R {
CLK_RTC_RTC_R::new(((self.bits >> 21) & 0x01) != 0)
}
#[doc = "Bit 20"]
#[inline(always)]
pub fn clk_sys_rosc(&self) -> CLK_SYS_ROSC_R {
CLK_SYS_ROSC_R::new(((self.bits >> 20) & 0x01) != 0)
}
#[doc = "Bit 19"]
#[inline(always)]
pub fn clk_sys_rom(&self) -> CLK_SYS_ROM_R {
CLK_SYS_ROM_R::new(((self.bits >> 19) & 0x01) != 0)
}
#[doc = "Bit 18"]
#[inline(always)]
pub fn clk_sys_resets(&self) -> CLK_SYS_RESETS_R {
CLK_SYS_RESETS_R::new(((self.bits >> 18) & 0x01) != 0)
}
#[doc = "Bit 17"]
#[inline(always)]
pub fn clk_sys_pwm(&self) -> CLK_SYS_PWM_R {
CLK_SYS_PWM_R::new(((self.bits >> 17) & 0x01) != 0)
}
#[doc = "Bit 16"]
#[inline(always)]
pub fn clk_sys_psm(&self) -> CLK_SYS_PSM_R {
CLK_SYS_PSM_R::new(((self.bits >> 16) & 0x01) != 0)
}
#[doc = "Bit 15"]
#[inline(always)]
pub fn clk_sys_pll_usb(&self) -> CLK_SYS_PLL_USB_R {
CLK_SYS_PLL_USB_R::new(((self.bits >> 15) & 0x01) != 0)
}
#[doc = "Bit 14"]
#[inline(always)]
pub fn clk_sys_pll_sys(&self) -> CLK_SYS_PLL_SYS_R {
CLK_SYS_PLL_SYS_R::new(((self.bits >> 14) & 0x01) != 0)
}
#[doc = "Bit 13"]
#[inline(always)]
pub fn clk_sys_pio1(&self) -> CLK_SYS_PIO1_R {
CLK_SYS_PIO1_R::new(((self.bits >> 13) & 0x01) != 0)
}
#[doc = "Bit 12"]
#[inline(always)]
pub fn clk_sys_pio0(&self) -> CLK_SYS_PIO0_R {
CLK_SYS_PIO0_R::new(((self.bits >> 12) & 0x01) != 0)
}
#[doc = "Bit 11"]
#[inline(always)]
pub fn clk_sys_pads(&self) -> CLK_SYS_PADS_R {
CLK_SYS_PADS_R::new(((self.bits >> 11) & 0x01) != 0)
}
#[doc = "Bit 10"]
#[inline(always)]
pub fn clk_sys_vreg_and_chip_reset(&self) -> CLK_SYS_VREG_AND_CHIP_RESET_R {
CLK_SYS_VREG_AND_CHIP_RESET_R::new(((self.bits >> 10) & 0x01) != 0)
}
#[doc = "Bit 9"]
#[inline(always)]
pub fn clk_sys_jtag(&self) -> CLK_SYS_JTAG_R {
CLK_SYS_JTAG_R::new(((self.bits >> 9) & 0x01) != 0)
}
#[doc = "Bit 8"]
#[inline(always)]
pub fn clk_sys_io(&self) -> CLK_SYS_IO_R {
CLK_SYS_IO_R::new(((self.bits >> 8) & 0x01) != 0)
}
#[doc = "Bit 7"]
#[inline(always)]
pub fn clk_sys_i2c1(&self) -> CLK_SYS_I2C1_R {
CLK_SYS_I2C1_R::new(((self.bits >> 7) & 0x01) != 0)
}
#[doc = "Bit 6"]
#[inline(always)]
pub fn clk_sys_i2c0(&self) -> CLK_SYS_I2C0_R {
CLK_SYS_I2C0_R::new(((self.bits >> 6) & 0x01) != 0)
}
#[doc = "Bit 5"]
#[inline(always)]
pub fn clk_sys_dma(&self) -> CLK_SYS_DMA_R {
CLK_SYS_DMA_R::new(((self.bits >> 5) & 0x01) != 0)
}
#[doc = "Bit 4"]
#[inline(always)]
pub fn clk_sys_busfabric(&self) -> CLK_SYS_BUSFABRIC_R {
CLK_SYS_BUSFABRIC_R::new(((self.bits >> 4) & 0x01) != 0)
}
#[doc = "Bit 3"]
#[inline(always)]
pub fn clk_sys_busctrl(&self) -> CLK_SYS_BUSCTRL_R {
CLK_SYS_BUSCTRL_R::new(((self.bits >> 3) & 0x01) != 0)
}
#[doc = "Bit 2"]
#[inline(always)]
pub fn clk_sys_adc(&self) -> CLK_SYS_ADC_R {
CLK_SYS_ADC_R::new(((self.bits >> 2) & 0x01) != 0)
}
#[doc = "Bit 1"]
#[inline(always)]
pub fn clk_adc_adc(&self) -> CLK_ADC_ADC_R {
CLK_ADC_ADC_R::new(((self.bits >> 1) & 0x01) != 0)
}
#[doc = "Bit 0"]
#[inline(always)]
pub fn clk_sys_clocks(&self) -> CLK_SYS_CLOCKS_R {
CLK_SYS_CLOCKS_R::new((self.bits & 0x01) != 0)
}
}
|
use chrono::Datelike;
use serde_derive::{Deserialize, Serialize};
// A (year, month) pair identifying one calendar month.
#[derive(PartialEq, PartialOrd, Eq, Ord, Serialize, Deserialize, Debug, Clone)]
pub(crate) struct CalendarMonth {
// Field order matters: the derived Ord/PartialOrd compare year first, then month.
year: i32,
month: Month,
}
impl CalendarMonth {
    /// Creates a calendar month from a year and a `Month`.
    pub fn new(year: i32, month: Month) -> CalendarMonth {
        CalendarMonth { year, month }
    }
    /// Advances to the following calendar month, rolling the year forward
    /// when stepping past December.
    pub fn increment(&mut self) {
        if self.month == Month::Dec {
            self.year += 1;
        }
        self.month.increment();
    }
}
// Builds a CalendarMonth from any chrono date-like value (NaiveDate, Date<Tz>, ...).
impl<T: Datelike> From<T> for CalendarMonth {
fn from(date: T) -> Self {
CalendarMonth {
year: date.year(),
// Datelike::month() is 1-based, matching Month's 1-based discriminants.
month: date.month().into(),
}
}
}
// Calendar months with 1-based discriminants (Jan = 1 .. Dec = 12), so the
// derived Ord matches chronological order and `as u32` matches chrono's
// month numbering.
#[derive(PartialEq, PartialOrd, Eq, Ord, Serialize, Deserialize, Debug, Clone)]
pub enum Month {
Jan=1,
Feb,
Mar,
Apr,
May,
Jun,
Jul,
Aug,
Sep,
Oct,
Nov,
Dec,
}
impl Month {
pub fn increment(&mut self) {
*self = ((self.clone() as u32 + 1) % 12).into()
}
}
impl<T: Into<u32>> From<T> for Month {
fn from(m: T) -> Month {
match m.into() {
1 => Month::Jan,
2 => Month::Feb,
3 => Month::Mar,
4 => Month::Apr,
5 => Month::May,
6 => Month::Jun,
7 => Month::Jul,
8 => Month::Aug,
9 => Month::Sep,
10 => Month::Oct,
11 => Month::Nov,
12 => Month::Dec,
_ => panic!("there are only 12 months you dingus!"),
}
}
}
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn less_than() {
        // Derived Ord compares year first, then month.
        let jan_2018 = CalendarMonth::new(2018, Month::Jan);
        assert!(jan_2018 < CalendarMonth::new(2018, Month::Feb));
        assert!(jan_2018 < CalendarMonth::new(2019, Month::Jan));
    }
    #[test]
    fn increment_month() {
        let mut m = Month::Jan;
        m.increment();
        assert_eq!(m, Month::Feb);
        // December wraps around to January.
        m = Month::Dec;
        m.increment();
        assert_eq!(m, Month::Jan);
    }
}
|
use std::collections::BTreeMap;
extern crate rust_htslib;
extern crate bio;
use rust_htslib::bam;
use rust_htslib::bam::Read;
use std::io;
use std::env;
use std::string;
use bio::io::fasta;
// GRCh37 reference FASTA header lines for chromosomes 1-22, X and Y.
// NOTE(review): not referenced anywhere in the visible code — presumably used
// for header validation elsewhere or left over; confirm before removing.
const FASTA_SEQS: &'static [ &'static str ] = &[
"1 dna:chromosome chromosome:GRCh37:1:1:249250621:1",
"2 dna:chromosome chromosome:GRCh37:2:1:243199373:1",
"3 dna:chromosome chromosome:GRCh37:3:1:198022430:1",
"4 dna:chromosome chromosome:GRCh37:4:1:191154276:1",
"5 dna:chromosome chromosome:GRCh37:5:1:180915260:1",
"6 dna:chromosome chromosome:GRCh37:6:1:171115067:1",
"7 dna:chromosome chromosome:GRCh37:7:1:159138663:1",
"8 dna:chromosome chromosome:GRCh37:8:1:146364022:1",
"9 dna:chromosome chromosome:GRCh37:9:1:141213431:1",
"10 dna:chromosome chromosome:GRCh37:10:1:135534747:1",
"11 dna:chromosome chromosome:GRCh37:11:1:135006516:1",
"12 dna:chromosome chromosome:GRCh37:12:1:133851895:1",
"13 dna:chromosome chromosome:GRCh37:13:1:115169878:1",
"14 dna:chromosome chromosome:GRCh37:14:1:107349540:1",
"15 dna:chromosome chromosome:GRCh37:15:1:102531392:1",
"16 dna:chromosome chromosome:GRCh37:16:1:90354753:1",
"17 dna:chromosome chromosome:GRCh37:17:1:81195210:1",
"18 dna:chromosome chromosome:GRCh37:18:1:78077248:1",
"19 dna:chromosome chromosome:GRCh37:19:1:59128983:1",
"20 dna:chromosome chromosome:GRCh37:20:1:63025520:1",
"21 dna:chromosome chromosome:GRCh37:21:1:48129895:1",
"22 dna:chromosome chromosome:GRCh37:22:1:51304566:1",
"X dna:chromosome chromosome:GRCh37:X:1:155270560:1",
"Y dna:chromosome chromosome:GRCh37:Y:2649521:59034049:1"
];
/// Pileup scan over a BAM file: for every position with >= 20x usable
/// coverage, counts per-base read support and histograms the minor-allele
/// fraction (as an integer percent) across the whole file.
fn main() {
    // BAM path comes from the 1st CLI argument.
    let mut bamfile: String = string::String::new();
    if let Some(arg1) = env::args().nth(1) {
        bamfile = arg1;
    }
    let bam = bam::Reader::new(&bamfile).ok().expect("Error opening bam.");
    // ratio (percent) -> number of positions observed with that ratio
    let mut ratio_counter: BTreeMap<u32, u32> = BTreeMap::new();
    let mut idx: u64 = 0;
    // Sentinel that never matches a real tid, so the first pileup logs.
    let mut processed_chr = 9999;
    // pileup over all covered sites
    for p in bam.pileup() {
        let pileup = p.ok().expect("Error reading BAM file.");
        // Skip shallow positions; ratios would be too noisy below 20x.
        if pileup.depth() < 20 {
            continue;
        }
        // base (as raw u8) -> count of reads showing that base here
        let mut alignment_counter: BTreeMap<u8, u32> = BTreeMap::new();
        for alignment in pileup.alignments() {
            let record = alignment.record();
            if record.is_unmapped() || record.is_duplicate() {
                continue;
            }
            let seq = record.seq();
            let base = seq[alignment.qpos()];
            // entry API: one map lookup instead of get_mut + insert.
            *alignment_counter.entry(base).or_insert(0) += 1;
        }
        // Total coverage and the majority-base count at this position.
        let sum: u32 = alignment_counter.values().sum();
        let max: u32 = alignment_counter.values().cloned().max().unwrap_or(0);
        if sum > 0 {
            idx += 1;
            if processed_chr != pileup.tid() {
                processed_chr = pileup.tid();
                println!("Processing chromosome: {}. Processed positions so far: {}", processed_chr + 1, idx);
            }
            // Count minority as pct of total
            let ratio = (sum - max) * 100 / sum;
            *ratio_counter.entry(ratio).or_insert(0) += 1;
        }
    }
    for (ratio, &number) in ratio_counter.iter() {
        println!("Ratio {}: {}", ratio, number);
    }
}
|
pub struct Solution {}
/**
https://leetcode.com/problems/repeated-string-match/
**/
impl Solution {
    /// Returns the minimum number of times `a` must be repeated so that `b`
    /// is a substring of the repetition, or -1 if no repetition works.
    pub fn repeated_string_match(a: String, b: String) -> i32 {
        let target_len = b.len();
        let mut buf = String::new();
        let mut count = 0;
        // Repeat `a` until the buffer is at least as long as `b`.
        while buf.len() < target_len {
            buf.push_str(&a);
            count += 1;
        }
        if buf.contains(b.as_str()) {
            return count;
        }
        // One extra copy covers matches that straddle the repetition seam;
        // if `b` still isn't found, it never will be.
        buf.push_str(&a);
        if buf.contains(b.as_str()) {
            count + 1
        } else {
            -1
        }
    }
}
/* What is the millionth lexicographic permutation of the digits 0, 1, 2, 3, 4, 5, 6, 7, 8 and 9?
Where a lexicographic permutation is defined as the ordered sequence of permutations, i.e. 01, 10, or 012, 021, 102,...
Observe that we may simply find the solution with a factorial function.
*/
/// Computes `a!` as an i64. The product over an empty range is 1, which
/// covers the `0! == 1` case without a special branch.
fn factorial(a: u8) -> i64 {
    (1..=i64::from(a)).product()
}
#[test]
fn factorial_test() {
    // The old test only printed values and asserted nothing; verify the
    // base case and the recurrence a! == a * (a-1)! instead.
    assert_eq!(factorial(0), 1);
    let mut expected: i64 = 1;
    for a in 1..10u8 {
        expected *= a as i64;
        assert_eq!(factorial(a), expected, "{}! mismatch", a);
    }
}
/*
fn facto_string(k: &i64, guess: u8) -> String {
///
let s: String::new();
while{
}
s
}
*/
// turns out that I just did this one by hand.
fn main() {
let mut want: i64 = 1000000;
let mut s = String::from("");
for i in (0..10_u8).rev() {
for _ in 0..10 {
if want - factorial(i - 1) > 0 {
s.push(char::from(i));
want -= factorial(i - 1) * i as i64;
}
}
}
println!("the resultant string was {}", s);
}
|
#![cfg(test)]
use super::*;
use frame_support::{impl_outer_origin, parameter_types, weights::Weight};
use sp_core::H256;
use sp_runtime::{
testing::Header,
traits::{BlakeTwo256, IdentityLookup},
Perbill,
};
// Generates the outer Origin enum for the mock runtime.
impl_outer_origin! {
pub enum Origin for Test where system = frame_system {}
}
// Mock runtime type that implements the pallet/system config traits below.
#[derive(Clone, Eq, Debug, PartialEq)]
pub struct Test;
// frame_system limits for the mock runtime.
parameter_types! {
pub const BlockHashCount: u64 = 250;
pub const MaximumBlockWeight: Weight = 1024;
pub const MaximumBlockLength: u32 = 2 * 1024;
pub const AvailableBlockRatio: Perbill = Perbill::one();
}
// Minimal frame_system configuration: u64 account ids and block numbers,
// identity address lookup, unit types for everything the tests don't exercise.
impl frame_system::Trait for Test {
type BaseCallFilter = ();
type Origin = Origin;
type Index = u64;
type BlockNumber = u64;
type Hash = H256;
type Call = ();
type Hashing = BlakeTwo256;
type AccountId = u64;
type Lookup = IdentityLookup<Self::AccountId>;
type Header = Header;
type Event = ();
type BlockHashCount = BlockHashCount;
type MaximumBlockWeight = MaximumBlockWeight;
type DbWeight = ();
type BlockExecutionWeight = ();
type ExtrinsicBaseWeight = ();
type MaximumExtrinsicWeight = MaximumBlockWeight;
type MaximumBlockLength = MaximumBlockLength;
type AvailableBlockRatio = AvailableBlockRatio;
type Version = ();
// Account data comes from pallet_balances so free balances are stored in system accounts.
type AccountData = pallet_balances::AccountData<u64>;
type OnNewAccount = ();
type OnKilledAccount = ();
type SystemWeightInfo = ();
}
parameter_types! {
pub const ExistentialDeposit: u64 = 1;
}
// Balances pallet with u64 balances, backed by the system account store.
impl pallet_balances::Trait for Test {
type MaxLocks = ();
type Balance = u64;
type DustRemoval = ();
type Event = ();
type ExistentialDeposit = ExistentialDeposit;
type AccountStore = System;
type WeightInfo = ();
}
parameter_types! {
pub const ProofLimit: u32 = 1024;
// NOTE(review): ExpireDuration is declared but not referenced by the visible
// `impl Trait` below — confirm whether the pallet's config expects it.
pub const ExpireDuration: u64 = 100;
}
// Atomic-swap pallet config: swaps move u64 balances via BalanceSwapAction.
impl Trait for Test {
type Event = ();
type SwapAction = BalanceSwapAction<u64, Balances>;
type ProofLimit = ProofLimit;
}
// Convenience aliases for the assembled pallets.
type System = frame_system::Module<Test>;
type Balances = pallet_balances::Module<Test>;
type AtomicSwap = Module<Test>;
// The two test account ids.
const A: u64 = 1;
const B: u64 = 2;
/// Builds fresh test externalities with account A holding 100 units and
/// account B holding 200.
pub fn new_test_ext() -> sp_io::TestExternalities {
    let mut storage = frame_system::GenesisConfig::default()
        .build_storage::<Test>()
        .unwrap();
    pallet_balances::GenesisConfig::<Test> { balances: vec![(A, 100), (B, 200)] }
        .assimilate_storage(&mut storage)
        .unwrap();
    storage.into()
}
// End-to-end happy path of a cross-chain atomic swap, simulated with two
// independent externalities standing in for two chains. A locks 50 on
// chain1 for B, B locks 75 on chain2 for A; revealing the proof claims both.
#[test]
fn two_party_successful_swap() {
let mut chain1 = new_test_ext();
let mut chain2 = new_test_ext();
// A generates a random proof. Keep it secret.
let proof: [u8; 2] = [4, 2];
// The hashed proof is the blake2_256 hash of the proof. This is public.
let hashed_proof = blake2_256(&proof);
// A creates the swap on chain1.
chain1.execute_with(|| {
AtomicSwap::create_swap(
Origin::signed(A),
B,
hashed_proof.clone(),
BalanceSwapAction::new(50),
1000,
)
.unwrap();
// A's 50 units are reserved/locked in the swap; B untouched.
assert_eq!(Balances::free_balance(A), 100 - 50);
assert_eq!(Balances::free_balance(B), 200);
});
// B creates the swap on chain2.
chain2.execute_with(|| {
AtomicSwap::create_swap(
Origin::signed(B),
A,
hashed_proof.clone(),
BalanceSwapAction::new(75),
1000,
)
.unwrap();
assert_eq!(Balances::free_balance(A), 100);
assert_eq!(Balances::free_balance(B), 200 - 75);
});
// A reveals the proof and claims the swap on chain2.
chain2.execute_with(|| {
AtomicSwap::claim_swap(Origin::signed(A), proof.to_vec(), BalanceSwapAction::new(75))
.unwrap();
assert_eq!(Balances::free_balance(A), 100 + 75);
assert_eq!(Balances::free_balance(B), 200 - 75);
});
// B use the revealed proof to claim the swap on chain1.
chain1.execute_with(|| {
AtomicSwap::claim_swap(Origin::signed(B), proof.to_vec(), BalanceSwapAction::new(50))
.unwrap();
assert_eq!(Balances::free_balance(A), 100 - 50);
assert_eq!(Balances::free_balance(B), 200 + 50);
});
}
|
/*
zk-paillier
Copyright 2018 by Kzen Networks
zk-paillier is free software: you can redistribute
it and/or modify it under the terms of the GNU General Public
License as published by the Free Software Foundation, either
version 3 of the License, or (at your option) any later version.
@license GPL-3.0+ <https://github.com/KZen-networks/zk-paillier/blob/master/LICENSE>
*/
mod wi_dlog_proof;
pub use self::wi_dlog_proof::*;
// mod correct_opening;
// pub use self::correct_opening::CorrectOpening;
// pub use self::correct_key::Challenge;
// pub use self::correct_key::CorrectKeyProof;
// pub use self::correct_key::VerificationAid;
// mod correct_key;
// pub use self::correct_key::CorrectKey;
mod correct_key_ni;
pub use self::correct_key_ni::CorrectKeyProofError;
pub use self::correct_key_ni::NICorrectKeyProof;
// mod range_proof;
// pub use self::range_proof::RangeProof;
// pub use self::range_proof::RangeProofTrait;
// pub use self::range_proof::ChallengeBits;
// pub use self::range_proof::EncryptedPairs;
// pub use self::range_proof::Proof;
// mod range_proof_ni;
// pub use self::range_proof_ni::RangeProofError;
// pub use self::range_proof_ni::RangeProofNi;
// mod correct_message;
// pub use self::correct_message::CorrectMessageProof;
// pub use self::correct_message::CorrectMessageProofError;
// use curv::BigInt;
use crate::BigInt;
use std::borrow::Borrow;
use digest::Digest;
use sha2::Sha256;
/// Feeds the byte encoding of every `BigInt` yielded by `it` into a single
/// SHA-256 state and returns the final digest reinterpreted as a `BigInt`.
pub fn compute_digest<IT>(it: IT) -> BigInt
where
    IT: Iterator,
    IT::Item: Borrow<BigInt>,
{
    let hasher = it.fold(Sha256::new(), |mut hasher, value| {
        // The crate's `&BigInt -> Vec<u8>` conversion supplies the bytes.
        let bytes: Vec<u8> = value.borrow().into();
        hasher.update(&bytes);
        hasher
    });
    BigInt::from(&hasher.finalize()[..])
}
|
//! Transport from SQLite Source to Arrow2 Destination.
use crate::{
destinations::arrow2::{
typesystem::Arrow2TypeSystem, Arrow2Destination, Arrow2DestinationError,
},
impl_transport,
sources::sqlite::{SQLiteSource, SQLiteSourceError, SQLiteTypeSystem},
typesystem::TypeConversion,
};
use chrono::{NaiveDate, NaiveDateTime, NaiveTime};
use thiserror::Error;
// Errors that can surface while moving data from SQLite to Arrow2: each
// variant transparently wraps the underlying layer's error.
#[derive(Error, Debug)]
pub enum SQLiteArrow2TransportError {
#[error(transparent)]
Source(#[from] SQLiteSourceError),
#[error(transparent)]
Destination(#[from] Arrow2DestinationError),
#[error(transparent)]
ConnectorX(#[from] crate::errors::ConnectorXError),
}
/// Convert SQLite data types to Arrow2 data types.
pub struct SQLiteArrow2Transport;
// Macro-generated Transport impl. Each mapping line pairs a SQLite logical
// type (with its Rust representation) with the Arrow2 type it becomes;
// `conversion auto` uses the identity/From conversion, while `conversion
// option` routes through the explicit TypeConversion impl below
// (Box<str> -> String). Note SQLite timestamps land in Arrow2 Date64.
impl_transport!(
name = SQLiteArrow2Transport,
error = SQLiteArrow2TransportError,
systems = SQLiteTypeSystem => Arrow2TypeSystem,
route = SQLiteSource => Arrow2Destination,
mappings = {
{ Bool[bool] => Boolean[bool] | conversion auto }
{ Int8[i64] => Int64[i64] | conversion auto }
{ Int4[i32] => Int64[i64] | conversion auto }
{ Int2[i16] => Int64[i64] | conversion auto }
{ Real[f64] => Float64[f64] | conversion auto }
{ Text[Box<str>] => LargeUtf8[String] | conversion option }
{ Blob[Vec<u8>] => LargeBinary[Vec<u8>] | conversion auto }
{ Date[NaiveDate] => Date32[NaiveDate] | conversion auto }
{ Time[NaiveTime] => Time64[NaiveTime] | conversion auto }
{ Timestamp[NaiveDateTime] => Date64[NaiveDateTime] | conversion auto }
}
);
impl TypeConversion<Box<str>, String> for SQLiteArrow2Transport {
    /// Converts a SQLite TEXT value into an owned `String`.
    fn convert(val: Box<str>) -> String {
        // `Box<str>::into_string` reuses the existing heap allocation;
        // the previous `to_string()` copied the bytes into a new one.
        val.into_string()
    }
}
|
extern crate gcc;
/// Build script: compiles the C shim `src/task_info.c` into the static
/// library `libtask_info.a` that this crate links against.
fn main() {
    gcc::Config::new()
        .file("src/task_info.c")
        .compile("libtask_info.a");
}
|
use near_sdk::borsh::{self, BorshDeserialize, BorshSerialize};
use near_sdk::serde::{Deserialize, Serialize};
use near_sdk::serde_json::json;
use near_sdk::{env, json_types::U128, near_bindgen, AccountId, Promise, *};
/// Stateless contract that spawns new NEAR accounts via cross-contract calls.
#[near_bindgen]
#[derive(BorshDeserialize, BorshSerialize, Default)]
pub struct MakeWallets {}
// The hand-written empty `impl Default` was boilerplate; derive it instead.
/// Gas forwarded to each cross-contract call (40 Tgas).
const GAS: u64 = 40_000_000_000_000;
// Request payload for creating one wallet: the account to create, the key
// that will control it, and the deposit used to fund it.
#[derive(Serialize, Deserialize)]
#[serde(crate = "near_sdk::serde")]
pub struct NewAccount {
account_id: AccountId,
public_key: PublicKey,
initial_amount: U128,
}
// Self-callback interface: `ext_self::on_account_created(...)` builds the
// promise that receives the result of the `create_account` call.
#[ext_contract(ext_self)]
pub trait ExtMakeWallets {
fn on_account_created(#[callback] val: bool) -> bool;
}
#[near_bindgen]
impl MakeWallets {
// Calls `create_account` on the "near" root contract, attaching the
// requested initial deposit, then chains a callback to itself to log the
// outcome. #[payable] allows the caller to attach the funds being forwarded.
#[payable]
pub fn make_wallets(new_account: NewAccount) {
Promise::new("near".parse().unwrap()).function_call(
"create_account".to_string(),
json!({"new_account_id": new_account.account_id.to_string(), "new_public_key": new_account.public_key}).to_string().as_bytes().to_vec(),
new_account.initial_amount.0, //initial deposit
GAS.into()
).then(ext_self::on_account_created(env::current_account_id(), 0, GAS.into()));
}
// Callback invoked with the boolean result of `create_account`; #[private]
// restricts it to the contract itself.
#[private]
pub fn on_account_created(#[callback] val: bool) {
match val {
true => env::log_str("account was created successfully"),
false => env::log_str("error during account creation"),
}
}
}
#[cfg(not(target_arch = "wasm32"))]
#[cfg(test)]
mod tests {
// NOTE(review): the whole test scaffold below is commented out and the
// VMContext shape appears to target an older near-sdk API; revive it against
// the pinned SDK version or delete it.
// use super::*;
// use near_sdk::{testing_env, VMContext};
// fn get_context(input: Vec<u8>, is_view: bool) -> VMContext {
// VMContext {
// current_account_id: "alice_near".to_string(),
// signer_account_id: "bob_near".to_string(),
// signer_account_pk: vec![0, 1, 2],
// predecessor_account_id: "carol_near".to_string(),
// input,
// block_index: 0,
// block_timestamp: 0,
// account_balance: 0,
// account_locked_balance: 0,
// storage_usage: 0,
// attached_deposit: 0,
// prepaid_gas: 10u64.pow(18),
// random_seed: vec![0, 1, 2],
// is_view,
// output_data_receivers: vec![],
// epoch_height: 0,
// }
// }
// #[test]
// fn create_accounts() {
// let context = get_context(vec![], false);
// testing_env!(context);
// let mut contract = MakeWallets::default();
// }
}
|
use std::char;
use std::string::String;
use std::iter::Iterator;
use std::collections::HashSet;
// One lexical token and where it was found.
#[derive(Debug)]
pub struct Token {
// The token's text.
str: String,
// 0-based line the token starts on.
line_num: usize,
// 0-based column within that line.
line_pos: usize,
}
// Cursor-based lexer over a pre-read character buffer.
#[derive(Debug)]
pub struct Scanner<'a, 'b> {
file: &'a Vec<char>,
// Index of the character currently under the cursor.
cur_pos: usize,
line_num: usize,
// Index where the current line begins, used to compute token columns.
cur_line_start_pos: usize,
// The set of recognized multi-character operator spellings.
operators: &'b HashSet<String>,
}
impl<'a, 'b> Scanner<'a, 'b> {
    /// Creates a scanner over `file` that recognizes the multi-character
    /// operators listed in `op`.
    pub fn new(file: &'a Vec<char>, op: &'b HashSet<String>) -> Scanner<'a, 'b> {
        Scanner {
            file,
            cur_pos: 0,
            line_num: 0,
            cur_line_start_pos: 0,
            operators: op,
        }
    }
    /// Character under the cursor, or '\0' once past the end of input.
    fn cur_ch(&self) -> char {
        match self.file.get(self.cur_pos) {
            Some(v) => *v,
            None => '\0',
        }
    }
    /// Character one past the cursor, or '\0' once past the end of input.
    fn next_ch(&self) -> char {
        match self.file.get(self.cur_pos + 1) {
            Some(v) => *v,
            None => '\0',
        }
    }
    /// Builds a token from `length` characters starting at `start_pos`;
    /// returns `None` for an empty span.
    fn make_token(&mut self, start_pos: usize, length: usize) -> Option<Token> {
        if length == 0 {
            return None;
        }
        let res = self.file.iter().skip(start_pos).take(length);
        Some(Token {
            str: res.collect(),
            line_num: self.line_num,
            // Column of the token within the current line.
            line_pos: start_pos - self.cur_line_start_pos,
        })
    }
    /// Scans a string literal starting at an opening '"'.
    ///
    /// Bug fix: the loop guard used to be `self.next_ch() == '\0'`, so the
    /// body only ever ran at end of input and string scanning never
    /// advanced. Scan until the closing quote or end of input, skipping the
    /// character after a backslash escape.
    fn parse_string(&mut self) -> Option<Token> {
        let start_pos = self.cur_pos;
        if self.cur_ch() == '"' {
            self.cur_pos += 1;
        }
        while self.cur_ch() != '"' && self.cur_ch() != '\0' {
            match self.cur_ch() {
                '\\' => self.cur_pos += 2,
                _ => self.cur_pos += 1,
            }
        }
        self.make_token(start_pos, self.cur_pos - start_pos)
    }
    /// Scans an identifier/keyword: alphanumerics and underscores.
    fn parse_unit(&mut self) -> Option<Token> {
        let start_pos = self.cur_pos;
        while (self.cur_ch()).is_alphanumeric() || self.cur_ch() == '_' {
            self.cur_pos += 1;
        }
        let length = self.cur_pos - start_pos;
        // `length > 254 || length > 63` was redundant: the second test
        // already implies the first.
        if length > 63 {
            println!("The Unit Is Too Long");
        }
        self.make_token(start_pos, length)
    }
    /// Scans a number. Only base 10 is handled; an optional fractional part
    /// is consumed when a digit follows the '.'.
    fn parse_number(&mut self) -> Option<Token> {
        let start_pos = self.cur_pos;
        while (self.cur_ch()).is_digit(10) {
            self.cur_pos += 1;
        }
        if self.cur_ch() == '.' {
            self.cur_pos += 1;
            if !self.cur_ch().is_digit(10) {
                // '.' not followed by a digit: the token ends after the dot.
                return self.make_token(start_pos, self.cur_pos - start_pos);
            }
            while self.cur_ch().is_digit(10) {
                self.cur_pos += 1;
            }
        }
        self.make_token(start_pos, self.cur_pos - start_pos)
    }
}
impl<'a, 'b> Iterator for Scanner<'a, 'b> {
type Item = Token;
fn next(&mut self) -> Option<Token> {
let skip_comm = |scanner: &mut Scanner| {
if scanner.cur_ch() == '/' && scanner.cur_ch() == '/' {
while scanner.cur_ch() != '\n' {
scanner.cur_pos += 1;
}
}
};
// 跳过空格或注释
skip_comm(self);
while self.cur_ch().is_ascii_whitespace() {
if self.cur_ch() == '\n' {
self.line_num += 1;
self.cur_line_start_pos = self.cur_pos;
}
self.cur_pos += 1;
skip_comm(self);
}
// 读取完毕
match self.cur_ch() {
'\0' => return None,
_ => ()
}
if self.cur_ch().is_alphabetic() || self.cur_ch().is_ascii_whitespace() {
return self.parse_unit();
} else if self.cur_ch().is_digit(10) {
return self.parse_number();
} else if self.cur_ch() == '"' {
return self.parse_string();
}
// 运算符解析
let start_pos = self.cur_pos;
// 处理操作符
while self.cur_ch().is_ascii_punctuation() {
self.cur_pos += 1;
}
loop {
let res = self.make_token(start_pos, self.cur_pos - start_pos);
match res {
Some(v) => {
if self.operators.contains(&v.str) || v.str.len() == 1 {
return Some(v);
} else {
self.cur_pos -= 1;
}
}
None => return None
}
}
}
} |
use crate::linalg::*;
use std::mem::{self, MaybeUninit};
// Sticker colors of a standard cube.
#[derive(Clone,Copy,PartialEq,Eq,Debug)]
pub enum Color {Yellow, Red, Blue, White, Orange, Green}
// Rotation sense (clockwise / counter-clockwise).
#[derive(Clone,Copy,PartialEq,Eq,Debug)]
pub enum Direction {CW, CCW}
// The six faces of the cube.
#[derive(Clone,Copy,PartialEq,Eq,Debug)]
pub enum Face {Front, Back, Left, Right, Up, Down}
// One physical cubie. Each variant stores a unit normal per visible sticker
// together with that sticker's color; Middle is the hidden core piece.
#[derive(Debug)]
pub enum Piece {
Center (Vector, Color),
Edge ([Vector; 2], [Color; 2]),
Corner ([Vector; 3], [Color; 3]),
Middle,
}
#[derive(Debug)]
pub struct Cube {
// All 27 pieces, stored at the slot given by piece_index(coordinate).
pieces: [Piece; 27],
// Whole-cube orientation applied on top of the piece coordinates.
rotation: Matrix,
}
// A 3x3 snapshot of one face's sticker colors.
#[derive(Debug)]
pub struct CubeFace {
// Row-major: slot = x + y * 3 (see the Index impl).
squares: [Color; 9],
}
impl std::ops::Index<(usize, usize)> for CubeFace {
    type Output = Color;
    /// Row-major access: `(x, y)` maps to slot `x + 3 * y`.
    fn index(&self, i: (usize, usize)) -> &Self::Output {
        let (x, y) = i;
        &self.squares[x + 3 * y]
    }
}
impl CubeFace {
fn white() -> Self {
Self{squares: [Color::White; 9]}
}
fn set(&mut self, c: (usize, usize), col: Color) {
self.squares[c.0 + c.1*3] = col;
}
}
impl std::ops::Not for Direction {
    type Output = Self;
    /// `!CW == CCW` and vice versa.
    fn not(self) -> Self::Output {
        if self == Self::CW { Self::CCW } else { Self::CW }
    }
}
impl Face {
// Unit outward normal of the face. The coordinate convention implied here:
// +x = Right, +y = Back, +z = Up.
fn direction(&self) -> Vector {
match self {
Self::Front => Vector::new(0, -1, 0),
Self::Back => Vector::new(0, 1, 0),
Self::Left => Vector::new(-1, 0, 0),
Self::Right => Vector::new(1, 0, 0),
Self::Up => Vector::new(0, 0, 1),
Self::Down => Vector::new(0, 0, -1),
}
}
// The face whose normal this face's normal maps to under `rot`; None when
// the rotated vector is not one of the six face normals.
fn rotated_face(&self, rot: &Matrix) -> Option<Self> {
let rotated = rot * &self.direction();
Self::from_direction(&rotated)
}
// Inverse of direction(): normal vector back to the face.
fn from_direction(v: &Vector) -> Option<Self> {
match v.as_ref() {
[0, -1, 0] => Some(Face::Front),
[0, 1, 0] => Some(Face::Back),
[-1, 0, 0] => Some(Face::Left),
[1, 0, 0] => Some(Face::Right),
[0, 0, 1] => Some(Face::Up),
[0, 0, -1] => Some(Face::Down),
_ => None
}
}
// Sticker color of this face on a solved cube.
fn initial_color(&self) -> Color {
match self {
Self::Front => Color::Green,
Self::Back => Color::Blue,
Self::Left => Color::Orange,
Self::Right => Color::Red,
Self::Up => Color::White,
Self::Down => Color::Yellow,
}
}
// All six faces, in a fixed order.
fn iter() -> impl Iterator<Item=Self> {
vec![Self::Front, Self::Back, Self::Left, Self::Right, Self::Up, Self::Down]
.into_iter()
}
// Rotation matrix for turning this face in direction `dir`; opposite faces
// use the same axis with mirrored handedness.
fn rotation_matrix(&self, dir: Direction) -> Matrix {
let cw = dir == Direction::CW;
match self {
Self::Front => Matrix::rotation_y(!cw),
Self::Back => Matrix::rotation_y(cw),
Self::Left => Matrix::rotation_x(!cw),
Self::Right => Matrix::rotation_x(cw),
Self::Down => Matrix::rotation_z(!cw),
Self::Up => Matrix::rotation_z(cw),
}
}
// Offset of the starting edge piece used when walking this face's edge
// slots in Cube::face / Cube::turn.
fn edge_orth(&self) -> Vector {
match self {
Self::Front | Self::Back | Self::Left | Self::Right => Vector::new(0, 0, 1),
Self::Up => Vector::new(0, 1, 0),
Self::Down => Vector::new(0, -1, 0),
}
}
// Offset of the starting corner piece used when walking this face's corner
// slots in Cube::face / Cube::turn.
fn corner_orth(&self) -> Vector {
match self {
Self::Front => Vector::new(-1, 0, 1),
Self::Right => Vector::new(0, -1, 1),
Self::Left => Vector::new(0, 1, 1),
Self::Back => Vector::new(1, 0, 1),
Self::Up => Vector::new(-1, 1, 0),
Self::Down => Vector::new(-1, -1, 0),
}
}
}
impl Color {
// One-letter lowercase abbreviation used by the ASCII renderer.
pub fn short(&self) -> &'static str {
match self {
Self::Yellow => "y",
Self::Red => "r",
Self::Blue => "b",
Self::Green => "g",
Self::White => "w",
Self::Orange => "o",
}
}
}
impl Piece {
// Position of the piece: the sum of its sticker normals (each component in
// {-1, 0, 1}), so centers sit on axes and corners at (+-1, +-1, +-1).
fn coordinate(&self) -> Vector {
match self {
Self::Middle => Vector::new(0, 0, 0),
Self::Center(v, _) => v.clone(),
Self::Edge([v1, v2], _) => v1 + v2,
Self::Corner([v1, v2, v3], _) => {
let mut r = v1.clone();
r.add_vec(v2).add_vec(v3);
r
}
}
}
// Color of the sticker whose normal equals `dir`, if this piece has one.
fn color(&self, dir: &Vector) -> Option<Color> {
match self {
Self::Center(v, c) if dir == v => Some(*c),
Self::Edge([v, _], [c, _]) if dir == v => Some(*c),
Self::Edge([_, v], [_, c]) if dir == v => Some(*c),
Self::Corner([v, _, _], [c, _, _]) if dir == v => Some(*c),
Self::Corner([_, v, _], [_, c, _]) if dir == v => Some(*c),
Self::Corner([_, _, v], [_, _, c]) if dir == v => Some(*c),
_ => None,
}
}
// Applies `rot` to every sticker normal; colors stay attached to their
// (rotated) normals.
fn rotate(&mut self, rot: &Matrix) {
match self {
Self::Middle => (),
Self::Center(v, _) => *v = rot * &*v,
Self::Edge([v1, v2], _) => {
*v1 = rot * &*v1;
*v2 = rot * &*v2;
},
Self::Corner([v1, v2, v3], _) => {
*v1 = rot * &*v1;
*v2 = rot * &*v2;
*v3 = rot * &*v3;
}
}
}
}
impl Cube {
// Builds a solved cube: generates all 27 pieces and places each at the
// array slot derived from its coordinate.
pub fn new() -> Self {
// SAFETY: an array of `MaybeUninit` requires no initialization; every slot
// is written below before the transmute.
let mut p: [MaybeUninit<Piece>; 27] = unsafe {MaybeUninit::uninit().assume_init()};
let mut b = [false; 27];
for pie in Self::generate_pieces().into_iter() {
let c = Self::piece_index(&pie.coordinate());
p[c] = MaybeUninit::new(pie);
b[c] = true;
}
// Guard the transmute below: every slot must have been written.
if !b.iter().all(|x| *x) {
panic!("pieces was not all initialized!");
}
Self{
// SAFETY: all 27 slots were verified initialized just above.
pieces: unsafe{mem::transmute::<_, [Piece; 27]>(p)},
rotation: Matrix::diag(),
}
}
// Produces the 27 pieces of a solved cube: the core, 6 centers, 12 edges and
// 8 corners, with sticker colors taken from each normal's home face.
fn generate_pieces() -> Vec<Piece> {
let mut res = Vec::with_capacity(27);
res.push(Piece::Middle);
// fill centers
for f in Face::iter() {
res.push(Piece::Center(f.direction(), f.initial_color()));
}
// fill edges: rotate three seed positions around the x axis, four steps each.
let trans = Matrix::rotation_x(true);
for x in &[-1, 1, 0] {
let mut rotator = Vector::new(*x, if *x == 0 {1} else {0}, 1);
for _ in 0..4 {
rotator = &trans * &rotator;
// components2() splits the edge coordinate into its two axis normals.
let comp = rotator.components2();
let colors = [
Face::from_direction(&comp[0]).unwrap().initial_color(),
Face::from_direction(&comp[1]).unwrap().initial_color()
];
res.push(Piece::Edge(comp, colors));
}
}
// fill corners: rotate two seed corners around the x axis, four steps each.
for x in &[-1, 1] {
let mut rotator = Vector::new(*x, 1, 1);
for _ in 0..4 {
rotator = &trans * &rotator;
// components() splits the corner coordinate into its three axis normals.
let comp = rotator.components();
let colors = [
Face::from_direction(&comp[0]).unwrap().initial_color(),
Face::from_direction(&comp[1]).unwrap().initial_color(),
Face::from_direction(&comp[2]).unwrap().initial_color()
];
res.push(Piece::Corner(comp, colors));
}
}
res
}
/// Maps a piece coordinate (components in {-1, 0, 1}) to its array slot by
/// shifting each component to {0, 1, 2} and reading them as base-3 digits.
fn piece_index(v: &Vector) -> usize {
    let (x, y, z) = (v.x() + 1, v.y() + 1, v.z() + 1);
    (x * 9 + y * 3 + z) as usize
}
// Renders one face to a 3x3 color grid, honoring the whole-cube rotation:
// walks the four corners, then the four edges, then reads the center.
pub fn face(&self, f: Face) -> CubeFace {
let mut cf = CubeFace::white();
let dir = f.direction();
// The face normal expressed in the rotated (physical) frame.
let rdir = &self.rotation * &dir;
// Conjugated face rotation: steps a coordinate to the next slot around
// this face within the rotated frame.
let rot = &(&self.rotation * &f.rotation_matrix(Direction::CW))
* &self.rotation.inverse().expect("det wasn't 1");
let mut corner = f.corner_orth();
corner.add_vec(&dir);
corner = &self.rotation * &corner;
// Visit the four corner slots in CW order.
for c in &[(0, 0), (2, 0), (2, 2), (0, 2)] {
cf.set(*c, self[&corner].color(&rdir).unwrap());
corner = &rot * &corner;
}
let mut edge = f.edge_orth();
edge.add_vec(&dir);
edge = &self.rotation * &edge;
// Visit the four edge slots in CW order.
for c in &[(1, 0), (2, 1), (1, 2), (0, 1)] {
cf.set(*c, self[&edge].color(&rdir).unwrap());
edge = &rot * &edge;
}
cf.set((1, 1), self[&rdir].color(&rdir).unwrap());
cf
}
pub fn print_ascii(&self) {
fn row(f: &CubeFace, r: usize, i: usize) {
print!(
"{:<1$}{2} {3} {4}",
"",
i,
f[(0, r)].short(),
f[(1, r)].short(),
f[(2, r)].short()
);
}
let up = self.face(Face::Up);
for i in 0..3 {
row(&up, i, 8);
println!("");
}
println!("{:<8}{:-<5}", "", "");
let left = self.face(Face::Left);
let front = self.face(Face::Front);
let right = self.face(Face::Right);
let back = self.face(Face::Back);
for i in 0..3 {
row(&left, i, 0);
print!(" | ");
row(&front, i, 0);
print!(" | ");
row(&right, i, 0);
print!(" | ");
row(&back, i, 0);
println!("");
}
println!("{:<8}{:-<5}", "", "");
let down = self.face(Face::Down);
for i in 0..3 {
row(&down, i, 8);
println!("");
}
}
pub fn rotate(&mut self, face: Face, dir: Direction) {
let rot = face.rotated_face(&self.rotation).unwrap().rotation_matrix(!dir);
self.rotation = &rot * &self.rotation;
}
fn swap(&mut self, v1: &Vector, v2: &Vector) {
let i1 = Self::piece_index(v1);
let i2 = Self::piece_index(v2);
self.pieces.swap(i1, i2);
}
pub fn turn(&mut self, face: Face, dir: Direction) {
let base = face.direction();
let rot = &(&self.rotation * &face.rotation_matrix(dir))
* &self.rotation.inverse().expect("det wasn't 1");
let rotn_org = face.rotation_matrix(!dir);
let rotn = &(&self.rotation * &rotn_org)
* &self.rotation.inverse().expect("det wasn't 1");
let mut corner1 = face.corner_orth();
let mut corner2 = &rotn_org * &corner1;
let mut edge1 = face.edge_orth();
let mut edge2 = &rotn_org * &edge1;
corner1.add_vec(&base);
corner2.add_vec(&base);
edge1.add_vec(&base);
edge2.add_vec(&base);
corner1 = &self.rotation * &corner1;
corner2 = &self.rotation * &corner2;
edge1 = &self.rotation * &edge1;
edge2 = &self.rotation * &edge2;
self[&corner1].rotate(&rot);
self[&edge1].rotate(&rot);
for _ in 0..3 {
self[&corner2].rotate(&rot);
self.swap(&corner1, &corner2);
corner1 = &rotn * &corner1;
corner2 = &rotn * &corner2;
self[&edge2].rotate(&rot);
self.swap(&edge1, &edge2);
edge1 = &rotn * &edge1;
edge2 = &rotn * &edge2;
}
}
pub fn turns(&mut self, turns: &str) -> Result<(), ()> {
for m in turns.split(" ") {
if m == "" {
continue;
}
let chars: Vec<char> = m.chars().collect();
if chars.len() > 2 {
return Err(());
}
let rotation = match chars[0] {
'X' | 'Y' | 'Z' => true,
_ => false,
};
let face = match chars[0] {
'R' => Face::Right,
'L' => Face::Left,
'U' => Face::Up,
'D' => Face::Down,
'B' => Face::Back,
'F' => Face::Front,
'X' => Face::Right,
'Y' => Face::Up,
'Z' => Face::Front,
_ => return Err(()),
};
let dir = if chars.len() == 2 && chars[1] == '\'' {
Direction::CCW
}
else{
Direction::CW
};
let time = if chars.len() == 2 && chars[1] == '2' {
2
} else {
1
};
for _ in 0..time {
if rotation {
self.rotate(face, dir);
} else {
self.turn(face, dir);
}
}
}
Ok(())
}
}
impl std::ops::Index<&Vector> for Cube {
type Output = Piece;
fn index(&self, rc: &Vector) -> &Self::Output {
&self.pieces[Cube::piece_index(rc)]
}
}
impl std::ops::IndexMut<&Vector> for Cube {
fn index_mut(&mut self, rc: &Vector) -> &mut Self::Output {
&mut self.pieces[Cube::piece_index(rc)]
}
}
|
pub use linux_macros::{kstr_pub as kstr};
use crate::kty::{c_char};
/// A thin wrapper around a raw C-string pointer.
///
/// NOTE(review): presumably the pointer refers to a NUL-terminated,
/// 'static kernel string — confirm with producers. The derived
/// `PartialEq`/`Eq` compare the *pointer value*, not string contents.
#[derive(Copy, Clone, PartialEq, Eq)]
pub struct KStr(*const c_char);
impl KStr {
    /// Wraps a raw C-string pointer without any validation.
    ///
    /// # Safety
    ///
    /// `ptr` must point to a valid, NUL-terminated C string that remains
    /// valid for every subsequent use of the returned `KStr` — TODO
    /// confirm the intended lifetime contract with callers.
    pub unsafe fn new(ptr: *const c_char) -> KStr {
        KStr(ptr)
    }
    /// Returns the wrapped raw pointer.
    pub fn as_ptr(self) -> *const c_char {
        self.0
    }
}
|
use std;
use std::io::Read;
use rustc_serialize::json;
/// Login credentials decoded from the JSON configuration file
/// (see `read_credentials`).
#[derive(RustcDecodable)]
pub struct Credentials {
    pub username: String,
    pub password: String,
}
/// Returns `$HOME/.config/neubauten/`, the application's configuration
/// directory.
///
/// Panics if the home directory cannot be determined.
///
/// NOTE: `std::env::home_dir` is deprecated (unreliable on Windows);
/// acceptable for Unix-only use, otherwise consider the `dirs` crate.
pub fn root_dir() -> std::path::PathBuf {
    let mut path = std::env::home_dir().expect("could not determine home directory");
    path.push(".config/neubauten/");
    path
}
/// Returns `$HOME/.config/neubauten/tmp/` as a `String`, used as the
/// spotify temporary-data directory.
///
/// Panics if the home directory cannot be determined or the resulting
/// path is not valid UTF-8.
pub fn spotify_path() -> String {
    let mut path = std::env::home_dir().expect("could not determine home directory");
    path.push(".config/neubauten/tmp/");
    path.to_str().expect("home directory path is not valid UTF-8").to_owned()
}
/// Reads and decodes `~/.config/neubauten/init.json` into `Credentials`.
///
/// Panics if the file is missing, unreadable, not valid UTF-8 in a path
/// sense, or cannot be decoded as JSON.
pub fn read_credentials() -> Credentials {
    let mut configuration_path = root_dir();
    configuration_path.push("init.json");
    let path = configuration_path.to_str().unwrap();
    match std::fs::File::open(path) {
        Ok(mut file) => {
            let mut data = String::new();
            file.read_to_string(&mut data).unwrap();
            // Decode the JSON body via rustc_serialize's derived decoder.
            return json::decode(&data).unwrap();
        },
        Err(_) => panic!("No configuration file found, aborting..."),
    }
}
|
//!
//! Test whether, when using bb8_diesel, database queries starve execution of
//! the rest of the current task. We'd expect so, based on the docs. This
//! example shows the behavior using `tokio::select!`.
//!
use bb8_diesel_test::sleep_using_db_pool;
use bb8_diesel_test::sleep_using_tokio;
use std::time::Duration;
// Connection string for the test database — presumably a local
// CockroachDB/Postgres instance (root user, sslmode=disable); confirm
// against the test harness setup.
static DATABASE_URL: &str = "postgresql://root@127.0.0.1:32221?sslmode=disable";
#[tokio::main]
async fn main() {
    // Build a small bb8 pool of synchronous Diesel Postgres connections.
    let manager: bb8_diesel::DieselConnectionManager<diesel::pg::PgConnection> =
        bb8_diesel::DieselConnectionManager::new(DATABASE_URL);
    let pool = bb8::Pool::builder()
        .connection_timeout(Duration::from_secs(3))
        .build(manager)
        .await
        .unwrap();
    // bb8 happily completes this step successfully even when it failed to
    // connect to the database. Let's catch that and report a better error.
    {
        eprintln!("setting up pool for database {:?}", DATABASE_URL);
        let _ = pool.get().await.unwrap_or_else(|_| {
            panic!("failed to connect to database at {:?}", DATABASE_URL)
        });
    }
    // Baseline: two pure-tokio sleeps race correctly under select!.
    eprintln!(
        "TEST ONE: Issue two `tokio::time::sleep` calls using `tokio::select!`."
    );
    eprintln!(
        "Expected behavior: Only the shorter sleep completes. It takes \n\
        the expected amount of time. (The other sleep is cancelled.)"
    );
    tokio::select! {
        _ = sleep_using_tokio(1, Duration::from_millis(500)) => {}
        _ = sleep_using_tokio(2, Duration::from_millis(300)) => {}
    };
    // The interesting case: a blocking database sleep starves the task,
    // so the shorter tokio sleep cannot win the select.
    eprintln!(
        "\n\
        TEST TWO: Issue a `tokio::time::sleep` call and a database sleep call\n\
        using `tokio::select!`.\n\
        Expected behavior: We always wait the duration of the database sleep,\n\
        even though it's longer than the other sleep.\n\
        (ideal behavior: the shorter sleep completes first)"
    );
    tokio::select! {
        _ = sleep_using_db_pool(3, &pool, Duration::from_millis(500)) => {}
        _ = sleep_using_tokio(4, Duration::from_millis(300)) => {}
    };
}
|
use std::ops::BitXor;
use std::cmp;
use std::iter;
use num_rational::Ratio;
/// XORs two blocks element-wise; the result is truncated to the length
/// of the shorter block.
pub fn block_xor<T>(block1: &[T], block2: &[T]) -> Vec<T::Output>
    where T: BitXor + Clone + Copy {
    // `zip` stops at the shorter slice, which gives the min-length
    // truncation for free.
    block1.iter()
        .zip(block2.iter())
        .map(|(&a, &b)| a ^ b)
        .collect()
}
/// XORs two blocks element-wise, requiring them to be the same length;
/// returns `None` on a length mismatch.
pub fn exact_block_xor<T>(block1: &[T], block2: &[T]) -> Option<Vec<T::Output>>
    where T: BitXor + Clone + Copy {
    if block1.len() == block2.len() {
        Some(block_xor(block1, block2))
    } else {
        None
    }
}
/// Builds a vector of `n` copies of `character`.
fn repeat_char(character: u8, n: usize) -> Vec<u8> {
    vec![character; n]
}
/// Applies a two-block operation to `string` and a same-length block
/// filled with `character`.
fn op_with_char<F>(op: F, character: u8, string: &[u8]) -> Vec<u8>
    where F: Fn(&[u8], &[u8]) -> Vec<u8> {
    let filler = vec![character; string.len()];
    op(string, &filler)
}
/// XORs every byte of `string` with the single byte `character`.
pub fn xor_with_char(character: u8, string: &[u8]) -> Vec<u8> {
    op_with_char(&block_xor, character, string)
}
/// Builds the repeating-key stream for `string`: the bytes of `key`
/// cycled until the stream is exactly `string.len()` bytes long.
///
/// Panics if `key` is empty (the original implementation panicked on a
/// division by zero for this case; the panic is now explicit).
fn with_key(key: &[u8], string: &[u8]) -> Vec<u8> {
    assert!(!key.is_empty(), "key must not be empty");
    key.iter().copied().cycle().take(string.len()).collect()
}
/// Applies a two-block operation to `string` and the repeating-key
/// stream derived from `key`.
fn op_with_key<F>(op: F, key: &[u8], string: &[u8]) -> Vec<u8>
    where F: Fn(&[u8], &[u8]) -> Vec<u8> {
    let keystream = with_key(key, string);
    op(string, &keystream)
}
/// XORs `string` against `key` repeated cyclically (repeating-key XOR
/// cipher).
pub fn xor_with_key(key: &[u8], string: &[u8]) -> Vec<u8> {
    op_with_key(&block_xor, key, string)
}
/// Computes the Hamming distance (number of differing bits) between two
/// equal-length byte strings.
///
/// Returns `None` when the lengths differ.
pub fn edit_distance(str1: &[u8], str2: &[u8]) -> Option<usize> {
    if str1.len() != str2.len() {
        return None;
    }
    // Per byte pair, the differing bits are the set bits of the XOR;
    // `count_ones` replaces the previous hand-rolled Kernighan popcount.
    let dist: usize = str1.iter()
        .zip(str2)
        .map(|(a, b)| (a ^ b).count_ones() as usize)
        .sum();
    Some(dist)
}
/// Hamming distance of two equal-length byte strings, normalized by the
/// string length (as an unreduced ratio).
///
/// Returns `None` when the lengths differ. Note: `Ratio::new_raw` does
/// not validate, so two empty strings yield the ratio 0/0, matching the
/// previous behavior.
pub fn normalized_edit_distance(str1: &[u8], str2: &[u8]) -> Option<Ratio<usize>> {
    // `Option::map` replaces the manual Some/None re-wrapping match.
    edit_distance(str1, str2).map(|edit_dist| Ratio::new_raw(edit_dist, str1.len()))
}
/// Sums the pairwise Hamming distances of two equal-length sequences of
/// byte strings.
///
/// Returns `None` when either sequence is empty, the sequence lengths
/// differ, or any paired strings differ in length.
fn total_edit_distance(seq1: &[&[u8]], seq2: &[&[u8]]) -> Option<usize> {
    // If lengths match and seq1 is non-empty, seq2 is non-empty too.
    if seq1.is_empty() || seq1.len() != seq2.len() {
        return None;
    }
    // Summing over Option short-circuits to None on the first mismatched
    // pair, replacing the previous pre-check loop + unwrap-in-fold.
    seq1.iter()
        .zip(seq2)
        .map(|(str1, str2)| edit_distance(str1, str2))
        .sum()
}
/// Hamming distance between each adjacent pair of strings, summed and
/// normalized by the total byte count (as an unreduced ratio).
///
/// Returns `None` when there are fewer than two strings or any adjacent
/// pair differs in length. The previous implementation panicked (usize
/// underflow on `len() - 1`) for an empty slice; that case now returns
/// `None`.
pub fn mean_edit_distance(strings: &[&[u8]]) -> Option<Ratio<usize>> {
    if strings.is_empty() {
        return None;
    }
    // Adjacent pairs: slice views instead of the previous two collected
    // Vecs — no allocation needed.
    let seq1 = &strings[..strings.len() - 1];
    let seq2 = &strings[1..];
    let total: usize = strings.iter().map(|st| st.len()).sum();
    total_edit_distance(seq1, seq2).map(|val| Ratio::new_raw(val, total))
}
|
//mod level;
//use level;
use graphics::{self, Transformed};
use opengl_graphics::GlGraphics;
use piston::event::*;
use piston::input::keyboard::Key;
use level::Level;
// Tile colors (RGBA). `Game::render` draws cell value 0 in DARK and
// cell value 1 — as well as the player — in WHITE.
const DARK : [f32; 4] = [0.4, 0.4, 0.4, 0.8];
const WHITE: [f32; 4] = [0.8, 0.8, 0.8, 0.8];
/// Top-level game state: the loaded level and the player position.
pub struct Game {
    level: Level,
    player: Player,
    //timers: Timers,
}
impl Game {
    /// Creates a new game with the requested level loaded and the player
    /// placed at cell (1, 1).
    pub fn new(level: i32) -> Game {
        Game {
            // Bug fix: the `level` argument was previously ignored and
            // `Level::load(0)` was always called.
            level: Level::load(level),
            player: Player::new(1, 1)
        }
    }
    /// Draws the 6x6 level grid and the player as 30px squares.
    pub fn render(&mut self, c: graphics::context::Context, gl: &mut GlGraphics) {
        use graphics::*;
        let square = rectangle::square(0.0, 0.0, 30.0);
        clear([0.0, 0.0, 0.0, 0.8], gl);
        for row in 0..6 {
            for col in 0..6 {
                // row shifts along Y, col along X
                let tr = c.transform.trans(col as f64 * 30.0, row as f64 * 30.0);
                match self.level.cell(col, row) {
                    Some(0) => { rectangle(DARK, square, tr, gl) },
                    Some(1) => { rectangle(WHITE, square, tr, gl) },
                    Some(2) => { rectangle([1.0; 4], square, tr, gl) },
                    // unknown/out-of-range cells are drawn red so bugs
                    // are immediately visible
                    _ => { rectangle(color::hex("ff0000"), square, tr, gl) }
                }
            }
        }
        let tr = c.transform.trans(self.player.x as f64 * 30.0, self.player.y as f64 * 30.0);
        rectangle(WHITE, square, tr, gl);
    }
    /// Advances game state by `dt` seconds (nothing is animated yet).
    pub fn update(&mut self, dt: f64) {
    }
    /// Moves the player one cell for an arrow key, if the target cell is
    /// open floor according to `Player::can_move`.
    pub fn key_press(&mut self, k: Key) {
        match k {
            Key::Left if self.player.can_move(Direction::Left, &self.level) => { self.player.x -= 1; },
            Key::Right if self.player.can_move(Direction::Right, &self.level) => { self.player.x += 1; },
            Key::Up if self.player.can_move(Direction::Up, &self.level) => { self.player.y -= 1; },
            Key::Down if self.player.can_move(Direction::Down, &self.level) => { self.player.y += 1; },
            _ => {},
        }
    }
}
/// The player, rendered as a white square on the grid.
pub struct Player { // A white square!
    // Column index on the level grid (X axis in render).
    x: i32,
    // Row index on the level grid (Y axis in render).
    y: i32,
}
impl Player {
    /// Creates a player at grid cell (`x`, `y`).
    fn new(x: i32, y: i32) -> Player {
        Player { x, y }
    }
    /// True when the cell adjacent to the player in `dir` is an open
    /// floor tile (`Some(0)`).
    fn can_move(&self, dir: Direction, level: &Level) -> bool {
        let (dx, dy) = match dir {
            Direction::Left => (-1, 0),
            Direction::Right => (1, 0),
            Direction::Up => (0, -1),
            Direction::Down => (0, 1),
        };
        level.cell(self.x + dx, self.y + dy) == Some(0)
    }
}
/// The four cardinal movement directions on the level grid.
enum Direction {
    Left,
    Right,
    Up,
    Down,
}
|
use crate::{
opcode::{execute_alu_8, execute_pop_16, execute_push_16, LoadOperand8, Opcode, Prefix},
smallnum::{U1, U2, U3},
tables::{
lookup16_r12, lookup8_r12, F3F5_TABLE, HALF_CARRY_ADD_TABLE, HALF_CARRY_SUB_TABLE,
SZF3F5_TABLE, SZPF3F5_TABLE,
},
RegName16, RegName8, Regs, Z80Bus, FLAG_CARRY, FLAG_F3, FLAG_F5, FLAG_HALF_CARRY, FLAG_PV,
FLAG_SIGN, FLAG_SUB, FLAG_ZERO, Z80,
};
/// Z80 conditional-execution conditions (`cc` field of JP/JR/CALL/RET),
/// in their 3-bit encoding order (see `from_u3`).
#[derive(Clone, Copy)]
pub enum FlagsCondition {
    NonZero,
    Zero,
    NonCarry,
    Carry,
    ParityOdd,
    ParityEven,
    SignPositive,
    SignNegative,
}
impl FlagsCondition {
    /// Decodes a condition from its 3-bit opcode encoding.
    pub fn from_u3(code: U3) -> Self {
        match code {
            U3::N0 => Self::NonZero,
            U3::N1 => Self::Zero,
            U3::N2 => Self::NonCarry,
            U3::N3 => Self::Carry,
            U3::N4 => Self::ParityOdd,
            U3::N5 => Self::ParityEven,
            U3::N6 => Self::SignPositive,
            U3::N7 => Self::SignNegative,
        }
    }
    /// Tests the condition against the current flag register: each
    /// condition checks one flag bit for being either set or clear.
    pub fn eval(self, regs: &Regs) -> bool {
        let flags = regs.get_flags();
        let (mask, expect_set) = match self {
            Self::NonZero => (FLAG_ZERO, false),
            Self::Zero => (FLAG_ZERO, true),
            Self::NonCarry => (FLAG_CARRY, false),
            Self::Carry => (FLAG_CARRY, true),
            Self::ParityOdd => (FLAG_PV, false),
            Self::ParityEven => (FLAG_PV, true),
            Self::SignPositive => (FLAG_SIGN, false),
            Self::SignNegative => (FLAG_SIGN, true),
        };
        ((flags & mask) != 0) == expect_set
    }
}
/// normal execution group, can be modified with prefixes DD, FD, providing
/// DD OPCODE [NN], FD OPCODE [NN] instruction group
///
/// Opcode matching organised based on
/// [document](http://www.z80.info/decoding.htm) by Cristian Dinu
///
/// DAA algorithm
/// [link](http://www.worldofspectrum.org/faq/reference/z80reference.htm#DAA)
pub fn execute_normal(cpu: &mut Z80, bus: &mut impl Z80Bus, opcode: Opcode, prefix: Prefix) {
    // Dispatch on the 2 first (most significant) bits of the opcode
    match opcode.x {
        // ---------------------------------
        // [0b00yyyzzz] instruction section
        // ---------------------------------
        // [0b00yyy000] instruction group (NOP, EX, DJNZ, JR)
        U2::N0 if opcode.z == U3::N0 => {
            match opcode.y {
                // NOP, 4 clocks
                // [0b00000000] = 0x00
                U3::N0 => {}
                // EX AF, AF', 4 clocks
                // [0b00001000] = 0x08
                U3::N1 => {
                    cpu.regs.swap_af_alt();
                }
                // DJNZ offset; (4 + 1 + 3) + [5] = 8 or 13 clocks
                // [0b00010000] = 0x10
                U3::N2 => {
                    bus.wait_no_mreq(cpu.regs.get_ir(), 1);
                    // emulate read byte without pc shift
                    let offset = bus.read(cpu.regs.get_pc(), 3) as i8;
                    // perform jump if needed (B is decremented first;
                    // jump taken while it stays non-zero)
                    if cpu.regs.dec_reg_8(RegName8::B) != 0 {
                        bus.wait_loop(cpu.regs.get_pc(), 5);
                        cpu.regs.shift_pc(offset);
                    };
                    // inc pc, what's left after reading displacement
                    cpu.regs.inc_pc();
                }
                // JR offset
                // [0b00011000] = 0x18
                U3::N3 => {
                    // same rules as DJNZ, but unconditional
                    let offset = bus.read(cpu.regs.get_pc(), 3) as i8;
                    bus.wait_loop(cpu.regs.get_pc(), 5);
                    cpu.regs.shift_pc(offset);
                    cpu.regs.inc_pc();
                }
                // JR condition[y-4] displacement; 4 + 3 + [5] = 7/12 clocks
                // NZ [0b00100000], Z [0b00101000] NC [0b00110000] C [0b00111000]
                U3::N4 | U3::N5 | U3::N6 | U3::N7 => {
                    // 0x20, 0x28, 0x30, 0x38
                    let offset = bus.read(cpu.regs.get_pc(), 3) as i8;
                    // y in range 4..7, so y - 4 re-encodes the condition
                    let cnd = FlagsCondition::from_u3(U3::from_byte(opcode.y.as_byte() - 4, 0));
                    if cnd.eval(&cpu.regs) {
                        bus.wait_loop(cpu.regs.get_pc(), 5);
                        cpu.regs.shift_pc(offset);
                    };
                    // inc pc, what's left after reading displacement
                    cpu.regs.inc_pc();
                }
            };
        }
        // [0b00ppq001] instruction group (LD, ADD)
        U2::N0 if opcode.z == U3::N1 => {
            match opcode.q {
                // LD rp[p], nn, 4 + 3 + 3 = 10 clocks
                // [0b00pp0001] : 0x01, 0x11, 0x21, 0x31
                U1::N0 => {
                    let reg = RegName16::from_u2_sp(opcode.p).with_prefix(prefix);
                    let data = cpu.fetch_word(bus, 3);
                    cpu.regs.set_reg_16(reg, data);
                }
                // ADD HL/IX/IY, ss ; ss - 16 bit with sp set
                // [0b00pp1001] : 0x09; 0x19; 0x29; 0x39
                U1::N1 => {
                    bus.wait_loop(cpu.regs.get_ir(), 7);
                    let reg_operand = RegName16::from_u2_sp(opcode.p).with_prefix(prefix);
                    let reg_acc = RegName16::HL.with_prefix(prefix);
                    let acc = cpu.regs.get_reg_16(reg_acc);
                    let operand = cpu.regs.get_reg_16(reg_operand);
                    // widen to u32 so the 16-bit carry can be observed
                    let temp: u32 = (acc as u32).wrapping_add(operand as u32);
                    // watch tables module
                    let lookup = lookup16_r12(acc, operand, temp as u16);
                    // get last flags, reset affected by instruction
                    let mut flags = cpu.regs.get_flags() & (FLAG_ZERO | FLAG_PV | FLAG_SIGN);
                    flags |= HALF_CARRY_ADD_TABLE[(lookup & 0x07) as usize];
                    flags |= (temp > 0xFFFF) as u8 * FLAG_CARRY;
                    flags |= F3F5_TABLE[((temp >> 8) as u8) as usize];
                    cpu.regs.set_flags(flags);
                    cpu.regs.set_reg_16(reg_acc, temp as u16);
                }
            };
        }
        // [0b00ppq010] instruction group (LD INDIRECT)
        U2::N0 if opcode.z == U3::N2 => {
            match opcode.q {
                // LD (BC), A // 4 + 3 = 7 clocks
                // [0b00000010] : 0x02
                U1::N0 if opcode.p == U2::N0 => {
                    bus.write(cpu.regs.get_bc(), cpu.regs.get_acc(), 3);
                }
                // LD (DE), A // 4 + 3 = 7 clocks
                // [0b00010010] : 0x12
                U1::N0 if opcode.p == U2::N1 => {
                    bus.write(cpu.regs.get_de(), cpu.regs.get_acc(), 3);
                }
                // LD (nn), HL/IX/IY // 4 + 3 + 3 + 3 + 3 = 16 clocks
                // [0b00100010] : 0x22
                U1::N0 if opcode.p == U2::N2 => {
                    let addr = cpu.fetch_word(bus, 3);
                    let reg = RegName16::HL.with_prefix(prefix);
                    bus.write_word(addr, cpu.regs.get_reg_16(reg), 3);
                }
                // LD (nn), A // 4 + 3 + 3 + 3 = 13 clocks
                // [0b00110010] : 0x32 (only remaining p value)
                U1::N0 => {
                    let addr = cpu.fetch_word(bus, 3);
                    bus.write(addr, cpu.regs.get_acc(), 3);
                }
                // LD A, (BC) // 4 + 3 = 7 clocks
                // [0b00001010] : 0x0A
                U1::N1 if opcode.p == U2::N0 => {
                    let addr = cpu.regs.get_bc();
                    cpu.regs.set_acc(bus.read(addr, 3));
                }
                // LD A, (DE) // 4 + 3 = 7 clocks
                // [0b00011010] : 0x1A
                U1::N1 if opcode.p == U2::N1 => {
                    let addr = cpu.regs.get_de();
                    cpu.regs.set_acc(bus.read(addr, 3));
                }
                // LD HL/IX/IY, (nn) // 4 + 3 + 3 + 3 + 3 = 16 clocks
                // [0b00101010] : 0x2A
                U1::N1 if opcode.p == U2::N2 => {
                    let addr = cpu.fetch_word(bus, 3);
                    let reg = RegName16::HL.with_prefix(prefix);
                    cpu.regs.set_reg_16(reg, bus.read_word(addr, 3));
                }
                // LD A, (nn) // 4 + 3 + 3 + 3 = 13 clocks
                // [0b00111010] : 0x3A (only remaining p value)
                U1::N1 => {
                    let addr = cpu.fetch_word(bus, 3);
                    cpu.regs.set_acc(bus.read(addr, 3));
                }
            };
        }
        // [0b00ppq011] instruction group (INC, DEC)
        U2::N0 if opcode.z == U3::N3 => {
            bus.wait_loop(cpu.regs.get_ir(), 2);
            // get register by rp[pp]
            let reg = RegName16::from_u2_sp(opcode.p).with_prefix(prefix);
            match opcode.q {
                // INC BC/DE/HL/IX/IY/SP
                // [0b00pp0011] : 0x03, 0x13, 0x23, 0x33
                U1::N0 => {
                    cpu.regs.inc_reg_16(reg);
                }
                // DEC BC/DE/HL/IX/IY/SP
                // [0b00pp1011] : 0x0B, 0x1B, 0x2B, 0x3B
                U1::N1 => {
                    cpu.regs.dec_reg_16(reg);
                }
            };
        }
        // [0b00yyy100], [0b00yyy101] instruction group (INC, DEC) 8 bit
        U2::N0 if (opcode.z == U3::N4) || (opcode.z == U3::N5) => {
            let operand;
            let data;
            let result;
            // ------------
            // get data
            // ------------
            if let Some(mut reg) = RegName8::from_u3(opcode.y) {
                // INC r[y], DEC r[y] ; IX and IY also used
                // INC [0b00yyy100] : 0x04, 0x0C, 0x14, 0x1C, 0x24, 0x2C, 0x3C
                // DEC [0b00yyy101] : 0x05, 0x0D, 0x15, 0x1D, 0x25, 0x2D, 0x3D
                reg = reg.with_prefix(prefix);
                data = cpu.regs.get_reg_8(reg);
                operand = LoadOperand8::Reg(reg);
            } else {
                // INC (HL)/(IX + d)/(IY + d), DEC (HL)/(IX + d)/(IY + d) ; INDIRECT
                // INC [0b00110100], DEC [0b00110101] : 0x34, 0x35
                let addr = if prefix == Prefix::None {
                    // we have INC/DEC (HL)
                    cpu.regs.get_hl()
                } else {
                    // we have INC/DEC (IX/IY + d)
                    let d = bus.read(cpu.regs.get_pc(), 3) as i8;
                    bus.wait_loop(cpu.regs.get_pc(), 5);
                    cpu.regs.inc_pc();
                    cpu.regs
                        .get_reg_16_with_displacement(RegName16::HL.with_prefix(prefix), d)
                };
                // read data
                data = bus.read(addr, 3);
                bus.wait_no_mreq(addr, 1);
                operand = LoadOperand8::Indirect(addr);
            };
            // ------------
            // execute
            // ------------
            // carry unaffected
            let mut flags = cpu.regs.get_flags() & FLAG_CARRY;
            if opcode.z == U3::N4 {
                // INC — overflow only when incrementing 0x7F
                result = data.wrapping_add(1);
                flags |= (data == 0x7F) as u8 * FLAG_PV;
                let lookup = lookup8_r12(data, 1, result);
                flags |= HALF_CARRY_ADD_TABLE[(lookup & 0x07) as usize];
            } else {
                // DEC — overflow only when decrementing 0x80
                result = data.wrapping_sub(1);
                flags |= FLAG_SUB;
                flags |= (data == 0x80) as u8 * FLAG_PV;
                let lookup = lookup8_r12(data, 1, result);
                flags |= HALF_CARRY_SUB_TABLE[(lookup & 0x07) as usize];
            }
            flags |= SZF3F5_TABLE[result as usize];
            cpu.regs.set_flags(flags);
            // ------------
            // write data
            // ------------
            match operand {
                LoadOperand8::Indirect(addr) => {
                    bus.write(addr, result, 3);
                }
                LoadOperand8::Reg(reg) => {
                    cpu.regs.set_reg_8(reg, result);
                }
            };
            // Clocks:
            // Direct : 4
            // HL : 4 + 3 + 1 + 3 = 11
            // XY+d : 4 + 4 + 3 + 5 + 3 + 1 + 3 = 23
        }
        // [0b00yyy110] instruction group (LD R, N 8 bit) :
        // 0x06, 0x0E, 0x16, 0x1E, 0x26, 0x2E, 0x36, 0x3E
        U2::N0 if opcode.z == U3::N6 => {
            let operand = if let Some(reg) = RegName8::from_u3(opcode.y) {
                // Direct LD R, N
                LoadOperand8::Reg(reg.with_prefix(prefix))
            } else {
                // INDIRECT LD (HL/IX+d/IY+d), N <PREFIX>[0b00110110] : 0x36
                let addr = if prefix == Prefix::None {
                    // LD (HL)
                    cpu.regs.get_hl()
                } else {
                    // LD (IX+d/ IY+d)
                    let d = cpu.fetch_byte(bus, 3) as i8;
                    cpu.regs
                        .get_reg_16_with_displacement(RegName16::HL.with_prefix(prefix), d)
                };
                LoadOperand8::Indirect(addr)
            };
            // Read const operand
            let data = bus.read(cpu.regs.get_pc(), 3);
            // extra wait states only for prefixed indirect addressing
            if prefix != Prefix::None {
                if let LoadOperand8::Indirect(_) = operand {
                    bus.wait_loop(cpu.regs.get_pc(), 2);
                }
            }
            cpu.regs.inc_pc();
            // write to bus or reg
            match operand {
                LoadOperand8::Indirect(addr) => {
                    bus.write(addr, data, 3);
                }
                LoadOperand8::Reg(reg) => {
                    cpu.regs.set_reg_8(reg, data);
                }
            };
            // Clocks:
            // Direct: 4 + 3 = 7
            // HL: 4 + 3 + 3 = 10
            // XY+d: 4 + 4 + 3 + 3 + 2 + 3 = 19
        }
        // [0b00yyy111] instruction group (Assorted rotate/flag ops)
        U2::N0 => {
            match opcode.y {
                // RLCA ; Rotate left; msb will become lsb; carry = msb
                // [0b00000111] : 0x07
                U3::N0 => {
                    let mut data = cpu.regs.get_acc();
                    let carry = (data & 0x80) != 0;
                    data <<= 1;
                    if carry {
                        data |= 1;
                    } else {
                        data &= 0xFE;
                    };
                    let mut flags = cpu.regs.get_flags() & (FLAG_PV | FLAG_SIGN | FLAG_ZERO);
                    flags |= carry as u8 * FLAG_CARRY;
                    flags |= F3F5_TABLE[data as usize];
                    cpu.regs.set_flags(flags);
                    cpu.regs.set_acc(data);
                }
                // RRCA ; Rotate right; lsb will become msb; carry = lsb
                // [0b00001111] : 0x0F
                U3::N1 => {
                    let mut data = cpu.regs.get_acc();
                    let carry = (data & 0x01) != 0;
                    data >>= 1;
                    if carry {
                        data |= 0x80;
                    } else {
                        data &= 0x7F;
                    };
                    let mut flags = cpu.regs.get_flags() & (FLAG_PV | FLAG_SIGN | FLAG_ZERO);
                    flags |= carry as u8 * FLAG_CARRY;
                    flags |= F3F5_TABLE[data as usize];
                    cpu.regs.set_flags(flags);
                    cpu.regs.set_acc(data);
                }
                // RLA Rotate left through carry
                // [0b00010111]: 0x17
                U3::N2 => {
                    let mut data = cpu.regs.get_acc();
                    let carry = (data & 0x80) != 0;
                    data <<= 1;
                    // old carry rotates into the lsb
                    if (cpu.regs.get_flags() & FLAG_CARRY) != 0 {
                        data |= 1;
                    } else {
                        data &= 0xFE;
                    };
                    let mut flags = cpu.regs.get_flags() & (FLAG_PV | FLAG_SIGN | FLAG_ZERO);
                    flags |= carry as u8 * FLAG_CARRY;
                    flags |= F3F5_TABLE[data as usize];
                    cpu.regs.set_flags(flags);
                    cpu.regs.set_acc(data);
                }
                // RRA Rotate right through carry
                // [0b00011111] : 0x1F
                U3::N3 => {
                    let mut data = cpu.regs.get_acc();
                    let carry = (data & 0x01) != 0;
                    data >>= 1;
                    // old carry rotates into the msb
                    if (cpu.regs.get_flags() & FLAG_CARRY) != 0 {
                        data |= 0x80;
                    } else {
                        data &= 0x7F;
                    };
                    let mut flags = cpu.regs.get_flags() & (FLAG_PV | FLAG_SIGN | FLAG_ZERO);
                    flags |= carry as u8 * FLAG_CARRY;
                    flags |= F3F5_TABLE[data as usize];
                    cpu.regs.set_flags(flags);
                    cpu.regs.set_acc(data);
                }
                // DAA [0b00100111] [link to the algorithm in header]
                U3::N4 => {
                    let acc = cpu.regs.get_acc();
                    let old_flags = cpu.regs.get_flags();
                    let mut flags = old_flags & FLAG_SUB;
                    let mut correction;
                    // high-nibble correction (also sets carry)
                    if (acc > 0x99) || ((old_flags & FLAG_CARRY) != 0) {
                        correction = 0x60_u8;
                        flags |= FLAG_CARRY;
                    } else {
                        correction = 0x00_u8;
                    };
                    // low-nibble correction
                    if ((acc & 0x0F) > 0x09) || ((old_flags & FLAG_HALF_CARRY) != 0) {
                        correction |= 0x06;
                    }
                    // correction is added after ADD-like ops, subtracted
                    // after SUB-like ops (FLAG_SUB)
                    let acc_new = if (old_flags & FLAG_SUB) == 0 {
                        let lookup = lookup8_r12(acc, correction, acc.wrapping_add(correction));
                        flags |= HALF_CARRY_ADD_TABLE[(lookup & 0x07) as usize];
                        acc.wrapping_add(correction)
                    } else {
                        let lookup = lookup8_r12(acc, correction, acc.wrapping_sub(correction));
                        flags |= HALF_CARRY_SUB_TABLE[(lookup & 0x07) as usize];
                        acc.wrapping_sub(correction)
                    };
                    flags |= SZPF3F5_TABLE[acc_new as usize];
                    cpu.regs.set_flags(flags);
                    cpu.regs.set_acc(acc_new);
                }
                // CPL Invert (Complement)
                // [0b00101111] : 0x2F
                U3::N5 => {
                    let data = !cpu.regs.get_acc();
                    let mut flags = cpu.regs.get_flags() & !(FLAG_F3 | FLAG_F5);
                    flags |= FLAG_HALF_CARRY | FLAG_SUB | F3F5_TABLE[data as usize];
                    cpu.regs.set_flags(flags);
                    cpu.regs.set_acc(data);
                }
                // SCF Set carry flag
                // [0b00110111] : 0x37
                U3::N6 => {
                    let data = cpu.regs.get_acc();
                    let mut flags = cpu.regs.get_flags() & (FLAG_ZERO | FLAG_PV | FLAG_SIGN);
                    flags |= F3F5_TABLE[data as usize] | FLAG_CARRY;
                    cpu.regs.set_flags(flags);
                }
                // CCF Invert carry flag
                // [0b00111111] : 0x3F
                U3::N7 => {
                    let data = cpu.regs.get_acc();
                    let old_carry = (cpu.regs.get_flags() & FLAG_CARRY) != 0;
                    let mut flags = cpu.regs.get_flags() & (FLAG_SIGN | FLAG_PV | FLAG_ZERO);
                    flags |= F3F5_TABLE[data as usize];
                    // old carry moves to half-carry; carry itself inverts
                    flags |= old_carry as u8 * FLAG_HALF_CARRY;
                    flags |= (!old_carry) as u8 * FLAG_CARRY;
                    cpu.regs.set_flags(flags);
                }
            }
        }
        // HALT (must be matched before the generic LD r, (HL) arm below)
        // [0b01110110] : 0x76
        U2::N1 if (opcode.z == U3::N6) && (opcode.y == U3::N6) => {
            cpu.halted = true;
            bus.halt(true);
            // PC stays on the HALT instruction until an interrupt
            cpu.regs.dec_pc();
        }
        // ---------------------------------
        // [0b01yyyzzz] instruction section
        // ---------------------------------
        // From memory to register
        // LD r[y], (HL/IX+d/IY+d)
        U2::N1 if (opcode.z == U3::N6) => {
            let src_addr = if prefix == Prefix::None {
                cpu.regs.get_hl()
            } else {
                let d = bus.read(cpu.regs.get_pc(), 3) as i8;
                bus.wait_loop(cpu.regs.get_pc(), 5);
                cpu.regs.inc_pc();
                cpu.regs
                    .get_reg_16_with_displacement(RegName16::HL.with_prefix(prefix), d)
            };
            cpu.regs
                .set_reg_8(RegName8::from_u3(opcode.y).unwrap(), bus.read(src_addr, 3));
            // Clocks:
            // HL: <4> + 3 = 7
            // XY+d: <[4] + 4> + [3 + 5] + 3 = 19
        }
        // LD (HL/IX+d/IY+d), r[z]
        U2::N1 if (opcode.y == U3::N6) => {
            let dst_addr = if prefix == Prefix::None {
                cpu.regs.get_hl()
            } else {
                let d = bus.read(cpu.regs.get_pc(), 3) as i8;
                bus.wait_loop(cpu.regs.get_pc(), 5);
                cpu.regs.inc_pc();
                cpu.regs
                    .get_reg_16_with_displacement(RegName16::HL.with_prefix(prefix), d)
            };
            bus.write(
                dst_addr,
                cpu.regs.get_reg_8(RegName8::from_u3(opcode.z).unwrap()),
                3,
            );
            // Clocks:
            // HL: 4 + 3 = 7
            // XY+d: 4 + 4 + 3 + 5 + 3 = 19
        }
        // LD r[y], r[z]
        U2::N1 => {
            let from = RegName8::from_u3(opcode.z).unwrap().with_prefix(prefix);
            let to = RegName8::from_u3(opcode.y).unwrap().with_prefix(prefix);
            let tmp = cpu.regs.get_reg_8(from);
            cpu.regs.set_reg_8(to, tmp);
        }
        // ---------------------------------
        // [0b10yyyzzz] instruction section
        // ---------------------------------
        // alu[y], operand[z-based]; 0x80...0xBF
        U2::N2 => {
            let operand = if let Some(reg) = RegName8::from_u3(opcode.z) {
                // alu[y] reg
                cpu.regs.get_reg_8(reg.with_prefix(prefix))
            } else {
                // alu[y] (HL/IX+d/IY+d)
                if prefix == Prefix::None {
                    bus.read(cpu.regs.get_hl(), 3)
                } else {
                    let d = bus.read(cpu.regs.get_pc(), 3) as i8;
                    bus.wait_loop(cpu.regs.get_pc(), 5);
                    cpu.regs.inc_pc();
                    let addr = cpu
                        .regs
                        .get_reg_16_with_displacement(RegName16::HL.with_prefix(prefix), d);
                    bus.read(addr, 3)
                }
            };
            execute_alu_8(cpu, opcode.y, operand);
            // Clocks:
            // Direct: 4
            // HL: 4 + 3
            // XY+d: 4 + 4 + 3 + 5 + 3 = 19
        }
        // ---------------------------------
        // [0b11yyyzzz] instruction section
        // ---------------------------------
        // RET cc[y]
        // [0b11yyy000] : C0; C8; D0; D8; E0; E8; F0; F8;
        U2::N3 if opcode.z == U3::N0 => {
            bus.wait_no_mreq(cpu.regs.get_ir(), 1);
            if FlagsCondition::from_u3(opcode.y).eval(&cpu.regs) {
                // write value from stack to pc
                execute_pop_16(cpu, bus, RegName16::PC, 3);
            };
            // Clocks:
            // 4 + 1 + [3 + 3] = 5/11
        }
        // [0b11ppq001] instruction group
        U2::N3 if opcode.z == U3::N1 => {
            match opcode.q {
                // POP (AF/BC/DE/HL/IX/IY) ; pop 16 bit register featuring A
                // [0b11pp0001]: C1; D1; E1; F1;
                U1::N0 => {
                    execute_pop_16(
                        cpu,
                        bus,
                        RegName16::from_u2_af(opcode.p).with_prefix(prefix),
                        3,
                    );
                    // Clocks:
                    // [4] + 4 + 3 + 3 = 10 / 14
                }
                // [0b11pp1001] instruction group (assorted)
                U1::N1 => {
                    match opcode.p {
                        // RET ; return
                        // [0b11001001] : C9;
                        U2::N0 => {
                            execute_pop_16(cpu, bus, RegName16::PC, 3);
                            // Clocks: 10
                        }
                        // EXX ; swap BC/DE/HL with their shadow set
                        // [0b11011001] : D9;
                        U2::N1 => {
                            cpu.regs.exx();
                        }
                        // JP HL/IX/IY
                        // [0b11101001] : E9
                        U2::N2 => {
                            let addr = cpu.regs.get_reg_16(RegName16::HL.with_prefix(prefix));
                            cpu.regs.set_pc(addr);
                        }
                        // LD SP, HL/IX/IY
                        // [0b11111001] : F9
                        U2::N3 => {
                            bus.wait_loop(cpu.regs.get_ir(), 2);
                            let data = cpu.regs.get_reg_16(RegName16::HL.with_prefix(prefix));
                            cpu.regs.set_sp(data);
                        }
                    }
                }
            };
        }
        // JP cc[y], nn
        // [0b11yyy010]: C2,CA,D2,DA,E2,EA,F2,FA
        U2::N3 if opcode.z == U3::N2 => {
            // address is always fetched, even when the jump is not taken
            let addr = cpu.fetch_word(bus, 3);
            if FlagsCondition::from_u3(opcode.y).eval(&cpu.regs) {
                cpu.regs.set_pc(addr);
            };
        }
        // [0b11yyy011] instruction group (assorted)
        U2::N3 if opcode.z == U3::N3 => {
            match opcode.y {
                // JP nn
                // [0b11000011]: C3
                U3::N0 => {
                    let addr = cpu.fetch_word(bus, 3);
                    cpu.regs.set_pc(addr);
                }
                // CB prefix — must be handled by the caller
                U3::N1 => {
                    panic!("CB prefix passed as non-prefixed instruction");
                }
                // OUT (n), A
                // [0b11010011] : D3
                U3::N2 => {
                    let data = cpu.fetch_byte(bus, 3);
                    let acc = cpu.regs.get_acc();
                    // write Acc to port A*256 + operand
                    bus.write_io(((acc as u16) << 8) | data as u16, acc);
                }
                // IN A, (n)
                // [0b11011011] : DB
                U3::N3 => {
                    let data = cpu.fetch_byte(bus, 3);
                    let acc = cpu.regs.get_acc();
                    // read from port A*256 + operand to Acc
                    cpu.regs
                        .set_acc(bus.read_io(((acc as u16) << 8) | (data as u16)));
                }
                // EX (SP), HL/IX/IY
                // [0b11100011] : E3
                U3::N4 => {
                    let reg = RegName16::HL.with_prefix(prefix);
                    let addr = cpu.regs.get_sp();
                    let tmp = bus.read_word(addr, 3);
                    bus.wait_no_mreq(addr.wrapping_add(1), 1);
                    // write high byte first to mirror real bus order
                    let [l, h] = cpu.regs.get_reg_16(reg).to_le_bytes();
                    bus.write(addr.wrapping_add(1), h, 3);
                    bus.write(addr, l, 3);
                    // bus.write_word(addr, cpu.regs.get_reg_16(reg), 3);
                    bus.wait_loop(addr, 2);
                    cpu.regs.set_reg_16(reg, tmp);
                    // Clocks: [4] + 4 + (3 + 3) + 1 + (3 + 3) + 2 = 23 or 19
                }
                // EX DE, HL
                // [0b11101011]
                U3::N5 => {
                    let de = cpu.regs.get_de();
                    let hl = cpu.regs.get_hl();
                    cpu.regs.set_de(hl);
                    cpu.regs.set_hl(de);
                }
                // DI
                // [0b11110011] : F3
                U3::N6 => {
                    // skip interrupt check and reset flip-flops
                    cpu.skip_interrupt = true;
                    cpu.regs.set_iff1(false);
                    cpu.regs.set_iff2(false);
                }
                // EI
                // [0b11111011] : FB
                U3::N7 => {
                    // skip interrupt check and set flip-flops
                    cpu.skip_interrupt = true;
                    cpu.regs.set_iff1(true);
                    cpu.regs.set_iff2(true);
                }
            }
        }
        // CALL cc[y], nn
        // [0b11yyy100] : C4; CC; D4; DC; E4; EC; F4; FC
        U2::N3 if opcode.z == U3::N4 => {
            // target address is fetched byte-by-byte so the final pc
            // increment can be deferred until after the condition check
            let addr_l = cpu.fetch_byte(bus, 3);
            let addr_h = bus.read(cpu.regs.get_pc(), 3);
            let addr = u16::from_le_bytes([addr_l, addr_h]);
            if FlagsCondition::from_u3(opcode.y).eval(&cpu.regs) {
                bus.wait_no_mreq(cpu.regs.get_pc(), 1);
                cpu.regs.inc_pc();
                execute_push_16(cpu, bus, RegName16::PC, 3);
                cpu.regs.set_pc(addr);
            } else {
                cpu.regs.inc_pc();
            }
        }
        // [0b11ppq101] opcodes group : PUSH rp2[p], CALL nn
        U2::N3 if opcode.z == U3::N5 => {
            match opcode.q {
                // PUSH rp2[p]
                // [0b11pp0101] : C5; D5; E5; F5;
                U1::N0 => {
                    bus.wait_no_mreq(cpu.regs.get_ir(), 1);
                    execute_push_16(
                        cpu,
                        bus,
                        RegName16::from_u2_af(opcode.p).with_prefix(prefix),
                        3,
                    );
                }
                U1::N1 => {
                    match opcode.p {
                        // CALL nn
                        // [0b11001101] : CD
                        U2::N0 => {
                            let addr_l = cpu.fetch_byte(bus, 3);
                            let addr_h = bus.read(cpu.regs.get_pc(), 3);
                            let addr = u16::from_le_bytes([addr_l, addr_h]);
                            bus.wait_no_mreq(cpu.regs.get_pc(), 1);
                            cpu.regs.inc_pc();
                            execute_push_16(cpu, bus, RegName16::PC, 3);
                            cpu.regs.set_pc(addr);
                        }
                        // [0b11011101] : DD — prefixes are the caller's job
                        U2::N1 => {
                            panic!("DD prefix passed as non-prefixed instruction");
                        }
                        // [0b11101101] : ED
                        U2::N2 => {
                            panic!("ED prefix passed as non-prefixed instruction");
                        }
                        // [0b11111101] : FD
                        U2::N3 => {
                            panic!("FD prefix passed as non-prefixed instruction");
                        }
                    }
                }
            }
        }
        // alu[y] NN
        // [0b11yyy110] : C6; CE; D6; DE; E6; EE; F6; FE
        U2::N3 if opcode.z == U3::N6 => {
            let operand = cpu.fetch_byte(bus, 3);
            execute_alu_8(cpu, opcode.y, operand);
        }
        // RST y*8
        // [0b11yyy111]
        U2::N3 => {
            bus.wait_no_mreq(cpu.regs.get_ir(), 1);
            execute_push_16(cpu, bus, RegName16::PC, 3);
            // CALL y*8
            cpu.regs
                .set_reg_16(RegName16::PC, (opcode.y.as_byte() as u16) * 8);
            // 4 + 1 + 3 + 3 = 11
        }
    };
}
|
use smol::Timer;
use std::time::Duration;
/// Sleep asynchronously for the given number of seconds.
pub async fn sleep(seconds: u32) {
    let duration = Duration::from_secs(u64::from(seconds));
    Timer::after(duration).await;
}
|
//! Unique keys and key paths.
use std::hash::Hash;
use std::panic::Location;
/// A unique call location.
///
/// These come from `#[track_caller]` annotations. It is a newtype
/// so we can use it as a key in various contexts; the traits we
/// want are not implemented on the inner type.
///
/// `Eq`/`Ord`/`Hash` are implemented manually below using the *address*
/// of the location metadata rather than its contents.
#[derive(Clone, Copy, Debug)]
pub struct Caller(&'static Location<'static>);
/// A stable identity for a mutation: the originating call site plus a
/// per-site sequence index.
#[derive(Clone, Copy, Eq, PartialEq, PartialOrd, Ord, Hash, Debug)]
pub struct Key {
    /// The caller that originated the mutation.
    pub(crate) caller: Caller,
    /// The sequence index.
    ///
    /// At some point, we probably should accommodate user-provided
    /// stable identities, but for now we just assume that it consists
    /// of the caller and sequence number.
    pub(crate) seq_ix: usize,
}
impl Key {
    /// Build a key from anything convertible into a [`Caller`] plus a
    /// sequence index.
    pub fn new(caller: impl Into<Caller>, seq_ix: usize) -> Key {
        let caller = caller.into();
        Key { caller, seq_ix }
    }
    /// A null key, which will always equal itself.
    ///
    /// In the future, this might be implemented differently, as Key will
    /// possibly expand to accommodate user-provided keys and callers from
    /// different runtimes such as scripting languages.
    pub fn null() -> Key {
        // `#[track_caller]` pins `Location::caller()` to this one call site
        // inside `null`, so every null key shares the same Caller.
        #[track_caller]
        fn null_caller() -> Caller {
            Location::caller().into()
        }
        Key {
            caller: null_caller(),
            seq_ix: 0,
        }
    }
}
impl Caller {
    /// The pointer to the location metadata.
    ///
    /// Unique locations are expected to have unique pointers. This
    /// is perhaps not formally guaranteed by the language spec, but
    /// it's hard to imagine how it can be implemented otherwise.
    fn as_ptr(&self) -> *const Location<'static> {
        let inner: &'static Location<'static> = self.0;
        inner
    }
}
impl PartialEq for Caller {
    fn eq(&self, other: &Caller) -> bool {
        // Pointer identity of the location metadata, matching `as_ptr`.
        std::ptr::eq(self.0, other.0)
    }
}
// Pointer equality is reflexive, symmetric and transitive, so `Eq` holds.
impl Eq for Caller {}
impl Hash for Caller {
    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
        // Hash the metadata address, consistent with the pointer-identity `eq`.
        let ptr = self.as_ptr();
        ptr.hash(state)
    }
}
impl PartialOrd for Caller {
    /// Delegates to `Ord` so the two orderings can never disagree.
    ///
    /// This is the canonical `partial_cmp` for a type that also implements
    /// `Ord` (clippy: `non_canonical_partial_ord_impl`); raw pointers order
    /// totally, so the result is identical to comparing `as_ptr` directly.
    fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
        Some(self.cmp(other))
    }
}
impl Ord for Caller {
    fn cmp(&self, other: &Self) -> std::cmp::Ordering {
        // Total order on the location metadata addresses.
        let (lhs, rhs) = (self.as_ptr(), other.as_ptr());
        lhs.cmp(&rhs)
    }
}
impl From<&'static Location<'static>> for Caller {
fn from(inner: &'static Location<'static>) -> Self {
Caller(inner)
}
}
|
#[test]
fn t24_outline_basic_render() {
    use agg::{Pixfmt, Rgb8, Rgba8};
    use agg::{RasterizerOutline, RendererPrimatives};
    // 100x100 RGB canvas, cleared to opaque white.
    let pixf = Pixfmt::<Rgb8>::new(100, 100);
    let mut base = agg::RenderingBase::new(pixf);
    base.clear(Rgba8::new(255, 255, 255, 255));
    // Primitive renderer drawing in opaque black.
    let mut prim = RendererPrimatives::with_base(&mut base);
    prim.line_color(agg::Rgba8::new(0, 0, 0, 255));
    // An open triangle-shaped path.
    let mut path = agg::Path::new();
    path.move_to(10.0, 10.0);
    path.line_to(50.0, 90.0);
    path.line_to(90.0, 10.0);
    let mut outline = RasterizerOutline::with_primative(&mut prim);
    outline.add_path(&path);
    base.to_file("tests/tmp/primative.png").unwrap();
    // The image-diff assertion against the reference output is disabled:
    //assert!(agg::ppm::img_diff("tests/tmp/primative.png",
    //                           "images/primative.png").unwrap());
}
|
//! blci
/// Isolate lowest clear bit
pub trait Blci {
    /// Sets all bits of `self` to 1 except for the least significant zero bit.
    ///
    /// If there is no zero bit in `self`, it sets all bits
    /// (the implementation wraps on `MAX + 1`, see `impl_blci!` below).
    ///
    /// # Instructions
    ///
    /// - [`BLCI`](http://support.amd.com/TechDocs/24594.pdf):
    ///   - Description: Isolate lowest clear bit.
    ///   - Architecture: x86.
    ///   - Instruction set: TBM.
    ///   - Registers: 32/64 bit.
    ///
    /// # Example
    ///
    /// ```
    /// # use bitintr::*;
    /// assert_eq!(0b0101_0000u8.blci(), 0b1111_1110u8);
    /// assert_eq!(0b1111_1111u8.blci(), 0b1111_1111u8);
    /// ```
    fn blci(self) -> Self;
}
/// Implement [`Blci`] for one integer type using the software fallback
/// formula `x | !(x + 1)` (wrapping add so an all-ones input stays all-ones).
macro_rules! impl_blci {
    ($t:ident) => {
        impl Blci for $t {
            #[inline]
            fn blci(self) -> Self {
                !(self.wrapping_add(1)) | self
            }
        }
    };
}
impl_all!(impl_blci: u8, u16, u32, u64, i8, i16, i32, i64);
|
use crate::error::Error;
use crate::poseidon::SimplePoseidonBatchHasher;
use crate::{Arity, BatchHasher, Strength, DEFAULT_STRENGTH};
use generic_array::GenericArray;
use paired::bls12_381::Fr;
use std::marker::PhantomData;
/// Which backing implementation a `Batcher` should use.
#[derive(Clone, Copy, Debug)]
pub enum BatcherType {
    GPU,
    CPU,
}
#[cfg(not(target_os = "macos"))]
use crate::gpu::GPUBatchHasher;
/// A batch Poseidon hasher dispatching to a GPU or CPU implementation.
pub enum Batcher<'a, A>
where
    A: Arity<Fr>,
{
    // Real GPU hasher on platforms where it is supported.
    #[cfg(not(target_os = "macos"))]
    GPU(GPUBatchHasher<'a, A>),
    // On macOS the GPU variant wraps the dummy hasher so the type still builds.
    #[cfg(target_os = "macos")]
    GPU(NoGPUBatchHasher<A>),
    CPU(SimplePoseidonBatchHasher<'a, A>),
}
impl<A> Batcher<'_, A>
where
    A: Arity<Fr>,
{
    /// Report which backend this batcher wraps.
    pub(crate) fn t(&self) -> BatcherType {
        match self {
            Batcher::GPU(_) => BatcherType::GPU,
            Batcher::CPU(_) => BatcherType::CPU,
        }
    }
    /// Build a batcher with the default Poseidon strength.
    pub(crate) fn new(t: &BatcherType, max_batch_size: usize) -> Result<Self, Error> {
        Self::new_with_strength(DEFAULT_STRENGTH, t, max_batch_size)
    }
    /// Build a batcher of the requested type and strength.
    ///
    /// NOTE(review): both `GPU` arms are gated on `feature = "gpu"`, so with
    /// that feature disabled this match appears non-exhaustive for
    /// `BatcherType::GPU` — presumably the crate's feature setup prevents
    /// that configuration; confirm against the build flags.
    pub(crate) fn new_with_strength(
        strength: Strength,
        t: &BatcherType,
        max_batch_size: usize,
    ) -> Result<Self, Error> {
        match t {
            // GPU requested on macOS: explicitly unsupported.
            #[cfg(all(feature = "gpu", target_os = "macos"))]
            BatcherType::GPU => panic!("GPU unimplemented on macos"),
            #[cfg(all(feature = "gpu", not(target_os = "macos")))]
            BatcherType::GPU => Ok(Batcher::GPU(GPUBatchHasher::<A>::new_with_strength(
                strength,
                max_batch_size,
            )?)),
            BatcherType::CPU => Ok(Batcher::CPU(
                SimplePoseidonBatchHasher::<A>::new_with_strength(strength, max_batch_size)?,
            )),
        }
    }
}
impl<A> BatchHasher<A> for Batcher<'_, A>
where
    A: Arity<Fr>,
{
    /// Hash each preimage, delegating to whichever backend is wrapped.
    fn hash(&mut self, preimages: &[GenericArray<Fr, A>]) -> Result<Vec<Fr>, Error> {
        match self {
            Batcher::GPU(batcher) => batcher.hash(preimages),
            Batcher::CPU(batcher) => batcher.hash(preimages),
        }
    }
    /// Largest batch the wrapped backend accepts.
    fn max_batch_size(&self) -> usize {
        match self {
            Batcher::GPU(batcher) => batcher.max_batch_size(),
            Batcher::CPU(batcher) => batcher.max_batch_size(),
        }
    }
}
/// NoGPUBatchHasher is a dummy required so we can build with the gpu flag even on platforms on which we cannot currently
/// run with GPU.
pub struct NoGPUBatchHasher<A>(PhantomData<A>);
impl<A> BatchHasher<A> for NoGPUBatchHasher<A>
where
    A: Arity<Fr>,
{
    // This stand-in exists only so the GPU variant type-checks on macOS;
    // it is not expected to be called, and panics if it is.
    fn hash(&mut self, _preimages: &[GenericArray<Fr, A>]) -> Result<Vec<Fr>, Error> {
        unimplemented!();
    }
    fn max_batch_size(&self) -> usize {
        unimplemented!();
    }
}
|
use std::fmt::Display;
use std::str::FromStr;
use std::path::Path;
/// Print a human-readable parse error for the argument at `idx`.
///
/// `idx` is zero-based internally; the printed position is one-based.
pub fn arg_err(idx: usize, str: &str, message: impl Display) {
    let position = idx + 1;
    println!("Error in arg {} \"{}\": {}", position, str, message)
}
/// A single command-line argument: its names, accepted values, and parser.
pub trait ArgParser<'a, T> {
    /// Possible names, including any "-"
    const NAMES: &'static [&'static str];
    /// Possible values, only used for help
    const VALUES: &'static [&'static str];
    const DESCRIPTION: &'static str;
    /// Parse the value after `=`, or `None` when the arg appears bare.
    fn parse_arg_value(value: Option<&'a str>) -> Result<T, String>;
    /// Scan `input` for any of `NAMES`, mark every match in `used`, and
    /// return the first successfully parsed value; later duplicates are
    /// reported via `arg_err` and ignored.
    fn try_parse(input: &'a[String], used: &mut [bool]) -> Option<T> {
        input.iter().enumerate().filter_map(|(idx, it)| {
            // Accept either `name` exactly (bare flag) or `name=value`.
            for name in Self::NAMES {
                if it.starts_with(name) {
                    let rest = &it[name.len()..];
                    return if rest.is_empty() {
                        Some((idx, it, Self::parse_arg_value(None).map_err(|err| arg_err(idx, it, err))))
                    } else if rest.starts_with("=") {
                        Some((idx, it, Self::parse_arg_value(Some(&rest[1..])).map_err(|err|arg_err(idx, it, err))))
                    } else {
                        // Prefix of some longer, different argument: no match.
                        None
                    };
                }
            }
            None
        }).map(|it| {
            // Every recognized occurrence is marked consumed, even if its
            // value failed to parse.
            used[it.0] = true;
            it
        }).fold(None, |value, (idx, it, res)| {
            match res {
                Ok(ok) => {
                    if value.is_none() {
                        Some(ok)
                    } else {
                        arg_err(idx, it, "Arg has already been set, ignoring");
                        value
                    }
                }
                // Parse failures were already printed by `arg_err` above.
                Err(()) => {
                    value
                }
            }
        })
    }
}
/// An [`ArgParser`] with a fallback value for when the argument is absent.
pub trait DefaultArgParser<'a, T>: ArgParser<'a, T> {
    /// Value used when the argument does not appear in `input`.
    const DEFAULT: T;
    /// Parse the argument, falling back to `Self::DEFAULT` when missing.
    fn parse(input: &'a[String], used: &mut [bool]) -> T {
        match Self::try_parse(input, used) {
            Some(parsed) => parsed,
            None => Self::DEFAULT,
        }
    }
}
/// Marker trait for boolean (flag-style) arguments: implementors supply
/// names and a description; the blanket `ArgParser` impl supplies parsing.
pub trait BoolParser<'a>: ArgParser<'a, bool> {
    const NAMES: &'static [&'static str];
    const DESCRIPTION: &'static str;
}
impl<'a, T: BoolParser<'a>> ArgParser<'a, bool> for T {
    const NAMES: &'static [&'static str] = <T as BoolParser>::NAMES;
    const VALUES: &'static [&'static str] = &["true", "t", "y", "false", "f", "n"];
    const DESCRIPTION: &'static str = <T as BoolParser>::DESCRIPTION;
    /// A bare flag (no `=value`) means `true`; otherwise the value must be
    /// one of the accepted spellings above.
    fn parse_arg_value(value: Option<&str>) -> Result<bool, String> {
        match value {
            None => Ok(true),
            Some("true") | Some("t") | Some("y") => Ok(true),
            Some("false") | Some("f") | Some("n") => Ok(false),
            Some(other) => Err(format!("Invalid value \"{}\", must be one of {}", other, Self::VALUES.join(", "))),
        }
    }
}
// Boolean flags default to `false` when absent from the command line.
impl<'a, T: BoolParser<'a>> DefaultArgParser<'a, bool> for T {
    const DEFAULT: bool = false;
}
/// Marker trait for floating-point arguments; parsing comes from the
/// blanket `ArgParser<f64>` impl below.
pub trait F64Parser<'a>: ArgParser<'a, f64> {
    const NAMES: &'static [&'static str];
    const DESCRIPTION: &'static str;
}
impl<'a, T: F64Parser<'a>> ArgParser<'a, f64> for T {
    const NAMES: &'static [&'static str] = <T as F64Parser>::NAMES;
    const VALUES: &'static [&'static str] = &["<float>", "<double>", "<int>"];
    const DESCRIPTION: &'static str = <T as F64Parser>::DESCRIPTION;
    /// Parse a required floating-point value; a bare flag (no `=value`)
    /// is an error.
    fn parse_arg_value(value: Option<&str>) -> Result<f64, String> {
        match value {
            // `map_err` replaces the match-and-rewrap of the parse result.
            Some(value) => f64::from_str(value).map_err(|err| format!("\"{}\": {}", value, err)),
            None => Err("Arg value is not optional, --help for more info".into()),
        }
    }
}
/// Marker trait for string-valued arguments (borrowed from the input).
pub trait StringParser<'a>: ArgParser<'a, &'a str> {
    const NAMES: &'static [&'static str];
    const DESCRIPTION: &'static str;
}
impl<'a, T: StringParser<'a>> ArgParser<'a, &'a str> for T {
    const NAMES: &'static [&'static str] = <T as StringParser>::NAMES;
    const VALUES: &'static [&'static str] = &["<string>"];
    const DESCRIPTION: &'static str = <T as StringParser>::DESCRIPTION;
    /// The raw `=value` text is returned as-is; a bare flag is an error.
    fn parse_arg_value(value: Option<&'a str>) -> Result<&'a str, String> {
        // `ok_or_else` replaces the if-let/else and keeps the error string
        // lazily constructed.
        value.ok_or_else(|| "Arg value is not optional, --help for more info".into())
    }
}
/// Marker trait for filesystem-path arguments.
pub trait FileParser<'a>: ArgParser<'a, &'a Path> {
    const NAMES: &'static [&'static str];
    const DESCRIPTION: &'static str;
}
impl<'a, T: FileParser<'a>> ArgParser<'a, &'a Path> for T {
    const NAMES: &'static [&'static str] = <T as FileParser>::NAMES;
    const VALUES: &'static [&'static str] = &["<path>"];
    const DESCRIPTION: &'static str = <T as FileParser>::DESCRIPTION;
    /// Borrow the `=value` text as a `Path`; a bare flag is an error.
    fn parse_arg_value(value: Option<&'a str>) -> Result<&'a Path, String> {
        // `map` + `ok_or_else` replaces the if-let/else rewrap.
        value
            .map(Path::new)
            .ok_or_else(|| "Arg value is not optional, --help for more info".into())
    }
}
|
use assert_cmd::prelude::*;
use std::env::set_current_dir;
use std::error::Error;
use std::fs::read_to_string;
use std::fs::remove_dir_all;
use std::path::Path;
use std::process::Command;
mod test_utilities;
use test_utilities::{create_toml_config, ensure_correct_dir, ConfigToml, ConfigV03X};
/// Upgrades a v0.1.x config (`project.toml`) to v0.2.x (`godot-rust-cli.toml`).
#[test]
fn upgrade_config_v_0_1x_to_v_0_2x() -> Result<(), Box<dyn Error>> {
    ensure_correct_dir();
    create_toml_config(false);
    set_current_dir("platformer_modules")?;
    let config_str = read_to_string("project.toml")?;
    let config_toml: ConfigToml = toml::from_str(&config_str)?;
    // 1. Assert the original configuration is correct.
    assert_eq!(config_toml.godot_project_name, "platformer");
    assert_eq!(config_toml.modules, vec!["Player", "MainScene"]);
    // 2. Assert that the upgrade command was successful.
    let mut cmd_upgrade = Command::new("cargo");
    cmd_upgrade
        .arg("run")
        .arg("--manifest-path=../../Cargo.toml")
        .arg("upgrade")
        .arg("0.2.x");
    cmd_upgrade.assert().success();
    // 3. Assert that the old configuration file doesn't exist anymore.
    // (`assert!` on the boolean directly — clippy: bool_assert_comparison.)
    let old_config_path = Path::new("project.toml");
    assert!(!old_config_path.exists());
    // 4. Assert that the new configuration file exists.
    let new_config_path = Path::new("godot-rust-cli.toml");
    assert!(new_config_path.exists());
    let new_config_str = read_to_string(new_config_path)?;
    let new_config_toml: ConfigToml = toml::from_str(&new_config_str)?;
    // 5. Assert that the new configuration is correct.
    assert_eq!(new_config_toml.godot_project_name, "platformer");
    assert_eq!(new_config_toml.modules, vec!["Player", "MainScene"]);
    // Clean up the scratch project directory.
    set_current_dir("../")?;
    remove_dir_all("platformer_modules")?;
    Ok(())
}
/// Upgrades a v0.1.x config (`project.toml`) to v0.3.x (`godot-rust-cli.json`).
#[test]
fn upgrade_config_v_0_1x_to_v_0_3x() -> Result<(), Box<dyn Error>> {
    ensure_correct_dir();
    create_toml_config(false);
    set_current_dir("platformer_modules")?;
    let config_str = read_to_string("project.toml")?;
    let config_toml: ConfigToml = toml::from_str(&config_str)?;
    // 1. Assert the original configuration is correct.
    // (Step comments renumbered — they were out of order.)
    assert_eq!(config_toml.godot_project_name, "platformer");
    assert_eq!(config_toml.modules, vec!["Player", "MainScene"]);
    // 2. Assert that the upgrade command was successful.
    let mut cmd_upgrade = Command::new("cargo");
    cmd_upgrade
        .arg("run")
        .arg("--manifest-path=../../Cargo.toml")
        .arg("upgrade")
        .arg("0.3.x");
    cmd_upgrade.assert().success();
    // 3. Assert that the old configuration file doesn't exist anymore.
    let old_config_path = Path::new("project.toml");
    assert!(!old_config_path.exists());
    // 4. Assert that the new configuration file exists.
    let new_config_path = Path::new("godot-rust-cli.json");
    assert!(new_config_path.exists());
    let new_config_str = read_to_string(new_config_path)?;
    let new_config_json: ConfigV03X = serde_json::from_str(&new_config_str)?;
    // 5. Assert that the new configuration is correct.
    assert_eq!(
        new_config_json.name,
        serde_json::json!("platformer_modules")
    );
    assert_eq!(
        new_config_json.godot_project_name,
        serde_json::json!("platformer")
    );
    assert_eq!(new_config_json.is_plugin, serde_json::json!(false));
    assert_eq!(new_config_json.modules[0], "Player");
    assert_eq!(new_config_json.modules[1], "MainScene");
    // Clean up the scratch project directory.
    set_current_dir("../")?;
    remove_dir_all("platformer_modules")?;
    Ok(())
}
/// Upgrades a v0.2.x config (`godot-rust-cli.toml`) to v0.3.x
/// (`godot-rust-cli.json`).
#[test]
fn upgrade_config_v_0_2x_to_v_0_3x() -> Result<(), Box<dyn Error>> {
    ensure_correct_dir();
    create_toml_config(true);
    set_current_dir("platformer_modules")?;
    let config_str = read_to_string("godot-rust-cli.toml")?;
    let config_toml: ConfigToml = toml::from_str(&config_str)?;
    // 1. Assert the original configuration is correct.
    assert_eq!(config_toml.godot_project_name, "platformer");
    assert_eq!(config_toml.modules, vec!["Player", "MainScene"]);
    // 2. Assert that the upgrade command was successful.
    let mut cmd_upgrade = Command::new("cargo");
    cmd_upgrade
        .arg("run")
        .arg("--manifest-path=../../Cargo.toml")
        .arg("upgrade")
        .arg("0.3.x");
    cmd_upgrade.assert().success();
    // 3. Assert that the old configuration file doesn't exist anymore.
    // Fixed: this test starts from `godot-rust-cli.toml`, not `project.toml`
    // (checking `project.toml` here was vacuously true — it never existed).
    let old_config_path = Path::new("godot-rust-cli.toml");
    assert!(!old_config_path.exists());
    // 4. Assert that the new configuration file exists.
    let new_config_path = Path::new("godot-rust-cli.json");
    assert!(new_config_path.exists());
    let new_config_str = read_to_string(new_config_path)?;
    let new_config_json: ConfigV03X = serde_json::from_str(&new_config_str)?;
    // 5. Assert that the new configuration is correct.
    assert_eq!(
        new_config_json.name,
        serde_json::json!("platformer_modules")
    );
    assert_eq!(
        new_config_json.godot_project_name,
        serde_json::json!("platformer")
    );
    assert_eq!(new_config_json.is_plugin, serde_json::json!(false));
    assert_eq!(new_config_json.modules[0], "Player");
    assert_eq!(new_config_json.modules[1], "MainScene");
    // Clean up the scratch project directory.
    set_current_dir("../")?;
    remove_dir_all("platformer_modules")?;
    Ok(())
}
|
use crate::header_verifier::{NumberVerifier, PowVerifier, TimestampVerifier, VersionVerifier};
use crate::{BlockErrorKind, NumberError, PowError, TimestampError, ALLOWED_FUTURE_BLOCKTIME};
use ckb_error::assert_error_eq;
use ckb_pow::PowEngine;
use ckb_test_chain_utils::MockMedianTime;
use ckb_types::{constants::BLOCK_VERSION, core::HeaderBuilder, packed::Header, prelude::*};
use faketime::unix_time_as_millis;
/// A median-time context of 100 identical "now" timestamps.
fn mock_median_time_context() -> MockMedianTime {
    let now = unix_time_as_millis();
    MockMedianTime::new(vec![now; 100])
}
#[test]
pub fn test_version() {
    // A header one version past the supported BLOCK_VERSION must be rejected.
    let bad_version = BLOCK_VERSION + 1;
    let header = HeaderBuilder::default().version(bad_version.pack()).build();
    let verifier = VersionVerifier::new(&header, BLOCK_VERSION);
    assert_error_eq!(verifier.verify().unwrap_err(), BlockErrorKind::Version);
}
#[cfg(not(disable_faketime))]
#[test]
fn test_timestamp() {
    // Freeze the clock so the median-time comparison is deterministic.
    let faketime_file = faketime::millis_tempfile(100_000).expect("create faketime file");
    faketime::enable(&faketime_file);
    let median_time = mock_median_time_context();
    // One millisecond past the median: should verify cleanly.
    let ts = unix_time_as_millis() + 1;
    let header = HeaderBuilder::default()
        .number(10u64.pack())
        .timestamp(ts.pack())
        .build();
    let verifier = TimestampVerifier::new(&median_time, &header);
    assert!(verifier.verify().is_ok());
}
#[cfg(not(disable_faketime))]
#[test]
fn test_timestamp_too_old() {
    let faketime_file = faketime::millis_tempfile(100_000).expect("create faketime file");
    faketime::enable(&faketime_file);
    let median_time = mock_median_time_context();
    // With faketime frozen, `min` is the median and `ts` sits just below it.
    let min = unix_time_as_millis();
    let ts = unix_time_as_millis() - 1;
    let header = HeaderBuilder::default()
        .number(10u64.pack())
        .timestamp(ts.pack())
        .build();
    let verifier = TimestampVerifier::new(&median_time, &header);
    assert_error_eq!(
        verifier.verify().unwrap_err(),
        TimestampError::BlockTimeTooOld { min, actual: ts },
    );
}
#[cfg(not(disable_faketime))]
#[test]
fn test_timestamp_too_new() {
    let faketime_file = faketime::millis_tempfile(100_000).expect("create faketime file");
    faketime::enable(&faketime_file);
    let median_time = mock_median_time_context();
    // One millisecond beyond the allowed future window.
    let max = unix_time_as_millis() + ALLOWED_FUTURE_BLOCKTIME;
    let ts = max + 1;
    let header = HeaderBuilder::default()
        .number(10u64.pack())
        .timestamp(ts.pack())
        .build();
    let verifier = TimestampVerifier::new(&median_time, &header);
    assert_error_eq!(
        verifier.verify().unwrap_err(),
        TimestampError::BlockTimeTooNew { max, actual: ts },
    );
}
#[test]
fn test_number() {
    // A child block must be parent number + 1; both being 10 is an error.
    let parent = HeaderBuilder::default().number(10u64.pack()).build();
    let header = HeaderBuilder::default().number(10u64.pack()).build();
    let verifier = NumberVerifier::new(&parent, &header);
    assert_error_eq!(
        verifier.verify().unwrap_err(),
        NumberError { expected: 11, actual: 10 },
    );
}
// A PoW engine whose `verify` always fails, for exercising the error path.
struct FakePowEngine;
impl PowEngine for FakePowEngine {
    fn verify(&self, _header: &Header) -> bool {
        false
    }
}
#[test]
fn test_pow_verifier() {
    let header = HeaderBuilder::default().build();
    // FakePowEngine rejects everything, so the nonce must come back invalid.
    let engine: &dyn PowEngine = &FakePowEngine;
    let verifier = PowVerifier::new(&header, engine);
    assert_error_eq!(verifier.verify().unwrap_err(), PowError::InvalidNonce);
}
|
//! The definition of session backends
mod cookie;
mod redis;
pub use self::cookie::CookieSessionBackend;
pub use self::imp::Backend;
#[cfg(feature = "redis-backend")]
pub use self::redis::RedisSessionBackend;
pub(crate) mod imp {
    use futures::{Future, Poll};
    use tsukuyomi::error::Error;
    use tsukuyomi::input::Input;
    use crate::session::SessionState;
    /// A future resolving to the stored session state; `poll_read` also
    /// receives the current request `Input`.
    pub trait ReadFuture {
        fn poll_read(&mut self, input: &mut Input<'_>) -> Poll<SessionState, Error>;
    }
    // Blanket impl: any future with the right item/error types qualifies;
    // the `Input` argument is simply ignored.
    impl<F> ReadFuture for F
    where
        F: Future<Item = SessionState, Error = Error>,
    {
        #[inline]
        fn poll_read(&mut self, _: &mut Input<'_>) -> Poll<SessionState, Error> {
            self.poll()
        }
    }
    /// A future that completes once the session state has been written out.
    pub trait WriteFuture {
        fn poll_write(&mut self, input: &mut Input<'_>) -> Poll<(), Error>;
    }
    impl<F> WriteFuture for F
    where
        F: Future<Item = (), Error = Error>,
    {
        #[inline]
        fn poll_write(&mut self, _: &mut Input<'_>) -> Poll<(), Error> {
            self.poll()
        }
    }
    /// A trait representing the session backend.
    ///
    /// Currently the detailed trait definition is private.
    pub trait Backend: BackendImpl {}
    /// The actual backend contract: produce futures that read and write
    /// a request's session state.
    pub trait BackendImpl {
        type ReadFuture: ReadFuture + Send + 'static;
        type WriteFuture: WriteFuture + Send + 'static;
        fn read(&self, input: &mut Input<'_>) -> Self::ReadFuture;
        fn write(&self, input: &mut Input<'_>, values: SessionState) -> Self::WriteFuture;
    }
}
|
use std::io::Read;
use bayard_client::client::client::{create_client, Clerk};
use clap::ArgMatches;
use iron::headers::ContentType;
use iron::prelude::*;
use iron::typemap::Key;
use iron::{status, Chain, Iron, IronResult, Request, Response};
use logger::Logger;
use persistent::Write;
use router::Router;
use serde_json::Value;
use urlencoded::UrlEncodedQuery;
use crate::util::log::set_http_logger;
/// Typemap key under which the shared `Clerk` is stored in the iron chain.
#[derive(Copy, Clone)]
pub struct Client;
impl Key for Client {
    type Value = Clerk;
}
/// `GET /probe` — liveness check, plain-text response.
fn probe(req: &mut Request) -> IronResult<Response> {
    // The shared Clerk lives in the chain's typemap; lock it for this call.
    let clerk_mutex = req.get::<Write<Client>>().unwrap();
    let mut clerk = clerk_mutex.lock().unwrap();
    let payload = clerk.probe();
    Ok(Response::with((
        ContentType::plaintext().0,
        status::Ok,
        payload,
    )))
}
/// `GET /peers` — cluster membership as JSON.
fn peers(req: &mut Request) -> IronResult<Response> {
    let clerk_mutex = req.get::<Write<Client>>().unwrap();
    let mut clerk = clerk_mutex.lock().unwrap();
    let payload = clerk.peers();
    Ok(Response::with((ContentType::json().0, status::Ok, payload)))
}
/// `GET /metrics` — server metrics, plain-text response.
fn metrics(req: &mut Request) -> IronResult<Response> {
    let clerk_mutex = req.get::<Write<Client>>().unwrap();
    let mut clerk = clerk_mutex.lock().unwrap();
    let payload = clerk.metrics();
    Ok(Response::with((
        ContentType::plaintext().0,
        status::Ok,
        payload,
    )))
}
/// `GET /index/docs/:doc_id` — fetch one document by id.
fn get(req: &mut Request) -> IronResult<Response> {
    // `:doc_id` route parameter; falls back to "" if somehow absent.
    let doc_id = req
        .extensions
        .get::<Router>()
        .unwrap()
        .find("doc_id")
        .unwrap_or("")
        .to_owned();
    let clerk_mutex = req.get::<Write<Client>>().unwrap();
    let mut clerk = clerk_mutex.lock().unwrap();
    let payload = clerk.get(&doc_id);
    Ok(Response::with((ContentType::json().0, status::Ok, payload)))
}
/// `PUT /index/docs/:doc_id` — index one document, forcing its `_id`
/// to the path parameter.
fn put(req: &mut Request) -> IronResult<Response> {
    let doc_id = req
        .extensions
        .get::<Router>()
        .unwrap()
        .find("doc_id")
        .unwrap_or("")
        .to_owned();
    // Read the JSON body, then overwrite its `_id` with the path's doc_id.
    let mut body = String::new();
    req.body
        .read_to_string(&mut body)
        .expect("Failed to read line");
    let mut doc_json: Value = serde_json::from_str(body.as_str()).unwrap();
    doc_json["_id"] = Value::String(doc_id);
    body = serde_json::to_string(&doc_json).unwrap();
    let clerk_mutex = req.get::<Write<Client>>().unwrap();
    let mut clerk = clerk_mutex.lock().unwrap();
    let payload = clerk.put(&body);
    Ok(Response::with((ContentType::json().0, status::Ok, payload)))
}
/// `DELETE /index/docs/:doc_id` — remove one document by id.
fn delete(req: &mut Request) -> IronResult<Response> {
    let doc_id = req
        .extensions
        .get::<Router>()
        .unwrap()
        .find("doc_id")
        .unwrap_or("")
        .to_owned();
    let clerk_mutex = req.get::<Write<Client>>().unwrap();
    let mut clerk = clerk_mutex.lock().unwrap();
    let payload = clerk.delete(&doc_id);
    Ok(Response::with((ContentType::json().0, status::Ok, payload)))
}
/// `PUT /index/docs` — forward the raw request body as a bulk index payload.
fn bulk_put(req: &mut Request) -> IronResult<Response> {
    let mut body = String::new();
    req.body
        .read_to_string(&mut body)
        .expect("Failed to read line");
    let clerk_mutex = req.get::<Write<Client>>().unwrap();
    let mut clerk = clerk_mutex.lock().unwrap();
    let payload = clerk.bulk_put(&body);
    Ok(Response::with((ContentType::json().0, status::Ok, payload)))
}
/// `DELETE /index/docs` — forward the raw request body as a bulk delete payload.
fn bulk_delete(req: &mut Request) -> IronResult<Response> {
    let mut body = String::new();
    req.body
        .read_to_string(&mut body)
        .expect("Failed to read line");
    let clerk_mutex = req.get::<Write<Client>>().unwrap();
    let mut clerk = clerk_mutex.lock().unwrap();
    let payload = clerk.bulk_delete(&body);
    Ok(Response::with((ContentType::json().0, status::Ok, payload)))
}
/// `GET /index/commit` — commit pending index changes.
fn commit(req: &mut Request) -> IronResult<Response> {
    let clerk_mutex = req.get::<Write<Client>>().unwrap();
    let mut clerk = clerk_mutex.lock().unwrap();
    let payload = clerk.commit();
    Ok(Response::with((ContentType::json().0, status::Ok, payload)))
}
/// `GET /index/rollback` — roll back uncommitted index changes.
fn rollback(req: &mut Request) -> IronResult<Response> {
    let clerk_mutex = req.get::<Write<Client>>().unwrap();
    let mut clerk = clerk_mutex.lock().unwrap();
    let payload = clerk.rollback();
    Ok(Response::with((ContentType::json().0, status::Ok, payload)))
}
/// `GET /index/merge` — trigger an index segment merge.
fn merge(req: &mut Request) -> IronResult<Response> {
    let clerk_mutex = req.get::<Write<Client>>().unwrap();
    let mut clerk = clerk_mutex.lock().unwrap();
    let payload = clerk.merge();
    Ok(Response::with((ContentType::json().0, status::Ok, payload)))
}
/// `GET /index/search` — run a query with optional paging and facets.
///
/// NOTE(review): a request without a `query` parameter panics this handler
/// (`unwrap` on a missing key); consider returning 400 instead — confirm
/// the desired behavior with the iron error-handling conventions.
fn search(req: &mut Request) -> IronResult<Response> {
    let map = req.get_ref::<UrlEncodedQuery>().unwrap().to_owned();
    let query = map.get("query").unwrap().get(0).unwrap();
    // Paging: offset `from` (default 0) and page size `limit` (default 10).
    let mut from = 0;
    if map.contains_key("from") {
        from = map
            .get("from")
            .unwrap()
            .get(0)
            .unwrap_or(&String::from("0"))
            .parse::<u64>()
            .unwrap();
    }
    let mut limit = 10;
    if map.contains_key("limit") {
        limit = map
            .get("limit")
            .unwrap()
            .get(0)
            .unwrap_or(&String::from("10"))
            .parse::<u64>()
            .unwrap();
    }
    // Presence-only flags: mere existence of the key enables them.
    let exclude_count = map.contains_key("exclude_count");
    let exclude_docs = map.contains_key("exclude_docs");
    let mut facet_field: &str = "";
    if map.contains_key("facet_field") {
        facet_field = map.get("facet_field").unwrap().get(0).unwrap();
    }
    // All repeated `facet_prefix` values are forwarded.
    let mut facet_prefixes = Vec::new();
    if map.contains_key("facet_prefix") {
        facet_prefixes = map.get("facet_prefix").cloned().unwrap();
    }
    let client_arc = req.get::<Write<Client>>().unwrap();
    let mut client = client_arc.lock().unwrap();
    let value = client.search(
        query,
        from,
        limit,
        exclude_count,
        exclude_docs,
        facet_field,
        facet_prefixes,
    );
    Ok(Response::with((ContentType::json().0, status::Ok, value)))
}
/// `GET /index/schema` — return the index schema as JSON.
fn schema(req: &mut Request) -> IronResult<Response> {
    let clerk_mutex = req.get::<Write<Client>>().unwrap();
    let mut clerk = clerk_mutex.lock().unwrap();
    let payload = clerk.schema();
    Ok(Response::with((ContentType::json().0, status::Ok, payload)))
}
/// Start the HTTP gateway: parse CLI options, connect to the cluster,
/// register routes, and serve (blocks inside `Iron::new(...).http`).
///
/// NOTE(review): missing/invalid HOST, PORT or SERVERS panic via `unwrap`
/// instead of returning `Err` — confirm that is acceptable for a CLI entry.
pub fn run_gateway_cli(matches: &ArgMatches) -> Result<(), String> {
    set_http_logger();
    let host = matches.value_of("HOST").unwrap();
    let port = matches.value_of("PORT").unwrap().parse::<u16>().unwrap();
    // One gRPC client per configured server address.
    let servers: Vec<_> = matches
        .values_of("SERVERS")
        .unwrap()
        .map(|addr| create_client(addr))
        .collect();
    let addr = format!("{}:{}", host, port);
    // Random id distinguishes this gateway instance to the cluster.
    let client_id: u64 = rand::random();
    let client = Clerk::new(&servers, client_id);
    let (logger_before, logger_after) = Logger::new(None);
    let mut router = Router::new();
    router.get("/probe", probe, "probe");
    router.get("/peers", peers, "peers");
    router.get("/metrics", metrics, "metrics");
    router.get("/index/docs/:doc_id", get, "get");
    router.put("/index/docs/:doc_id", put, "put");
    router.delete("/index/docs/:doc_id", delete, "delete");
    router.put("/index/docs", bulk_put, "bulk_put");
    router.delete("/index/docs", bulk_delete, "bulk_delete");
    router.get("/index/search", search, "search");
    // NOTE(review): commit/rollback/merge mutate the index but are
    // registered as GET — confirm this is intentional.
    router.get("/index/commit", commit, "commit");
    router.get("/index/rollback", rollback, "rollback");
    router.get("/index/merge", merge, "merge");
    router.get("/index/schema", schema, "schema");
    // Chain order: request logger -> shared client -> response logger.
    let mut chain = Chain::new(router);
    chain.link_before(logger_before);
    chain.link(Write::<Client>::both(client));
    chain.link_after(logger_after);
    Iron::new(chain).http(addr).unwrap();
    Ok(())
}
|
mod map_union;
mod my_last_write_wins;
mod point;
mod with_bot;
pub use map_union::{MapUnionHashMapDeserializer, MapUnionHashMapWrapper};
|
use mongodb::Client;
use bson::{Document,Bson,oid};
use std::sync::Arc;
use std::sync::{Mutex,MutexGuard};
use mongodb::db::{ThreadedDatabase,Database};
use mongodb::ThreadedClient;
use iron::typemap::Key;
use model;
use service;
use serde::{Deserialize, Serialize, Deserializer};
use rustc_serialize::json;
use chrono::offset::local::Local;
use config::ConfigManager;
use std::result;
/// DAO-layer result alias carrying `service::ServiceError`.
pub type Result<T> = result::Result<T, service::ServiceError>;
/// Maps a model type to the MongoDB collection name that stores it.
pub trait ToDoc {
    fn get_name() -> &'static str;
}
// Collection names for each persisted model type.
impl ToDoc for model::Passenger {
    fn get_name() -> &'static str {
        "Passenger"
    }
}
impl ToDoc for model::Owner {
    fn get_name() -> &'static str {
        "Owner"
    }
}
impl ToDoc for model::Order {
    fn get_name() -> &'static str {
        "Order"
    }
}
impl ToDoc for model::Trip {
    fn get_name() -> &'static str {
        "Trip"
    }
}
impl ToDoc for model::Line {
    fn get_name() -> &'static str {
        "Line"
    }
}
/// Data-access object wrapping a shared MongoDB client handle.
pub struct Dao(Client);
impl Dao {
pub fn new() -> Dao {
Dao(get_db())
}
pub fn add<T>(&self,t:T) -> Result<Option<Bson>> where T:ToDoc+Serialize{
let coll = self.get_db().collection(T::get_name());
coll.insert_one(service::en_bson(t).unwrap(),None).map(|r|r.inserted_id).map_err(|err|service::ServiceError::MongodbError(err))
}
pub fn delete<T>(&self,id:&str) where T:ToDoc+Serialize {
let coll = self.get_db().collection(T::get_name());
oid::ObjectId::with_string(id).map_err(|err|service::ServiceError::BsonOidError(err)).map(|o|Bson::ObjectId(o)).and_then(|oid|{
let mut doc = Document::new();
doc.insert("_id",oid);
coll.delete_one(doc,None);
Ok(())
});
}
pub fn delete_by_openid<T>(&self,id:&str) where T:ToDoc+Serialize {
let coll = self.get_db().collection(T::get_name());
let mut doc = Document::new();
doc.insert("openid",id);
coll.delete_many(doc,None);
}
pub fn delete_many_orders(&self,openids:Vec<&str>) {
let coll = self.get_db().collection("Order");
let mut doc = Document::new();
let openid_bson = openids.iter().map(|openid|Bson::String(openid.to_string())).collect();
doc.insert("$in",Bson::Array(openid_bson));
let mut doc1 = Document::new();
doc1.insert("openid",doc);
coll.delete_many(doc1,None);
}
pub fn add_history<T>(&self,t:T) where T:ToDoc+Serialize {
let coll_name:&str = &format!("{}_history",T::get_name());
let coll = self.get_db().collection(coll_name);
coll.insert_one(service::en_bson(t).unwrap(),None);
}
pub fn add_orders_history(&self,orders:Vec<model::Order>) {
let coll = self.get_db().collection("Order_history");
let docs = orders.iter().map(|order|{
service::en_bson(order.clone()).unwrap()
}).collect();
coll.insert_many(docs,None);
}
pub fn get_by_openid<T>(&self,openid:&str) -> Result<T> where T:ToDoc+Deserialize{
let coll = self.get_db().collection(T::get_name());
let mut doc = Document::new();
doc.insert("openid",openid.clone());
coll.find_one(Some(doc),None).map_err(|err|service::ServiceError::MongodbError(err)).and_then(|op|{
op.ok_or(service::ServiceError::Other("not find by this openid".to_string())).and_then(|doc|{
service::de_bson::<T>(doc)
})
})
}
pub fn get_by_id<T>(&self,id:&str) -> Result<T> where T:ToDoc+Deserialize {
let coll = self.get_db().collection(T::get_name());
let mut doc = Document::new();
oid::ObjectId::with_string(id).map_err(|err|service::ServiceError::BsonOidError(err)).map(|o|Bson::ObjectId(o)).and_then(|oid|{
doc.insert("_id",oid);
coll.find_one(Some(doc),None).map_err(|err|service::ServiceError::MongodbError(err)).and_then(|op|{
op.ok_or(service::ServiceError::Other("not find by this _id".to_string())).and_then(|doc|{
service::de_bson::<T>(doc)
})
})
})
}
pub fn get_trip_by_status(&self,status:&str) -> Vec<model::Trip>{
let coll = self.get_db().collection(model::Trip::get_name());
let mut doc = Document::new();
//let Bson::ObjectId(_id) = id;
doc.insert("status",status);
let mut data:Vec<model::Trip> = Vec::new();
if let Ok(c) = coll.find(Some(doc),None) {
for result in c {
let value = result.unwrap();
data.push(service::de_bson::<model::Trip>(value).unwrap());
}
}
data
}
pub fn get_all_lines(&self) -> Vec<model::Line> {
let coll = self.get_db().collection(model::Line::get_name());
coll.find(None,None).map(|cursor|{
cursor.map(|result| {
let value = result.unwrap();
service::de_bson::<model::Line>(value).unwrap()
}).collect()
}).unwrap()
}
pub fn get_line_by_id(&self,id:u32) -> Result<model::Line> {
let coll = self.get_db().collection(model::Line::get_name());
let mut doc = Document::new();
//let Bson::ObjectId(_id) = id;
doc.insert("id",id);
coll.find_one(Some(doc),None).map_err(|err|service::ServiceError::MongodbError(err)).and_then(|od|{
od.ok_or(service::ServiceError::Other("not find by this id".to_string())).and_then(|doc|{
service::de_bson::<model::Line>(doc)
})
})
}
pub fn get_hot_lines(&self) -> Vec<model::Line> {
let coll = self.get_db().collection(model::Line::get_name());
let mut doc = Document::new();
doc.insert("hot",true);
coll.find(Some(doc),None).map(|cursor|{
cursor.map(|result| {
let value = result.unwrap();
service::de_bson::<model::Line>(value).unwrap()
}).collect()
}).unwrap()
}
pub fn update_order(&self,order_id:&str,status:model::OrderStatus) -> Result<()> {
let coll = self.get_db().collection("Order");
let mut doc = Document::new();
let mut update_doc = Document::new();
let st:&str = &format!("{}",status);
update_doc.insert("$set",doc!{"status" => st});
doc.insert("openid",order_id);
coll.update_one(doc,update_doc,None).map(|_|()).map_err(|err|service::ServiceError::MongodbError(err))
}
pub fn set_current_seats(&self,id:&str,seats:u32) -> Result<()> {
let coll = self.get_db().collection("Trip");
let mut doc = Document::new();
let mut update_doc = Document::new();
update_doc.insert("$set",doc!{"current_seat" => seats});
oid::ObjectId::with_string(id).map_err(|err|service::ServiceError::BsonOidError(err)).map(|o|Bson::ObjectId(o)).and_then(|oid|{
doc.insert("_id",oid);
coll.update_one(doc,update_doc,None).map(|_|()).map_err(|err|service::ServiceError::MongodbError(err))
})
}
pub fn get_orders_by_trip_id(&self,trip_id:&str) -> Vec<model::Order> {
let coll = self.get_db().collection(model::Order::get_name());
let mut doc = Document::new();
doc.insert("trip_id",trip_id);
match coll.find(Some(doc),None).map(|cursor|{
cursor.map(|result| {
result.map_err(|err|service::ServiceError::MongodbError(err)).and_then(|res|{
service::de_bson::<model::Order>(res)
}).unwrap()
}).collect()
}) {
Ok(result) => result,
Err(err) => {
warn!("get order by trip error : {}",err);
Vec::new()
}
}
}
//db.Trip.update({"owner_id":"openid"},{"$set":{"status":"Finish"}})
/// Set the `status` of the Trip identified by `id` (a hex ObjectId).
pub fn update_status(&self, id: &str, status: model::TripStatus) -> Result<()> {
    let coll = self.get_db().collection("Trip");
    // Status is persisted as its `Display` string form.
    let status_text: &str = &format!("{}", status);
    let mut update = Document::new();
    update.insert("$set", doc!{"status" => status_text});
    let object_id = oid::ObjectId::with_string(id)
        .map_err(service::ServiceError::BsonOidError)
        .map(Bson::ObjectId)?;
    let mut filter = Document::new();
    filter.insert("_id", object_id);
    // `update_many` tolerates multiple matches, though `_id` is unique.
    coll.update_many(filter, update, None)
        .map(|_| ())
        .map_err(service::ServiceError::MongodbError)
}
/// Open (and authenticate) a handle to the configured database.
///
/// Panics if authentication fails.
/// NOTE(review): credentials are re-read and re-authenticated on every
/// call — consider caching the `Database` if this shows up in profiles.
fn get_db(&self) -> Database {
    let db_name = ConfigManager::get_config_str("app", "dbname");
    let db_user = ConfigManager::get_config_str("app", "dbuser");
    let db_pwd = ConfigManager::get_config_str("app", "dbpwd");
    let db = self.0.db(&db_name);
    // Fail loudly with context instead of a bare `unwrap()`.
    db.auth(&db_user, &db_pwd)
        .expect("failed to authenticate against the configured MongoDB database");
    db
}
}
// Lets `Dao` serve as a typemap key (iron/persistent style), storing
// a `Dao` value under its own type.
impl Key for Dao {
type Value = Dao;
}
// Cloning duplicates the inner client handle.
// NOTE(review): `#[derive(Clone)]` on `Dao` would make this impl redundant,
// since the inner field is itself `Clone` (we call `.clone()` on it here).
impl Clone for Dao {
fn clone(&self) -> Dao {
Dao(self.0.clone())
}
}
/// Connect to a MongoDB instance on localhost:27017.
///
/// Panics if the connection cannot be established.
/// NOTE(review): unlike `Dao::get_db`, this returns an unauthenticated
/// client to a hard-coded address — confirm it is only used for tooling.
fn get_db() -> Client {
    // `.expect` replaces the deprecated `.ok().expect(...)` idiom and keeps
    // the underlying error in the panic message.
    Client::connect("localhost", 27017)
        .expect("Failed to initialize standalone client.")
}
|
// local imports
use charts::SourceSeries;
/// Get the lowest value among the first `length` data points in `source`.
///
/// Returns `None` when `length` is zero or the series has no first element;
/// indices past the end of `source` are simply skipped.
pub fn lowest(source: SourceSeries, length: usize) -> Option<f64> {
    // `length` is unsigned, so `== 0` is the correct (and lint-clean) guard.
    if length == 0 {
        return None;
    }
    // Seed with the first point; `?` bails out when the series is empty
    // (the original fetched index 0 twice — once just to check for None).
    let mut lowest = source.get(0)?;
    for index in 1..length {
        if let Some(candidate) = source.get(index) {
            if candidate < lowest {
                lowest = candidate;
            }
        }
    }
    Some(lowest)
}
/// Get the highest value among the first `length` data points in `source`.
///
/// Returns `None` when `length` is zero or the series has no first element;
/// indices past the end of `source` are simply skipped.
pub fn highest(source: SourceSeries, length: usize) -> Option<f64> {
    // `length` is unsigned, so `== 0` is the correct (and lint-clean) guard.
    if length == 0 {
        return None;
    }
    // Seed with the first point; `?` bails out when the series is empty.
    let mut highest = source.get(0)?;
    for index in 1..length {
        if let Some(candidate) = source.get(index) {
            if candidate > highest {
                highest = candidate;
            }
        }
    }
    Some(highest)
}
|
use std::fmt::Display;
/// Types that can be summarized for display.
pub trait Summary {
/// The display name of the content's author.
fn author(&self) -> String;
/// A short human-readable summary; the default merely credits the author.
fn summarize(&self) -> String {
format!("Similar articles by {}", self.author())
}
}
/// A news article with headline, byline and body text.
pub struct NewsArticle {
pub headline: String,
pub location: String,
pub author: String,
pub content: String,
}
impl Summary for NewsArticle {
    fn author(&self) -> String {
        // A direct clone replaces the needless `format!("{}", ...)`
        // (clippy: useless_format).
        self.author.clone()
    }
    /// Overrides the default to include headline and location.
    fn summarize(&self) -> String {
        format!("{}, by {} ({})", self.headline, self.author, self.location)
    }
}
/// A tweet: author handle, text, and reply/retweet flags.
pub struct Tweet {
pub username: String,
pub content: String,
pub reply: bool,
pub retweet: bool,
}
// Only `author` is overridden; `summarize` deliberately falls back to the
// trait's default implementation (the override below is commented out).
impl Summary for Tweet {
fn author(&self) -> String {
format!("@{}", self.username)
}
// fn summarize(&self) -> String {
// format!("{}: {}", self.username, self.content)
// }
}
// //takes a value that implements the summary trait
// pub fn notify(item: &impl Summary) {
// println!("Breaking news! {}", item.summarize());
// }
// //similar to impl trait without syntactic sugar
// pub fn notify<T: Summary>(item: &T) {
// println!("Breaking news! {}", item.summarize());
// }
// //using + to add more trait bounds
// pub fn notify(item: &(impl Summary + Display)) {}
//this is cumbersome
// fn some_function<T: Display + Clone, U: Clone + Debug>(t: &T, u: &U) -> i32 {}
// //this is simpler
// fn some_function<T, U>(t: &T, u: &U) -> i32
// where T: Display + Clone,
// U: Clone + Debug
// {}
/// A homogeneous pair of values.
struct Pair<T> {
x:T,
y:T,
}
// Constructor available for any `T` (no trait bounds needed here).
impl <T> Pair<T>{
fn new(x:T,y:T) -> Self{
Self{x,y}
}
}
// Conditionally-available method: only pairs whose element type is both
// printable and orderable can report their larger member.
impl<T: Display + PartialOrd> Pair<T> {
    /// Print which member of the pair is larger (ties go to `x`).
    fn cmp_display(&self) {
        if self.x < self.y {
            println!("The largest number is y {}", self.y);
        } else {
            println!("The largest number is x {}", self.x);
        }
    }
}
/// Demo: exercise both the overridden (`NewsArticle`) and default-backed
/// (`Tweet`) `Summary` implementations.
fn main() {
    let tweet = Tweet {
        username: String::from("horse_ebooks"),
        content: String::from("of course, as you probably already know, people"),
        reply: false,
        retweet: false,
    };
    println!("you have 1 new tweet: {}", tweet.summarize());
    let article = NewsArticle {
        author: String::from("Martin Thuranira"),
        // Fixed typo in the content string: "inot" -> "into".
        content: String::from("Getting into the weeds with rust"),
        location: "Nairobi".to_string(),
        headline: "Breaking News".to_string(),
    };
    println!("you have 1 new article :{}", article.summarize());
}
|
#![cfg_attr(RUSTC_WITH_SPECIALIZATION, feature(min_specialization))]
#![allow(clippy::integer_arithmetic)]
#![allow(clippy::mut_from_ref)]
mod bucket;
mod bucket_item;
pub mod bucket_map;
mod bucket_stats;
mod bucket_storage;
mod index_entry;
// Shared scalar aliases used across the bucket modules.
// NOTE(review): their exact semantics live in the submodules — confirm
// against `bucket_map` / `index_entry` before relying on the names alone.
pub type MaxSearch = u8;
pub type RefCount = u64;
|
//! Render utilities which don't belong anywhere else.
use std::fmt::{Display, Formatter, Result};
/// Wrap a formatting closure so it can be used anywhere a `Display` value is expected.
pub fn as_display<F: Fn(&mut Formatter<'_>) -> Result>(f: F) -> impl Display {
    // Bounds live on the impl, not the struct, per convention.
    struct ClosureDisplay<F>(F);
    impl<F: Fn(&mut Formatter<'_>) -> Result> Display for ClosureDisplay<F> {
        fn fmt(&self, formatter: &mut Formatter<'_>) -> Result {
            // Defer entirely to the wrapped closure.
            (self.0)(formatter)
        }
    }
    ClosureDisplay(f)
}
// Write `$template` to `$f` with the items of `$list` joined by `$sep`,
// but write nothing at all when `$list` is empty.
// NOTE(review): relies on a `.format($sep)` method on the iterator being in
// scope at the call site (presumably itertools' `Itertools::format` — confirm).
macro_rules! write_joined_non_empty_list {
($f:expr,$template:tt,$list:expr,$sep:expr) => {{
let mut x = $list.into_iter().peekable();
// Peek first so an empty list produces no output at all.
if x.peek().is_some() {
write!($f, $template, x.format($sep))
} else {
Ok(())
}
}};
}
|
use core::{
convert::TryFrom,
fmt,
};
use std::io;
use crate::version::Version;
use crate::consts::*;
/// A handle of a device connection.
///
/// The connection between the programmer and the system may break at any time.
/// When this happens, methods of `Handle` should return `Err` values other than
/// normal `Ok` with results.
#[derive(Debug, Clone, Hash, Eq, PartialEq)]
pub struct Handle<'h> {
// Underlying USB handle for the probe.
inner: nihao_usb::Handle<'h>,
// Firmware version read once during `TryFrom` construction.
version: Version,
}
impl<'h> Handle<'h> {
    /// The probe firmware version captured when this handle was created.
    pub fn version(&self) -> Version {
        self.version
    }
    /// Read the target voltage from the probe's ADC.
    ///
    /// Returns `Ok(None)` when the firmware reports no trace support
    /// (the voltage command is only issued when `has_trace` is set).
    pub fn get_voltage(&self) -> io::Result<Option<f32>> {
        if !self.version.has_trace {
            return Ok(None);
        }
        let reply = crate::command::command(&self.inner, STLINK_GET_TARGET_VOLTAGE, 0, 8)?;
        let factor = u32::from_le_bytes([reply[0], reply[1], reply[2], reply[3]]);
        let reading = u32::from_le_bytes([reply[4], reply[5], reply[6], reply[7]]);
        // Guard against division by zero when the reference factor is 0.
        let voltage = if factor == 0 {
            0.0
        } else {
            2.0 * (reading as f32) * 1.2 / (factor as f32)
        };
        Ok(Some(voltage))
    }
    /// Query the probe's current mode byte.
    pub fn get_mode(&self) -> io::Result<u8> {
        let reply = crate::command::command(&self.inner, STLINK_GET_CURRENT_MODE, 0, 2)?;
        Ok(reply[0])
    }
}
// //todo: bug?
// impl Drop for Handle<'_> {
// fn drop(&mut self) {
// crate::command::command(&self.inner, STLINK_DEBUG_APIV2_RESETSYS, 0x80, 2).unwrap();
// }
// }
// Expose the raw USB handle for code that needs lower-level access.
impl<'h> AsRef<nihao_usb::Handle<'h>> for Handle<'h> {
fn as_ref(&self) -> &nihao_usb::Handle<'h> {
&self.inner
}
}
/// Failure modes when promoting a raw USB handle into an ST-Link `Handle`.
#[derive(Debug)]
pub enum TryFromHandleError {
// Device is not an ST-Link V2 (carries the observed vendor/product ids).
InvalidVendorProductId(u16, u16),
// Underlying USB I/O failed.
IoError(io::Error),
}
impl std::error::Error for TryFromHandleError {}
// Display just reuses the Debug rendering.
impl fmt::Display for TryFromHandleError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{:?}", self)
}
}
// Allow `?` to lift I/O errors into this error type...
impl From<io::Error> for TryFromHandleError {
fn from(src: io::Error) -> TryFromHandleError {
TryFromHandleError::IoError(src)
}
}
// ...and back again, wrapping as an `Other` I/O error.
impl From<TryFromHandleError> for io::Error {
fn from(src: TryFromHandleError) -> io::Error {
io::Error::new(io::ErrorKind::Other, src)
}
}
// Validate a raw USB handle as an ST-Link V2 and capture its firmware
// version. On failure the original handle is handed back alongside the
// error so the caller keeps ownership — which is why the explicit matches
// are used instead of `?`.
impl<'h> TryFrom<nihao_usb::Handle<'h>> for Handle<'h> {
type Error = (nihao_usb::Handle<'h>, TryFromHandleError);
fn try_from(src: nihao_usb::Handle<'h>) -> Result<Handle<'h>, Self::Error> {
use TryFromHandleError::*;
let desc = match src.device_descriptor() {
Ok(desc) => desc,
Err(err) => return Err((src, err.into())),
};
// Reject anything that is not an ST-Link V2 by VID/PID.
if desc.id_vendor != STLINK_VID || desc.id_product != STLINK_V2_PID {
return Err((src, InvalidVendorProductId(desc.id_vendor, desc.id_product)))
}
// get version
let version = match crate::version::read_handle(&src) {
Ok(ver) => ver,
Err(err) => return Err((src, err.into())),
};
let ans = Handle { inner: src, version };
Ok(ans)
}
}
|
extern crate glutin_window;
extern crate graphics;
extern crate opengl_graphics;
extern crate piston;
use ::piston::event_loop::*;
use ::piston::window::WindowSettings;
use glutin_window::GlutinWindow;
use opengl_graphics::{GlGraphics, OpenGL};
use piston::input::*;
use std::collections::LinkedList;
use std::iter::FromIterator;
// Top-level game state: the GL backend plus the snake being played.
struct Game {
gl: GlGraphics,
snake: Snake,
}
// The snake: grid cells from head (front) to tail (back), plus heading.
struct Snake {
body:LinkedList<(i32,i32)>,
dir: Direction,
}
// Heading of the snake on the grid.
#[derive(Clone, PartialEq)]
enum Direction {
Right,
Up,
Left,
Down,
}
impl Snake {
    /// Draw the snake as a series of red 20x20 squares.
    fn render(&self, gl: &mut GlGraphics, args: &RenderArgs) {
        use graphics;
        // Constant colour — `let RED` triggered a non-snake-case warning;
        // a `const` is the idiomatic form for a fixed value.
        const RED: [f32; 4] = [1.0, 0.0, 0.0, 1.0];
        let squares: Vec<graphics::types::Rectangle> = self.body
            .iter()
            .map(|&(x, y)| {
                // Each grid cell is 20 pixels.
                graphics::rectangle::square((x * 20) as f64, (y * 20) as f64, 20_f64)
            })
            .collect();
        gl.draw(args.viewport(), |c, gl| {
            let transform = c.transform;
            squares
                .into_iter()
                .for_each(|square| graphics::rectangle(RED, square, transform, gl))
        });
    }
    /// Advance the snake one cell in its current direction.
    fn update(&mut self) {
        let mut new_head = (*self.body.front().expect("no body")).clone();
        match self.dir {
            Direction::Left => new_head.0 -= 1,
            Direction::Right => new_head.0 += 1,
            Direction::Up => new_head.1 -= 1,
            Direction::Down => new_head.1 += 1,
        }
        self.body.push_front(new_head);
        // Drop the tail so the length stays constant.
        self.body.pop_back().unwrap();
    }
}
impl Game {
    /// Clear the viewport to green, then draw the snake on top.
    fn render(&mut self, arg: &RenderArgs) {
        use graphics::*;
        const GREEN: [f32; 4] = [0.0, 1.0, 0.0, 1.0];
        self.gl.draw(arg.viewport(), |_c, gl| {
            graphics::clear(GREEN, gl);
        });
        self.snake.render(&mut self.gl, arg);
    }
    /// Tick the game state forward one step.
    fn update(&mut self) {
        self.snake.update()
    }
    /// Steer the snake; a direct reversal is ignored.
    fn pressed(&mut self, btn: &Button) {
        let last_direction = self.snake.dir.clone();
        let next = match *btn {
            Button::Keyboard(Key::Up) if last_direction != Direction::Down => Direction::Up,
            Button::Keyboard(Key::Down) if last_direction != Direction::Up => Direction::Down,
            Button::Keyboard(Key::Left) if last_direction != Direction::Right => Direction::Left,
            Button::Keyboard(Key::Right) if last_direction != Direction::Left => Direction::Right,
            // Anything else keeps the current heading.
            _ => last_direction,
        };
        self.snake.dir = next;
    }
}
/// Set up the window, construct the initial two-segment snake, and run
/// the render/update/input event loop.
fn main() {
    let opengl = OpenGL::V3_2;
    let mut window: GlutinWindow = WindowSettings::new("Snake Game", [500, 500])
        .opengl(opengl)
        .exit_on_esc(true)
        .build()
        .unwrap();
    let mut game = Game {
        gl: GlGraphics::new(opengl),
        snake: Snake {
            body: LinkedList::from_iter((vec![(0, 0), (0, 1)]).into_iter()),
            dir: Direction::Down,
        },
    };
    // 5 updates per second controls the snake's speed.
    let mut events = Events::new(EventSettings::new()).ups(5);
    while let Some(e) = events.next(&mut window) {
        if let Some(args) = e.render_args() {
            game.render(&args);
        }
        // The update payload is unused (the binding `u` was dead);
        // only the fact that a tick occurred matters.
        if e.update_args().is_some() {
            game.update();
        }
        if let Some(k) = e.button_args() {
            if k.state == ButtonState::Press {
                game.pressed(&k.button);
            }
        }
    }
}
|
use crate::geometry::{FDisplacement, FPoint};
use crate::input::cursor::CursorManager;
use crate::input::keyboard::Keyboard;
use std::rc::Rc;
use wlroots_sys::*;
use xkbcommon::xkb;
// NOTE Taken from linux/input-event-codes.h
// TODO Find a way to automatically parse and fetch from there.
// Mouse button codes; values mirror the BTN_* constants from
// linux/input-event-codes.h (see NOTE above).
pub const BTN_LEFT: u32 = 0x110;
pub const BTN_RIGHT: u32 = 0x111;
pub const BTN_MIDDLE: u32 = 0x112;
pub const BTN_SIDE: u32 = 0x113;
pub const BTN_EXTRA: u32 = 0x114;
pub const BTN_FORWARD: u32 = 0x115;
pub const BTN_BACK: u32 = 0x116;
pub const BTN_TASK: u32 = 0x117;
/// Accessors common to every input event wrapper in this module.
pub trait InputEvent {
/// Get the timestamp of this event
fn time_msec(&self) -> u32;
/// Get the raw pointer to the device that fired this event
fn raw_device(&self) -> *mut wlr_input_device;
}
/// Accessors for events that have an associated cursor position.
pub trait CursorEvent {
/// Get the position of the cursor in global coordinates
fn position(&self) -> FPoint;
/// Get the change from the last positional value
///
/// Note you should not cast this to a type with less precision,
/// otherwise you'll lose important motion data which can cause bugs
/// (e.g see [this fun wlc bug](https://github.com/Cloudef/wlc/issues/181)).
fn delta(&self) -> FDisplacement;
/// Like `delta`, but without pointer acceleration applied.
fn delta_unaccel(&self) -> FDisplacement;
}
/// Event that triggers when the pointer device scrolls (e.g using a wheel
/// or in the case of a touchpad when you use two fingers to scroll)
pub struct AxisEvent {
cursor_manager: Rc<CursorManager>,
event: *const wlr_event_pointer_axis,
}
impl AxisEvent {
// Safety: `event` must point to a live wlroots axis event for the lifetime
// of this wrapper; every accessor below dereferences it.
pub(crate) unsafe fn from_ptr(
cursor_manager: Rc<CursorManager>,
event: *const wlr_event_pointer_axis,
) -> Self {
AxisEvent {
cursor_manager,
event,
}
}
/// Get the raw pointer to this event
pub fn raw_event(&self) -> *const wlr_event_pointer_axis {
self.event
}
/// What generated the scroll (wheel, finger, ...).
pub fn source(&self) -> wlr_axis_source {
unsafe { (*self.event).source }
}
/// Horizontal or vertical scroll axis.
pub fn orientation(&self) -> wlr_axis_orientation {
unsafe { (*self.event).orientation }
}
/// Get the change from the last axis value
///
/// Useful to determine e.g how much to scroll.
pub fn delta(&self) -> f64 {
unsafe { (*self.event).delta }
}
/// The scroll delta in discrete steps (e.g wheel clicks).
pub fn delta_discrete(&self) -> i32 {
unsafe { (*self.event).delta_discrete }
}
}
impl InputEvent for AxisEvent {
fn raw_device(&self) -> *mut wlr_input_device {
unsafe { (*self.event).device }
}
fn time_msec(&self) -> u32 {
unsafe { (*self.event).time_msec }
}
}
impl CursorEvent for AxisEvent {
fn position(&self) -> FPoint {
self.cursor_manager.position()
}
// Scrolling does not move the pointer, so both deltas are zero.
fn delta(&self) -> FDisplacement {
FDisplacement::ZERO
}
fn delta_unaccel(&self) -> FDisplacement {
CursorEvent::delta(self)
}
}
/// Whether a pointer button is down or up.
#[derive(Debug, PartialEq, Eq)]
pub enum ButtonState {
Released,
Pressed,
}
impl ButtonState {
// Anything other than the RELEASED constant is treated as pressed.
pub fn from_raw(state: wlr_button_state) -> ButtonState {
if state == wlr_button_state_WLR_BUTTON_RELEASED {
ButtonState::Released
} else {
ButtonState::Pressed
}
}
// Convert back to the wlroots constant.
pub fn as_raw(&self) -> wlr_button_state {
match self {
ButtonState::Released => wlr_button_state_WLR_BUTTON_RELEASED,
ButtonState::Pressed => wlr_button_state_WLR_BUTTON_PRESSED,
}
}
}
/// Event that triggers when a button is pressed (e.g left click, right click,
/// a gaming mouse button, etc.)
pub struct ButtonEvent {
cursor_manager: Rc<CursorManager>,
event: *const wlr_event_pointer_button,
}
impl ButtonEvent {
// Safety: `event` must point to a live wlroots button event for the
// lifetime of this wrapper.
pub(crate) unsafe fn from_ptr(
cursor_manager: Rc<CursorManager>,
event: *const wlr_event_pointer_button,
) -> Self {
ButtonEvent {
cursor_manager,
event,
}
}
/// Get the raw pointer to this event
pub fn raw_event(&self) -> *const wlr_event_pointer_button {
self.event
}
/// Get the state of the button (e.g pressed or released)
pub fn state(&self) -> ButtonState {
ButtonState::from_raw(unsafe { (*self.event).state })
}
/// Get the value of the button pressed. This will generally be an
/// atomically increasing value, with e.g left click being 1 and right
/// click being 2...
///
/// We make no guarantees that 1 always maps to left click, as this is
/// device driver specific.
pub fn button(&self) -> u32 {
unsafe { (*self.event).button }
}
}
impl InputEvent for ButtonEvent {
fn raw_device(&self) -> *mut wlr_input_device {
unsafe { (*self.event).device }
}
fn time_msec(&self) -> u32 {
unsafe { (*self.event).time_msec }
}
}
impl CursorEvent for ButtonEvent {
fn position(&self) -> FPoint {
self.cursor_manager.position()
}
// Button events carry no pointer motion, so both deltas are zero.
fn delta(&self) -> FDisplacement {
FDisplacement::ZERO
}
fn delta_unaccel(&self) -> FDisplacement {
self.delta()
}
}
/// Event that triggers when the pointer moves
// Wraps either kind of motion; every trait method simply delegates to the
// concrete variant.
pub enum MotionEvent {
Relative(RelativeMotionEvent),
Absolute(AbsoluteMotionEvent),
}
impl InputEvent for MotionEvent {
fn raw_device(&self) -> *mut wlr_input_device {
match self {
MotionEvent::Relative(event) => event.raw_device(),
MotionEvent::Absolute(event) => event.raw_device(),
}
}
fn time_msec(&self) -> u32 {
match self {
MotionEvent::Relative(event) => event.time_msec(),
MotionEvent::Absolute(event) => event.time_msec(),
}
}
}
impl CursorEvent for MotionEvent {
fn position(&self) -> FPoint {
match self {
MotionEvent::Relative(event) => event.position(),
MotionEvent::Absolute(event) => event.position(),
}
}
fn delta(&self) -> FDisplacement {
match self {
MotionEvent::Relative(event) => event.delta(),
MotionEvent::Absolute(event) => event.delta(),
}
}
fn delta_unaccel(&self) -> FDisplacement {
match self {
MotionEvent::Relative(event) => event.delta_unaccel(),
MotionEvent::Absolute(event) => event.delta_unaccel(),
}
}
}
/// Pointer motion reported as a displacement from the previous position.
pub struct RelativeMotionEvent {
cursor_manager: Rc<CursorManager>,
event: *const wlr_event_pointer_motion,
}
impl RelativeMotionEvent {
// Safety: `event` must point to a live wlroots motion event for the
// lifetime of this wrapper.
pub(crate) unsafe fn from_ptr(
cursor_manager: Rc<CursorManager>,
event: *const wlr_event_pointer_motion,
) -> Self {
RelativeMotionEvent {
cursor_manager,
event,
}
}
/// Get the raw pointer to this event
pub fn raw_event(&self) -> *const wlr_event_pointer_motion {
self.event
}
}
impl InputEvent for RelativeMotionEvent {
fn raw_device(&self) -> *mut wlr_input_device {
unsafe { (*self.event).device }
}
fn time_msec(&self) -> u32 {
unsafe { (*self.event).time_msec }
}
}
impl CursorEvent for RelativeMotionEvent {
// Resulting position = last known cursor position plus this event's delta.
fn position(&self) -> FPoint {
self.cursor_manager.position() + self.delta()
}
fn delta(&self) -> FDisplacement {
unsafe {
FDisplacement {
dx: (*self.event).delta_x,
dy: (*self.event).delta_y,
}
}
}
fn delta_unaccel(&self) -> FDisplacement {
unsafe {
FDisplacement {
dx: (*self.event).unaccel_dx,
dy: (*self.event).unaccel_dy,
}
}
}
}
/// Pointer motion reported as an absolute position (e.g from a tablet).
pub struct AbsoluteMotionEvent {
cursor_manager: Rc<CursorManager>,
event: *const wlr_event_pointer_motion_absolute,
}
impl AbsoluteMotionEvent {
// Safety: `event` must point to a live wlroots absolute-motion event for
// the lifetime of this wrapper.
pub(crate) unsafe fn from_ptr(
cursor_manager: Rc<CursorManager>,
event: *const wlr_event_pointer_motion_absolute,
) -> Self {
AbsoluteMotionEvent {
cursor_manager,
event,
}
}
/// Get the raw pointer to this event
pub fn raw_event(&self) -> *const wlr_event_pointer_motion_absolute {
self.event
}
}
impl InputEvent for AbsoluteMotionEvent {
fn raw_device(&self) -> *mut wlr_input_device {
unsafe { (*self.event).device }
}
fn time_msec(&self) -> u32 {
unsafe { (*self.event).time_msec }
}
}
impl CursorEvent for AbsoluteMotionEvent {
// Convert the event's device-local absolute coordinates into layout
// coordinates via wlroots before reporting them.
fn position(&self) -> FPoint {
unsafe {
let mut x = 0.0;
let mut y = 0.0;
wlr_cursor_absolute_to_layout_coords(
self.cursor_manager.raw_cursor(),
self.raw_device(),
(*self.event).x,
(*self.event).y,
&mut x,
&mut y,
);
FPoint { x, y }
}
}
// Delta is derived: new absolute position minus the current cursor position.
fn delta(&self) -> FDisplacement {
self.position() - self.cursor_manager.position()
}
fn delta_unaccel(&self) -> FDisplacement {
self.delta()
}
}
/// Whether a key is down or up.
#[derive(Debug, Eq, PartialEq, Copy, Clone)]
pub enum KeyState {
Released,
Pressed,
}
/// A key press/release, tied to the keyboard that produced it.
pub struct KeyboardEvent<'a> {
keyboard: &'a Keyboard,
event: *const wlr_event_keyboard_key,
}
impl<'a> KeyboardEvent<'a> {
// Safety: `event` must point to a live wlroots keyboard event for the
// lifetime of this wrapper.
pub(crate) unsafe fn from_ptr(
keyboard: &'a Keyboard,
event: *const wlr_event_keyboard_key,
) -> KeyboardEvent {
KeyboardEvent { keyboard, event }
}
/// The raw libinput keycode, before xkb translation.
pub fn libinput_keycode(&self) -> xkb::Keycode {
unsafe { (*self.event).keycode }
}
pub fn xkb_keycode(&self) -> xkb::Keycode {
// Translate libinput keycode -> xkbcommon
unsafe { (*self.event).keycode + 8 }
}
/// The xkb state of the keyboard that produced this event.
pub fn xkb_state(&self) -> xkb::State {
self.keyboard.xkb_state()
}
/// The raw wlroots key state constant.
pub fn raw_state(&self) -> wlr_key_state {
unsafe { (*self.event).state }
}
// Anything other than the PRESSED constant is treated as released.
pub fn state(&self) -> KeyState {
if self.raw_state() == wlr_key_state_WLR_KEY_PRESSED {
KeyState::Pressed
} else {
KeyState::Released
}
}
/// Get the single keysym obtained from pressing a particular key in
/// a given keyboard state.
///
/// This function is similar to xkb_state_key_get_syms(), but intended
/// for users which cannot or do not want to handle the case where
/// multiple keysyms are returned (in which case this function is preferred).
///
/// Returns the keysym. If the key does not have exactly one keysym,
/// returns xkb::KEY_NoSymbol
pub fn get_one_sym(&self) -> xkb::Keysym {
self
.keyboard
.xkb_state()
.key_get_one_sym(self.xkb_keycode())
}
}
impl<'a> InputEvent for KeyboardEvent<'a> {
fn raw_device(&self) -> *mut wlr_input_device {
self.keyboard.device().raw_ptr()
}
fn time_msec(&self) -> u32 {
unsafe { (*self.event).time_msec }
}
}
|
// Copyright 2017 tokio-jsonrpc Developers
//
// Licensed under the Apache License, Version 2.0, <LICENSE-APACHE or
// http://apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT or
// http://opensource.org/licenses/MIT>, at your option. This file may not be
// copied, modified, or distributed except according to those terms.
//! A clients that requests the time.
//!
//! A client requesting time from a server on localhost:2345.
//! It will invoke the "now" method, which will return the current
//! unix timestamp (number of seconds since 1.1. 1970).
extern crate futures;
extern crate serde_json;
#[macro_use]
extern crate slog;
extern crate slog_async;
extern crate slog_term;
extern crate tokio_core;
extern crate tokio_io;
extern crate tokio_jsonrpc;
use std::time::Duration;
use futures::Future;
use tokio_core::reactor::Core;
use tokio_core::net::TcpStream;
use tokio_io::AsyncRead;
use slog::{Drain, Logger};
use tokio_jsonrpc::{Endpoint, LineCodec};
use tokio_jsonrpc::message::Response;
// Connect to the JSON-RPC server, call "now" with a 5s timeout, and log
// the outcome (result, remote error, or timeout).
fn main() {
// An application logger
let decorator = slog_term::TermDecorator::new().build();
let drain = slog_term::CompactFormat::new(decorator).build().fuse();
let drain = slog_async::Async::new(drain).build().fuse();
let logger = Logger::root(drain, o!("app" => "Time client example"));
info!(logger, "Starting up");
// Usual setup of an async client
let mut core = Core::new().unwrap();
let handle = core.handle();
// Connect to a server socket
let socket = TcpStream::connect(&"127.0.0.1:2345".parse().unwrap(), &handle);
let client = socket.and_then(|socket| {
// Create a client endpoint
let (client, _) = Endpoint::client_only(socket.framed(LineCodec::new()))
.logger(logger.new(o!("client" => 1)))
.start(&handle);
info!(logger, "Calling rpc");
client
.call("now".to_owned(), None, Some(Duration::from_secs(5)))
.and_then(|(_client, response)| response)
.map(|x| match x {
// Received a successful result from the server
Some(Response {
result: Ok(result), ..
}) => {
let r: u64 = serde_json::from_value(result).unwrap();
info!(logger, "received response"; "result" => format!("{:?}", r));
},
// Received an error from the server,
Some(Response {
result: Err(err), ..
}) => {
info!(logger, "remote error"; "error" => format!("{:?}", err));
},
// Timeout
None => info!(logger, "timeout"),
})
});
// Run the whole thing
core.run(client).unwrap();
}
|
#[doc = "Reader of register EP_ACTIVE"]
pub type R = crate::R<u32, super::EP_ACTIVE>;
#[doc = "Writer for register EP_ACTIVE"]
pub type W = crate::W<u32, super::EP_ACTIVE>;
#[doc = "Register EP_ACTIVE `reset()`'s with value 0"]
// svd2rust-generated: all endpoint-active bits reset to 0.
impl crate::ResetValue for super::EP_ACTIVE {
type Type = u32;
#[inline(always)]
fn reset_value() -> Self::Type {
0
}
}
// svd2rust-generated reader aliases and write proxies for EP1..EP8.
// Each `EPn_ACT_W::bit` masks bit (n-1) out of the register and ORs the
// new value in at that position.
#[doc = "Reader of field `EP1_ACT`"]
pub type EP1_ACT_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `EP1_ACT`"]
pub struct EP1_ACT_W<'a> {
w: &'a mut W,
}
impl<'a> EP1_ACT_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01);
self.w
}
}
#[doc = "Reader of field `EP2_ACT`"]
pub type EP2_ACT_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `EP2_ACT`"]
pub struct EP2_ACT_W<'a> {
w: &'a mut W,
}
impl<'a> EP2_ACT_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1);
self.w
}
}
#[doc = "Reader of field `EP3_ACT`"]
pub type EP3_ACT_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `EP3_ACT`"]
pub struct EP3_ACT_W<'a> {
w: &'a mut W,
}
impl<'a> EP3_ACT_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 2)) | (((value as u32) & 0x01) << 2);
self.w
}
}
#[doc = "Reader of field `EP4_ACT`"]
pub type EP4_ACT_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `EP4_ACT`"]
pub struct EP4_ACT_W<'a> {
w: &'a mut W,
}
impl<'a> EP4_ACT_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 3)) | (((value as u32) & 0x01) << 3);
self.w
}
}
#[doc = "Reader of field `EP5_ACT`"]
pub type EP5_ACT_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `EP5_ACT`"]
pub struct EP5_ACT_W<'a> {
w: &'a mut W,
}
impl<'a> EP5_ACT_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 4)) | (((value as u32) & 0x01) << 4);
self.w
}
}
#[doc = "Reader of field `EP6_ACT`"]
pub type EP6_ACT_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `EP6_ACT`"]
pub struct EP6_ACT_W<'a> {
w: &'a mut W,
}
impl<'a> EP6_ACT_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 5)) | (((value as u32) & 0x01) << 5);
self.w
}
}
#[doc = "Reader of field `EP7_ACT`"]
pub type EP7_ACT_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `EP7_ACT`"]
pub struct EP7_ACT_W<'a> {
w: &'a mut W,
}
impl<'a> EP7_ACT_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 6)) | (((value as u32) & 0x01) << 6);
self.w
}
}
#[doc = "Reader of field `EP8_ACT`"]
pub type EP8_ACT_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `EP8_ACT`"]
pub struct EP8_ACT_W<'a> {
w: &'a mut W,
}
impl<'a> EP8_ACT_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 7)) | (((value as u32) & 0x01) << 7);
self.w
}
}
// svd2rust-generated read accessors: bit n-1 reports whether endpoint n is active.
impl R {
#[doc = "Bit 0 - Indicates that Endpoint is currently active."]
#[inline(always)]
pub fn ep1_act(&self) -> EP1_ACT_R {
EP1_ACT_R::new((self.bits & 0x01) != 0)
}
#[doc = "Bit 1 - Indicates that Endpoint is currently active."]
#[inline(always)]
pub fn ep2_act(&self) -> EP2_ACT_R {
EP2_ACT_R::new(((self.bits >> 1) & 0x01) != 0)
}
#[doc = "Bit 2 - Indicates that Endpoint is currently active."]
#[inline(always)]
pub fn ep3_act(&self) -> EP3_ACT_R {
EP3_ACT_R::new(((self.bits >> 2) & 0x01) != 0)
}
#[doc = "Bit 3 - Indicates that Endpoint is currently active."]
#[inline(always)]
pub fn ep4_act(&self) -> EP4_ACT_R {
EP4_ACT_R::new(((self.bits >> 3) & 0x01) != 0)
}
#[doc = "Bit 4 - Indicates that Endpoint is currently active."]
#[inline(always)]
pub fn ep5_act(&self) -> EP5_ACT_R {
EP5_ACT_R::new(((self.bits >> 4) & 0x01) != 0)
}
#[doc = "Bit 5 - Indicates that Endpoint is currently active."]
#[inline(always)]
pub fn ep6_act(&self) -> EP6_ACT_R {
EP6_ACT_R::new(((self.bits >> 5) & 0x01) != 0)
}
#[doc = "Bit 6 - Indicates that Endpoint is currently active."]
#[inline(always)]
pub fn ep7_act(&self) -> EP7_ACT_R {
EP7_ACT_R::new(((self.bits >> 6) & 0x01) != 0)
}
#[doc = "Bit 7 - Indicates that Endpoint is currently active."]
#[inline(always)]
pub fn ep8_act(&self) -> EP8_ACT_R {
EP8_ACT_R::new(((self.bits >> 7) & 0x01) != 0)
}
}
// svd2rust-generated write accessors: each returns the field's write proxy.
impl W {
#[doc = "Bit 0 - Indicates that Endpoint is currently active."]
#[inline(always)]
pub fn ep1_act(&mut self) -> EP1_ACT_W {
EP1_ACT_W { w: self }
}
#[doc = "Bit 1 - Indicates that Endpoint is currently active."]
#[inline(always)]
pub fn ep2_act(&mut self) -> EP2_ACT_W {
EP2_ACT_W { w: self }
}
#[doc = "Bit 2 - Indicates that Endpoint is currently active."]
#[inline(always)]
pub fn ep3_act(&mut self) -> EP3_ACT_W {
EP3_ACT_W { w: self }
}
#[doc = "Bit 3 - Indicates that Endpoint is currently active."]
#[inline(always)]
pub fn ep4_act(&mut self) -> EP4_ACT_W {
EP4_ACT_W { w: self }
}
#[doc = "Bit 4 - Indicates that Endpoint is currently active."]
#[inline(always)]
pub fn ep5_act(&mut self) -> EP5_ACT_W {
EP5_ACT_W { w: self }
}
#[doc = "Bit 5 - Indicates that Endpoint is currently active."]
#[inline(always)]
pub fn ep6_act(&mut self) -> EP6_ACT_W {
EP6_ACT_W { w: self }
}
#[doc = "Bit 6 - Indicates that Endpoint is currently active."]
#[inline(always)]
pub fn ep7_act(&mut self) -> EP7_ACT_W {
EP7_ACT_W { w: self }
}
#[doc = "Bit 7 - Indicates that Endpoint is currently active."]
#[inline(always)]
pub fn ep8_act(&mut self) -> EP8_ACT_W {
EP8_ACT_W { w: self }
}
}
|
use std::iter::repeat;
/// Backtracking helper: can `word[..=index]` be spelled with the unused blocks?
///
/// Tries every still-unused block containing the (uppercased) character at
/// `index`, recursing toward index 0. `used` marks blocks consumed along the
/// current path and is restored on backtrack.
///
/// NOTE(review): `chars().nth(index)` rescans the string on every call, so
/// the whole search is O(len^2) in the word length — fine for these inputs.
fn rec_can_make_word(index: usize, word: &str, blocks: &[&str], used: &mut [bool]) -> bool {
    let c = word
        .chars()
        .nth(index)
        .expect("index must be within the word")
        .to_uppercase()
        .next()
        .expect("to_uppercase yields at least one char");
    for i in 0..blocks.len() {
        // `str::contains(char)` replaces the manual `chars().any(...)` scan.
        if !used[i] && blocks[i].contains(c) {
            used[i] = true;
            if index == 0 || rec_can_make_word(index - 1, word, blocks, used) {
                return true;
            }
            // Backtrack: release the block for other paths.
            used[i] = false;
        }
    }
    false
}
fn can_make_word(word: &str, blocks: &[&str]) -> bool {
return rec_can_make_word(word.chars().count() - 1, word, blocks,
&mut repeat(false).take(blocks.len()).collect::<Vec<_>>());
}
fn main() {
let blocks = [("BO"), ("XK"), ("DQ"), ("CP"), ("NA"), ("GT"), ("RE"), ("TG"), ("QD"), ("FS"),
("JW"), ("HU"), ("VI"), ("AN"), ("OB"), ("ER"), ("FS"), ("LY"), ("PC"), ("ZM")];
let words = ["A", "BARK", "BOOK", "TREAT", "COMMON", "SQUAD", "CONFUSE"];
for word in &words {
println!("{} -> {}", word, can_make_word(word, &blocks))
}
} |
// svd2rust-generated reader for the HDP1R_CUR register: two 3-bit fields
// at bits 0:2 (start) and 16:18 (end).
#[doc = "Register `HDP1R_CUR` reader"]
pub type R = crate::R<HDP1R_CUR_SPEC>;
#[doc = "Field `HDP1_STRT` reader - HDPL barrier start set in number of 8 Kbytes sectors"]
pub type HDP1_STRT_R = crate::FieldReader;
#[doc = "Field `HDP1_END` reader - HDPL barrier end set in number of 8 Kbytes sectors"]
pub type HDP1_END_R = crate::FieldReader;
impl R {
#[doc = "Bits 0:2 - HDPL barrier start set in number of 8 Kbytes sectors"]
#[inline(always)]
pub fn hdp1_strt(&self) -> HDP1_STRT_R {
HDP1_STRT_R::new((self.bits & 7) as u8)
}
#[doc = "Bits 16:18 - HDPL barrier end set in number of 8 Kbytes sectors"]
#[inline(always)]
pub fn hdp1_end(&self) -> HDP1_END_R {
HDP1_END_R::new(((self.bits >> 16) & 7) as u8)
}
}
// svd2rust-generated register spec: 32-bit, read-only, resets to 0.
#[doc = "FLASH HDP Bank1 register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`hdp1r_cur::R`](R). See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct HDP1R_CUR_SPEC;
impl crate::RegisterSpec for HDP1R_CUR_SPEC {
type Ux = u32;
}
#[doc = "`read()` method returns [`hdp1r_cur::R`](R) reader structure"]
impl crate::Readable for HDP1R_CUR_SPEC {}
#[doc = "`reset()` method sets HDP1R_CUR to value 0"]
impl crate::Resettable for HDP1R_CUR_SPEC {
const RESET_VALUE: Self::Ux = 0;
}
|
use crate::{
Parser,
ParseRule,
OwnedParse,
ParseContext,
expressions::ExpressionParser,
type_::TypeParser,
};
use core::pos::BiPos;
use ir::{
Chunk,
hir::HIRInstruction
};
use ir_traits::WriteInstruction;
use lexer::tokens::{
TokenType,
TokenData,
};
use notices::{
DiagnosticSourceBuilder,
DiagnosticLevel,
};
/// Parse rule for a local variable declaration of the shape
/// `let [mut] <name> [: <type>] = <expr>`.
///
/// On success it emits two IR chunks via `parser.emit_ir_whole`: first the
/// `LocalVar` header chunk (mutability flag, name, optional type), then the
/// initializer-expression chunk. Every failure path emits a diagnostic and
/// returns `Err(())`.
pub struct LocalVarParser;
impl ParseRule for LocalVarParser{
    fn parse(parser: &mut Parser) -> Result<(),()>{
        let mut chunk = Chunk::new();
        // `let` is only legal inside a local (body) context.
        if parser.context != ParseContext::Local{
            let source = match parser.request_source_snippet(parser.current_token().pos){
                Ok(source) => source,
                Err(diag) => {
                    parser.emit_parse_diagnostic(&[], &[diag]);
                    return Err(())
                }
            };
            let diag_source = DiagnosticSourceBuilder::new(parser.name.clone(), parser.current_token().pos.start.0)
                .level(DiagnosticLevel::Error)
                .message(format!("Found 'let' outside of local context. This is illegal."))
                .range(parser.current_token().pos.col_range())
                .source(source)
                .build();
            parser.emit_parse_diagnostic(&[], &[diag_source]);
            return Err(())
        }
        // Consume the mandatory `let` keyword.
        match parser.check_consume(TokenType::KwLet){
            Ok(true) => {},
            Ok(false) => {
                let source = match parser.request_source_snippet(parser.current_token().pos){
                    Ok(source) => source,
                    Err(diag) => {
                        parser.emit_parse_diagnostic(&[], &[diag]);
                        return Err(())
                    }
                };
                // NOTE(review): unlike the other diagnostics in this rule this
                // one sets no `.range(..)` — confirm whether that is intended.
                let diag_source = DiagnosticSourceBuilder::new(parser.name.clone(), parser.current_token().pos.start.0)
                    .level(DiagnosticLevel::Error)
                    .message(format!("Expected keyword 'let' for defining an local variable."))
                    .source(source)
                    .build();
                parser.emit_parse_diagnostic(&[], &[diag_source]);
                return Err(())
            }
            Err(diag) => {
                parser.emit_parse_diagnostic(&[], &[diag]);
                return Err(())
            }
        }
        // Start of the LocalVar header chunk: opcode then declaration position.
        chunk.write_instruction(HIRInstruction::LocalVar);
        let pos = parser.current_token().pos;
        chunk.write_pos(pos);
        // Optional `mut`: always write a bool flag, plus the keyword's
        // position (or a default position when absent).
        match parser.check_consume_next(TokenType::KwMut){
            Ok(true) => {
                chunk.write_bool(true);
                chunk.write_pos(parser.current_token().pos);
            }
            Ok(false) => {
                chunk.write_bool(false);
                chunk.write_pos(BiPos::default());
            }
            Err(diag) => {
                parser.emit_parse_diagnostic(&[], &[diag]);
                return Err(())
            }
        }
        // The variable name must be an identifier token.
        if !parser.check(TokenType::Identifier) {
            let message = format!(
                "Expected an identifier token, but instead got {}",
                parser.current_token()
            );
            let source = match parser.request_source_snippet(parser.current_token().pos){
                Ok(source) => source,
                Err(diag) => {
                    parser.emit_parse_diagnostic(&[], &[diag]);
                    return Err(())
                }
            };
            let diag_source = DiagnosticSourceBuilder::new(parser.name.clone(), parser.current_token().pos.start.0)
                .level(DiagnosticLevel::Error)
                .message(message)
                .source(source)
                .build();
            parser.emit_parse_diagnostic(&[], &[diag_source]);
            return Err(());
        }
        // Extract the identifier's string payload.
        let name = match &parser.current_token().data {
            TokenData::String(s) => (*s).to_string(),
            _ => {
                let message = format!(
                    "Failed to extract string data from identifier token.",
                );
                let source = match parser.request_source_snippet(parser.current_token().pos){
                    Ok(source) => source,
                    Err(diag) => {
                        parser.emit_parse_diagnostic(&[], &[diag]);
                        return Err(())
                    }
                };
                let diag_source = DiagnosticSourceBuilder::new(parser.name.clone(), parser.current_token().pos.start.0)
                    .level(DiagnosticLevel::Error)
                    .message(message)
                    .range(parser.current_token().pos.col_range())
                    .source(source)
                    .build();
                parser.emit_parse_diagnostic(&[], &[diag_source]);
                return Err(());
            }
        };
        chunk.write_pos(parser.current_token().pos);
        chunk.write_string(name.clone());
        // Optional `: <type>` annotation; otherwise mark the type as Unknown
        // (to be inferred later).
        if parser.next_token().type_ == TokenType::Colon {
            match TypeParser::get_type(parser){
                Ok(t) =>chunk.write_chunk(t),
                Err(diag) => {
                    let message = format!(
                        "Could not parse type signature for property.",
                    );
                    let source = match parser.request_source_snippet(parser.current_token().pos){
                        Ok(source) => source,
                        Err(diag) => {
                            parser.emit_parse_diagnostic(&[], &[diag]);
                            return Err(())
                        }
                    };
                    let diag_source = DiagnosticSourceBuilder::new(parser.name.clone(), parser.current_token().pos.start.0)
                        .level(DiagnosticLevel::Error)
                        .message(message)
                        .range(parser.current_token().pos.col_range())
                        .source(source)
                        .build();
                    // Emit both the underlying cause and the wrapper diagnostic.
                    parser.emit_parse_diagnostic(&[], &[diag, diag_source]);
                    return Err(());
                }
            }
        } else {
            if let Err(source) = parser.advance(){
                parser.emit_parse_diagnostic(&[], &[source]);
                return Err(())
            };
            chunk.write_pos(parser.current_token().pos);
            chunk.write_instruction(HIRInstruction::Unknown);
        }
        // Header chunk complete; ship it before parsing the initializer.
        parser.emit_ir_whole(chunk);
        // Locals must be initialized: require `=`.
        if let Ok(false) = parser.check_consume(TokenType::Equal) {
            let found_token = parser.current_token();
            let data = &found_token.data;
            let cause_message = format!("Expected '=' but instead got {:?}", data);
            let cause_source = match parser.request_source_snippet(found_token.pos){
                Ok(source) => source,
                Err(diag) => {
                    parser.emit_parse_diagnostic(&[], &[diag]);
                    return Err(())
                }
            };
            let cause_diag_source = DiagnosticSourceBuilder::new(parser.name.clone(), parser.current_token().pos.start.0)
                .level(DiagnosticLevel::Error)
                .message(cause_message)
                .range(parser.current_token().pos.col_range())
                .source(cause_source)
                .build();
            let message = format!("Local must be initialized");
            let source = match parser.request_source_snippet(found_token.pos){
                Ok(source) => source,
                Err(diag) => {
                    parser.emit_parse_diagnostic(&[], &[diag]);
                    return Err(())
                }
            };
            let diag_source = DiagnosticSourceBuilder::new(parser.name.clone(), parser.current_token().pos.start.0)
                .level(DiagnosticLevel::Error)
                .message(message)
                .range(parser.current_token().pos.col_range())
                .source(source)
                .build();
            parser.emit_parse_diagnostic(&[], &[cause_diag_source, diag_source]);
            return Err(());
        }
        // Initializer expression becomes its own IR chunk.
        match ExpressionParser::owned_parse(parser) {
            Ok(expr) => parser.emit_ir_whole(expr),
            Err(cause) => {
                parser.emit_parse_diagnostic(&[], &[cause]);
                return Err(())
            }
        }
        Ok(())
    }
}
// Copyright (c) Facebook, Inc. and its affiliates.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the "hack" directory of this source tree.
use std::borrow::Borrow;
use std::borrow::Cow;
use std::cell::Cell;
use std::cell::RefCell;
use std::collections::BTreeMap;
use std::collections::BTreeSet;
use std::ffi::OsStr;
use std::ffi::OsString;
use std::hash::BuildHasher;
use std::hash::Hash;
use std::mem::size_of;
use std::path::Path;
use std::path::PathBuf;
use std::rc::Rc;
use std::sync::Arc;
use bstr::BStr;
use bstr::BString;
use bumpalo::Bump;
use indexmap::IndexMap;
use indexmap::IndexSet;
use crate::block;
use crate::from;
use crate::Allocator;
use crate::FromError;
use crate::FromOcamlRep;
use crate::FromOcamlRepIn;
use crate::ToOcamlRep;
use crate::Value;
/// Implements `FromOcamlRepIn` for `$ty` by delegating to the type's
/// existing `FromOcamlRep` impl — used for types whose conversion needs no
/// arena allocation.
macro_rules! trivial_from_in_impl {
    ($ty:ty) => {
        impl<'a> FromOcamlRepIn<'a> for $ty {
            fn from_ocamlrep_in(value: Value<'_>, _alloc: &'a Bump) -> Result<Self, FromError> {
                Self::from_ocamlrep(value)
            }
        }
    };
}
// Size in bytes of one OCaml heap word (same size as a `Value`).
const WORD_SIZE: usize = std::mem::size_of::<Value<'_>>();
// `()` maps to OCaml `unit`, represented as the immediate int 0.
impl ToOcamlRep for () {
    fn to_ocamlrep<'a, A: Allocator>(&'a self, _alloc: &'a A) -> Value<'a> {
        Value::int(0)
    }
}
impl FromOcamlRep for () {
    fn from_ocamlrep(value: Value<'_>) -> Result<Self, FromError> {
        match from::expect_int(value)? {
            0 => Ok(()),
            x => Err(FromError::ExpectedUnit(x)),
        }
    }
}
trivial_from_in_impl!(());
// `isize` is passed straight to `Value::int` with no conversion.
impl ToOcamlRep for isize {
    fn to_ocamlrep<'a, A: Allocator>(&'a self, _alloc: &'a A) -> Value<'a> {
        Value::int(*self)
    }
}
impl FromOcamlRep for isize {
    fn from_ocamlrep(value: Value<'_>) -> Result<Self, FromError> {
        from::expect_int(value)
    }
}
trivial_from_in_impl!(isize);
// The remaining integer types convert through `isize`:
// - `to_ocamlrep` panics (`.unwrap()`) if the value does not fit in `isize`;
// - `from_ocamlrep` returns an error if the OCaml int does not fit the type.
impl ToOcamlRep for usize {
    fn to_ocamlrep<'a, A: Allocator>(&'a self, _alloc: &'a A) -> Value<'a> {
        Value::int((*self).try_into().unwrap())
    }
}
impl FromOcamlRep for usize {
    fn from_ocamlrep(value: Value<'_>) -> Result<Self, FromError> {
        Ok(from::expect_int(value)?.try_into()?)
    }
}
trivial_from_in_impl!(usize);
impl ToOcamlRep for i64 {
    fn to_ocamlrep<'a, A: Allocator>(&'a self, _alloc: &'a A) -> Value<'a> {
        Value::int((*self).try_into().unwrap())
    }
}
impl FromOcamlRep for i64 {
    fn from_ocamlrep(value: Value<'_>) -> Result<Self, FromError> {
        Ok(from::expect_int(value)?.try_into()?)
    }
}
trivial_from_in_impl!(i64);
impl ToOcamlRep for u64 {
    fn to_ocamlrep<'a, A: Allocator>(&'a self, _alloc: &'a A) -> Value<'a> {
        Value::int((*self).try_into().unwrap())
    }
}
impl FromOcamlRep for u64 {
    fn from_ocamlrep(value: Value<'_>) -> Result<Self, FromError> {
        Ok(from::expect_int(value)?.try_into()?)
    }
}
trivial_from_in_impl!(u64);
impl ToOcamlRep for i32 {
    fn to_ocamlrep<'a, A: Allocator>(&'a self, _alloc: &'a A) -> Value<'a> {
        Value::int((*self).try_into().unwrap())
    }
}
impl FromOcamlRep for i32 {
    fn from_ocamlrep(value: Value<'_>) -> Result<Self, FromError> {
        Ok(from::expect_int(value)?.try_into()?)
    }
}
trivial_from_in_impl!(i32);
impl ToOcamlRep for u32 {
    fn to_ocamlrep<'a, A: Allocator>(&'a self, _alloc: &'a A) -> Value<'a> {
        Value::int((*self).try_into().unwrap())
    }
}
impl FromOcamlRep for u32 {
    fn from_ocamlrep(value: Value<'_>) -> Result<Self, FromError> {
        Ok(from::expect_int(value)?.try_into()?)
    }
}
trivial_from_in_impl!(u32);
// `bool` maps to the OCaml immediates 0 (false) / 1 (true).
impl ToOcamlRep for bool {
    fn to_ocamlrep<'a, A: Allocator>(&'a self, _alloc: &'a A) -> Value<'a> {
        Value::int((*self).into())
    }
}
impl FromOcamlRep for bool {
    fn from_ocamlrep(value: Value<'_>) -> Result<Self, FromError> {
        match from::expect_int(value)? {
            0 => Ok(false),
            1 => Ok(true),
            x => Err(FromError::ExpectedBool(x)),
        }
    }
}
trivial_from_in_impl!(bool);
// OCaml `char` is a single byte, so only chars in 0..=255 are convertible;
// `to_ocamlrep` asserts this, `from_ocamlrep` reports it as an error.
impl ToOcamlRep for char {
    fn to_ocamlrep<'a, A: Allocator>(&'a self, _alloc: &'a A) -> Value<'a> {
        assert!(*self as u32 <= 255, "char out of range: {}", self);
        Value::int(*self as isize)
    }
}
impl FromOcamlRep for char {
    fn from_ocamlrep(value: Value<'_>) -> Result<Self, FromError> {
        let c = from::expect_int(value)?;
        if (0..=255).contains(&c) {
            Ok(c as u8 as char)
        } else {
            Err(FromError::ExpectedChar(c))
        }
    }
}
trivial_from_in_impl!(char);
// `f64` maps to an OCaml boxed float: a one-word block tagged DOUBLE_TAG
// holding the raw IEEE-754 bits.
impl ToOcamlRep for f64 {
    fn to_ocamlrep<'a, A: Allocator>(&'a self, alloc: &'a A) -> Value<'a> {
        let mut block = alloc.block_with_size_and_tag(1, block::DOUBLE_TAG);
        alloc.set_field(&mut block, 0, unsafe {
            Value::from_bits(self.to_bits() as usize)
        });
        block.build()
    }
}
impl FromOcamlRep for f64 {
    fn from_ocamlrep(value: Value<'_>) -> Result<Self, FromError> {
        let block = from::expect_block_with_size_and_tag(value, 1, block::DOUBLE_TAG)?;
        Ok(f64::from_bits(block[0].0 as u64))
    }
}
trivial_from_in_impl!(f64);
// Smart pointers convert to the representation of their pointee. The
// reference-counted / borrowed forms go through `alloc.memoized`, keyed on
// the pointee's address and size, so aliased pointers convert to a shared
// OCaml value instead of duplicated copies.
impl<T: ToOcamlRep + Sized> ToOcamlRep for Box<T> {
    fn to_ocamlrep<'a, A: Allocator>(&'a self, alloc: &'a A) -> Value<'a> {
        alloc.add(&**self)
    }
}
impl<T: FromOcamlRep + Sized> FromOcamlRep for Box<T> {
    fn from_ocamlrep(value: Value<'_>) -> Result<Self, FromError> {
        Ok(Box::new(T::from_ocamlrep(value)?))
    }
}
impl<T: ToOcamlRep + Sized> ToOcamlRep for &'_ T {
    fn to_ocamlrep<'a, A: Allocator>(&'a self, alloc: &'a A) -> Value<'a> {
        alloc.memoized(
            *self as *const T as *const usize as usize,
            size_of::<T>(),
            |alloc| (**self).to_ocamlrep(alloc),
        )
    }
}
impl<'a, T: FromOcamlRepIn<'a>> FromOcamlRepIn<'a> for &'a T {
    fn from_ocamlrep_in(value: Value<'_>, alloc: &'a Bump) -> Result<Self, FromError> {
        // NB: We don't get any sharing this way.
        Ok(alloc.alloc(T::from_ocamlrep_in(value, alloc)?))
    }
}
impl<T: ToOcamlRep + Sized> ToOcamlRep for Rc<T> {
    fn to_ocamlrep<'a, A: Allocator>(&'a self, alloc: &'a A) -> Value<'a> {
        alloc.memoized(
            self.as_ref() as *const T as usize,
            size_of::<T>(),
            |alloc| alloc.add(self.as_ref()),
        )
    }
}
impl<T: FromOcamlRep> FromOcamlRep for Rc<T> {
    fn from_ocamlrep(value: Value<'_>) -> Result<Self, FromError> {
        // NB: We don't get any sharing this way.
        Ok(Rc::new(T::from_ocamlrep(value)?))
    }
}
impl<T: ToOcamlRep + Sized> ToOcamlRep for Arc<T> {
    fn to_ocamlrep<'a, A: Allocator>(&'a self, alloc: &'a A) -> Value<'a> {
        alloc.memoized(
            self.as_ref() as *const T as usize,
            size_of::<T>(),
            |alloc| alloc.add(self.as_ref()),
        )
    }
}
impl<T: FromOcamlRep> FromOcamlRep for Arc<T> {
    fn from_ocamlrep(value: Value<'_>) -> Result<Self, FromError> {
        // NB: We don't get any sharing this way.
        Ok(Arc::new(T::from_ocamlrep(value)?))
    }
}
// `RefCell<T>` / `Cell<T>` convert to a one-field block wrapping the inner
// value (the same shape this file uses for OCaml `ref`-like containers).
impl<T: ToOcamlRep> ToOcamlRep for RefCell<T> {
    fn to_ocamlrep<'a, A: Allocator>(&'a self, alloc: &'a A) -> Value<'a> {
        let mut block = alloc.block_with_size(1);
        let value_ref: std::cell::Ref<'a, T> = self.borrow();
        alloc.set_field(&mut block, 0, alloc.add(&*value_ref));
        // SAFETY: the `&'a self` lifetime is intended to ensure that our `T` is
        // not mutated or dropped during the to-OCaml conversion, in order to
        // ensure that the allocator's memoization table isn't invalidated. We
        // can't guarantee that statically for types with internal mutability,
        // so the `ToOcamlRep` docs ask the caller to promise not to mutate or
        // drop these values. If they violate that requirement, the allocator
        // may give stale results in the event of aliasing, which is definitely
        // undesirable, but does not break type safety on the Rust side. The
        // allocator ties the lifetime of the Value we're returning to our
        // local variable `value_ref`, but it doesn't actually reference that
        // local, so it's safe to cast the lifetime away.
        unsafe { std::mem::transmute::<Value<'_>, Value<'a>>(block.build()) }
    }
}
impl<T: Copy + ToOcamlRep> ToOcamlRep for Cell<T> {
    fn to_ocamlrep<'a, A: Allocator>(&'a self, alloc: &'a A) -> Value<'a> {
        let mut block = alloc.block_with_size(1);
        // `Cell` requires `T: Copy`, so convert from a copied-out snapshot.
        let value_copy = self.get();
        alloc.set_field(&mut block, 0, alloc.add(&value_copy));
        // SAFETY: as above with RefCell, we need to cast away the lifetime to
        // deal with internal mutability.
        unsafe { std::mem::transmute::<Value<'_>, Value<'a>>(block.build()) }
    }
}
impl<T: FromOcamlRep> FromOcamlRep for Cell<T> {
    fn from_ocamlrep(value: Value<'_>) -> Result<Self, FromError> {
        let block = from::expect_tuple(value, 1)?;
        let value: T = from::field(block, 0)?;
        Ok(Cell::new(value))
    }
}
impl<'a, T: FromOcamlRepIn<'a>> FromOcamlRepIn<'a> for Cell<T> {
    fn from_ocamlrep_in(value: Value<'_>, alloc: &'a Bump) -> Result<Self, FromError> {
        let block = from::expect_tuple(value, 1)?;
        let value: T = from::field_in(block, 0, alloc)?;
        Ok(Cell::new(value))
    }
}
impl<T: FromOcamlRep> FromOcamlRep for RefCell<T> {
    fn from_ocamlrep(value: Value<'_>) -> Result<Self, FromError> {
        let block = from::expect_tuple(value, 1)?;
        let value: T = from::field(block, 0)?;
        Ok(RefCell::new(value))
    }
}
impl<'a, T: FromOcamlRepIn<'a>> FromOcamlRepIn<'a> for RefCell<T> {
    fn from_ocamlrep_in(value: Value<'_>, alloc: &'a Bump) -> Result<Self, FromError> {
        let block = from::expect_tuple(value, 1)?;
        let value: T = from::field_in(block, 0, alloc)?;
        Ok(RefCell::new(value))
    }
}
// `Option<T>` maps to OCaml `'a option`: `None` is the immediate 0,
// `Some x` is a one-field block with tag 0.
impl<T: ToOcamlRep> ToOcamlRep for Option<T> {
    fn to_ocamlrep<'a, A: Allocator>(&'a self, alloc: &'a A) -> Value<'a> {
        match self {
            None => Value::int(0),
            Some(val) => {
                let mut block = alloc.block_with_size(1);
                alloc.set_field(&mut block, 0, alloc.add(val));
                block.build()
            }
        }
    }
}
impl<T: FromOcamlRep> FromOcamlRep for Option<T> {
    fn from_ocamlrep(value: Value<'_>) -> Result<Self, FromError> {
        if value.is_immediate() {
            let _ = from::expect_nullary_variant(value, 0)?;
            Ok(None)
        } else {
            let block = from::expect_block_with_size_and_tag(value, 1, 0)?;
            Ok(Some(from::field(block, 0)?))
        }
    }
}
impl<'a, T: FromOcamlRepIn<'a>> FromOcamlRepIn<'a> for Option<T> {
    fn from_ocamlrep_in(value: Value<'_>, alloc: &'a Bump) -> Result<Self, FromError> {
        if value.is_immediate() {
            let _ = from::expect_nullary_variant(value, 0)?;
            Ok(None)
        } else {
            let block = from::expect_block_with_size_and_tag(value, 1, 0)?;
            Ok(Some(from::field_in(block, 0, alloc)?))
        }
    }
}
// `Result<T, E>` maps to OCaml `('t, 'e) result`: `Ok` is a one-field block
// with tag 0, `Error` a one-field block with tag 1.
impl<T: ToOcamlRep, E: ToOcamlRep> ToOcamlRep for Result<T, E> {
    fn to_ocamlrep<'a, A: Allocator>(&'a self, alloc: &'a A) -> Value<'a> {
        match self {
            Ok(val) => {
                let mut block = alloc.block_with_size(1);
                alloc.set_field(&mut block, 0, alloc.add(val));
                block.build()
            }
            Err(val) => {
                let mut block = alloc.block_with_size_and_tag(1, 1);
                alloc.set_field(&mut block, 0, alloc.add(val));
                block.build()
            }
        }
    }
}
impl<T: FromOcamlRep, E: FromOcamlRep> FromOcamlRep for Result<T, E> {
    fn from_ocamlrep(value: Value<'_>) -> Result<Self, FromError> {
        let block = from::expect_block(value)?;
        match block.tag() {
            0 => Ok(Ok(from::field(block, 0)?)),
            1 => Ok(Err(from::field(block, 0)?)),
            t => Err(FromError::BlockTagOutOfRange { max: 1, actual: t }),
        }
    }
}
impl<'a, T: FromOcamlRepIn<'a>, E: FromOcamlRepIn<'a>> FromOcamlRepIn<'a> for Result<T, E> {
    fn from_ocamlrep_in(value: Value<'_>, alloc: &'a Bump) -> Result<Self, FromError> {
        let block = from::expect_block(value)?;
        match block.tag() {
            0 => Ok(Ok(from::field_in(block, 0, alloc)?)),
            1 => Ok(Err(from::field_in(block, 0, alloc)?)),
            t => Err(FromError::BlockTagOutOfRange { max: 1, actual: t }),
        }
    }
}
// Slices and `Vec` map to OCaml lists: nil is the immediate 0, cons is a
// two-field block (head, tail). Conversion walks the slice in reverse so
// the list is built tail-first.
impl<T: ToOcamlRep> ToOcamlRep for [T] {
    fn to_ocamlrep<'a, A: Allocator>(&'a self, alloc: &'a A) -> Value<'a> {
        let mut hd = alloc.add(&());
        for val in self.iter().rev() {
            let mut block = alloc.block_with_size(2);
            alloc.set_field(&mut block, 0, alloc.add(val));
            alloc.set_field(&mut block, 1, hd);
            hd = block.build();
        }
        hd
    }
}
impl<T: ToOcamlRep> ToOcamlRep for &'_ [T] {
    fn to_ocamlrep<'a, A: Allocator>(&'a self, alloc: &'a A) -> Value<'a> {
        // Memoized on the slice's address/length so aliased slices share.
        alloc.memoized(
            self.as_ptr() as usize,
            self.len() * size_of::<T>(),
            |alloc| (**self).to_ocamlrep(alloc),
        )
    }
}
impl<'a, T: FromOcamlRepIn<'a>> FromOcamlRepIn<'a> for &'a [T] {
    fn from_ocamlrep_in(value: Value<'_>, alloc: &'a Bump) -> Result<Self, FromError> {
        // First pass: measure the list so the arena Vec can be sized exactly.
        let mut len = 0usize;
        let mut hd = value;
        while !hd.is_immediate() {
            let block = from::expect_tuple(hd, 2)?;
            len += 1;
            hd = block[1];
        }
        // The terminator must be nil (immediate 0).
        let hd = hd.as_int().unwrap();
        if hd != 0 {
            return Err(FromError::ExpectedUnit(hd));
        }
        // Second pass: convert each element into the bump arena.
        let mut vec = bumpalo::collections::Vec::with_capacity_in(len, alloc);
        let mut hd = value;
        while !hd.is_immediate() {
            let block = from::expect_tuple(hd, 2).unwrap();
            vec.push(from::field_in(block, 0, alloc)?);
            hd = block[1];
        }
        Ok(vec.into_bump_slice())
    }
}
impl<T: ToOcamlRep> ToOcamlRep for Box<[T]> {
    fn to_ocamlrep<'a, A: Allocator>(&'a self, alloc: &'a A) -> Value<'a> {
        (**self).to_ocamlrep(alloc)
    }
}
impl<T: FromOcamlRep> FromOcamlRep for Box<[T]> {
    fn from_ocamlrep(value: Value<'_>) -> Result<Self, FromError> {
        let vec = <Vec<T>>::from_ocamlrep(value)?;
        Ok(vec.into_boxed_slice())
    }
}
impl<T: ToOcamlRep> ToOcamlRep for Vec<T> {
    fn to_ocamlrep<'a, A: Allocator>(&'a self, alloc: &'a A) -> Value<'a> {
        alloc.add(self.as_slice())
    }
}
impl<T: FromOcamlRep> FromOcamlRep for Vec<T> {
    fn from_ocamlrep(value: Value<'_>) -> Result<Self, FromError> {
        let mut vec = vec![];
        let mut hd = value;
        while !hd.is_immediate() {
            let block = from::expect_tuple(hd, 2)?;
            vec.push(from::field(block, 0)?);
            hd = block[1];
        }
        // The list must end in nil (immediate 0).
        let hd = hd.as_int().unwrap();
        if hd != 0 {
            return Err(FromError::ExpectedUnit(hd));
        }
        Ok(vec)
    }
}
impl<'a, T: FromOcamlRep> FromOcamlRepIn<'a> for Vec<T> {
    fn from_ocamlrep_in(value: Value<'_>, _alloc: &'a Bump) -> Result<Self, FromError> {
        Self::from_ocamlrep(value)
    }
}
// `BTreeMap` maps to an OCaml `Map`: a balanced binary tree whose nodes are
// 5-field blocks (left, key, value, right, height); the empty map is the
// immediate 0.
impl<K: ToOcamlRep + Ord, V: ToOcamlRep> ToOcamlRep for BTreeMap<K, V> {
    fn to_ocamlrep<'a, A: Allocator>(&'a self, alloc: &'a A) -> Value<'a> {
        if self.is_empty() {
            return Value::int(0);
        }
        let len = self.len();
        // BTreeMap iterates in ascending key order, as the builder requires.
        let mut iter = self
            .iter()
            .map(|(k, v)| (k.to_ocamlrep(alloc), v.to_ocamlrep(alloc)));
        let (res, _) = sorted_iter_to_ocaml_map(&mut iter, alloc, len);
        res
    }
}
impl<K: FromOcamlRep + Ord, V: FromOcamlRep> FromOcamlRep for BTreeMap<K, V> {
    fn from_ocamlrep(value: Value<'_>) -> Result<Self, FromError> {
        let mut map = BTreeMap::new();
        btree_map_from_ocamlrep(&mut map, value)?;
        Ok(map)
    }
}
impl<'a, K: FromOcamlRep + Ord, V: FromOcamlRep> FromOcamlRepIn<'a> for BTreeMap<K, V> {
    fn from_ocamlrep_in(value: Value<'_>, _alloc: &'a Bump) -> Result<Self, FromError> {
        Self::from_ocamlrep(value)
    }
}
/// Given an iterator which emits key-value pairs (already converted to OCaml
/// values), build an OCaml Map containing those bindings. The iterator must
/// emit each key only once. The key-value pairs must be emitted in ascending
/// order, sorted by key. The iterator must emit exactly `size` pairs.
pub fn sorted_iter_to_ocaml_map<'a, A: Allocator>(
    iter: &mut impl Iterator<Item = (Value<'a>, Value<'a>)>,
    alloc: &'a A,
    size: usize,
) -> (Value<'a>, usize) {
    if size == 0 {
        return (Value::int(0), 0);
    }
    // Recursively split: first half -> left subtree, middle pair -> this
    // node, remainder -> right subtree; returns (node, height).
    let (left, left_height) = sorted_iter_to_ocaml_map(iter, alloc, size / 2);
    let (key, val) = iter.next().unwrap();
    let (right, right_height) = sorted_iter_to_ocaml_map(iter, alloc, size - 1 - size / 2);
    let height = std::cmp::max(left_height, right_height) + 1;
    let mut block = alloc.block_with_size(5);
    alloc.set_field(&mut block, 0, left);
    alloc.set_field(&mut block, 1, key);
    alloc.set_field(&mut block, 2, val);
    alloc.set_field(&mut block, 3, right);
    alloc.set_field(&mut block, 4, alloc.add_copy(height));
    (block.build(), height)
}
/// In-order walk of an OCaml Map, inserting each binding into `map`.
fn btree_map_from_ocamlrep<K: FromOcamlRep + Ord, V: FromOcamlRep>(
    map: &mut BTreeMap<K, V>,
    value: Value<'_>,
) -> Result<(), FromError> {
    if value.is_immediate() {
        let _ = from::expect_nullary_variant(value, 0)?;
        return Ok(());
    }
    let block = from::expect_block_with_size_and_tag(value, 5, 0)?;
    btree_map_from_ocamlrep(map, block[0])?;
    let key: K = from::field(block, 1)?;
    let val: V = from::field(block, 2)?;
    map.insert(key, val);
    btree_map_from_ocamlrep(map, block[3])?;
    Ok(())
}
/// In-order walk of an OCaml Map, pushing each binding onto `vec`
/// (so the output comes out sorted by key).
fn vec_from_ocaml_map_impl<K: FromOcamlRep, V: FromOcamlRep>(
    vec: &mut Vec<(K, V)>,
    value: Value<'_>,
) -> Result<(), FromError> {
    if value.is_immediate() {
        let _ = from::expect_nullary_variant(value, 0)?;
        return Ok(());
    }
    let block = from::expect_block_with_size_and_tag(value, 5, 0)?;
    vec_from_ocaml_map_impl(vec, block[0])?;
    let key: K = from::field(block, 1)?;
    let val: V = from::field(block, 2)?;
    vec.push((key, val));
    vec_from_ocaml_map_impl(vec, block[3])?;
    Ok(())
}
/// Convert an OCaml Map into a key-sorted `Vec` of pairs.
pub fn vec_from_ocaml_map<K: FromOcamlRep, V: FromOcamlRep>(
    value: Value<'_>,
) -> Result<Vec<(K, V)>, FromError> {
    let mut vec = vec![];
    vec_from_ocaml_map_impl(&mut vec, value)?;
    Ok(vec)
}
/// Arena-allocating variant of [`vec_from_ocaml_map`]: appends the map's
/// bindings, in key order, to a bump-allocated `Vec`.
pub fn vec_from_ocaml_map_in<'a, K, V>(
    value: Value<'_>,
    vec: &mut bumpalo::collections::Vec<'a, (K, V)>,
    alloc: &'a Bump,
) -> Result<(), FromError>
where
    K: FromOcamlRepIn<'a> + Ord,
    V: FromOcamlRepIn<'a>,
{
    if value.is_immediate() {
        let _ = from::expect_nullary_variant(value, 0)?;
        return Ok(());
    }
    let block = from::expect_block_with_size_and_tag(value, 5, 0)?;
    vec_from_ocaml_map_in(block[0], vec, alloc)?;
    let key: K = from::field_in(block, 1, alloc)?;
    let val: V = from::field_in(block, 2, alloc)?;
    vec.push((key, val));
    vec_from_ocaml_map_in(block[3], vec, alloc)?;
    Ok(())
}
// `BTreeSet` maps to an OCaml `Set`: a balanced binary tree whose nodes are
// 4-field blocks (left, element, right, height); the empty set is the
// immediate 0.
impl<T: ToOcamlRep + Ord> ToOcamlRep for BTreeSet<T> {
    fn to_ocamlrep<'a, A: Allocator>(&'a self, alloc: &'a A) -> Value<'a> {
        if self.is_empty() {
            return Value::int(0);
        }
        let len = self.len();
        // BTreeSet iterates in ascending order, as the builder requires.
        let mut iter = self.iter().map(|x| x.to_ocamlrep(alloc));
        let (res, _) = sorted_iter_to_ocaml_set(&mut iter, alloc, len);
        res
    }
}
impl<T: FromOcamlRep + Ord> FromOcamlRep for BTreeSet<T> {
    fn from_ocamlrep(value: Value<'_>) -> Result<Self, FromError> {
        let mut set = BTreeSet::new();
        btree_set_from_ocamlrep(&mut set, value)?;
        Ok(set)
    }
}
impl<'a, T: FromOcamlRep + Ord> FromOcamlRepIn<'a> for BTreeSet<T> {
    fn from_ocamlrep_in(value: Value<'_>, _alloc: &'a Bump) -> Result<Self, FromError> {
        Self::from_ocamlrep(value)
    }
}
/// Build an OCaml Set containing all items emitted by the given iterator. The
/// iterator must emit each item only once. The items must be emitted in
/// ascending order. The iterator must emit exactly `size` items.
pub fn sorted_iter_to_ocaml_set<'a, A: Allocator>(
    iter: &mut impl Iterator<Item = Value<'a>>,
    alloc: &'a A,
    size: usize,
) -> (Value<'a>, usize) {
    if size == 0 {
        return (Value::int(0), 0);
    }
    // Recursively split: first half -> left subtree, middle item -> this
    // node, remainder -> right subtree; returns (node, height).
    let (left, left_height) = sorted_iter_to_ocaml_set(iter, alloc, size / 2);
    let val = iter.next().unwrap();
    let (right, right_height) = sorted_iter_to_ocaml_set(iter, alloc, size - 1 - size / 2);
    let height = std::cmp::max(left_height, right_height) + 1;
    let mut block = alloc.block_with_size(4);
    alloc.set_field(&mut block, 0, left);
    alloc.set_field(&mut block, 1, val);
    alloc.set_field(&mut block, 2, right);
    alloc.set_field(&mut block, 3, alloc.add_copy(height));
    (block.build(), height)
}
/// In-order walk of an OCaml Set, inserting each element into `set`.
fn btree_set_from_ocamlrep<T: FromOcamlRep + Ord>(
    set: &mut BTreeSet<T>,
    value: Value<'_>,
) -> Result<(), FromError> {
    if value.is_immediate() {
        let _ = from::expect_nullary_variant(value, 0)?;
        return Ok(());
    }
    let block = from::expect_block_with_size_and_tag(value, 4, 0)?;
    btree_set_from_ocamlrep(set, block[0])?;
    set.insert(from::field(block, 1)?);
    btree_set_from_ocamlrep(set, block[2])?;
    Ok(())
}
/// In-order walk of an OCaml Set, pushing each element onto `vec`
/// (so the output comes out sorted).
fn vec_from_ocaml_set_impl<T: FromOcamlRep>(
    value: Value<'_>,
    vec: &mut Vec<T>,
) -> Result<(), FromError> {
    if value.is_immediate() {
        let _ = from::expect_nullary_variant(value, 0)?;
        return Ok(());
    }
    let block = from::expect_block_with_size_and_tag(value, 4, 0)?;
    vec_from_ocaml_set_impl(block[0], vec)?;
    vec.push(from::field(block, 1)?);
    vec_from_ocaml_set_impl(block[2], vec)?;
    Ok(())
}
/// Convert an OCaml Set into a sorted `Vec` of its elements.
pub fn vec_from_ocaml_set<T: FromOcamlRep>(value: Value<'_>) -> Result<Vec<T>, FromError> {
    let mut vec = vec![];
    vec_from_ocaml_set_impl(value, &mut vec)?;
    Ok(vec)
}
/// Arena-allocating variant of [`vec_from_ocaml_set`]: appends the set's
/// elements, in order, to a bump-allocated `Vec`.
pub fn vec_from_ocaml_set_in<'a, T: FromOcamlRepIn<'a> + Ord>(
    value: Value<'_>,
    vec: &mut bumpalo::collections::Vec<'a, T>,
    alloc: &'a Bump,
) -> Result<(), FromError> {
    if value.is_immediate() {
        let _ = from::expect_nullary_variant(value, 0)?;
        return Ok(());
    }
    let block = from::expect_block_with_size_and_tag(value, 4, 0)?;
    vec_from_ocaml_set_in(block[0], vec, alloc)?;
    vec.push(from::field_in(block, 1, alloc)?);
    vec_from_ocaml_set_in(block[2], vec, alloc)?;
    Ok(())
}
// `IndexMap` keeps insertion order, but the OCaml Map builder needs its
// input in ascending key order — so collect and sort the entries first.
impl<K: ToOcamlRep + Ord, V: ToOcamlRep, S: BuildHasher + Default> ToOcamlRep
    for IndexMap<K, V, S>
{
    fn to_ocamlrep<'a, A: Allocator>(&'a self, alloc: &'a A) -> Value<'a> {
        if self.is_empty() {
            return Value::int(0);
        }
        let mut vec: Vec<(&'a K, &'a V)> = self.iter().collect();
        vec.sort_unstable_by_key(|&(k, _)| k);
        let len = vec.len();
        let mut iter = vec.iter().map(|(k, v)| {
            let k: &'a K = *k;
            let v: &'a V = *v;
            (k.to_ocamlrep(alloc), v.to_ocamlrep(alloc))
        });
        let (res, _) = sorted_iter_to_ocaml_map(&mut iter, alloc, len);
        res
    }
}
impl<K: FromOcamlRep + Ord + Hash, V: FromOcamlRep, S: BuildHasher + Default> FromOcamlRep
    for IndexMap<K, V, S>
{
    fn from_ocamlrep(value: Value<'_>) -> Result<Self, FromError> {
        // Round-trips through the key-sorted pair list, so the resulting
        // IndexMap's insertion order is key order.
        let vec = vec_from_ocaml_map(value)?;
        Ok(vec.into_iter().collect())
    }
}
// Same sorting treatment for `IndexSet`.
impl<T: ToOcamlRep + Ord, S: BuildHasher + Default> ToOcamlRep for IndexSet<T, S> {
    fn to_ocamlrep<'a, A: Allocator>(&'a self, alloc: &'a A) -> Value<'a> {
        if self.is_empty() {
            return Value::int(0);
        }
        let mut vec: Vec<&'a T> = self.iter().collect();
        vec.sort_unstable();
        let len = vec.len();
        let mut iter = vec.iter().copied().map(|x| x.to_ocamlrep(alloc));
        let (res, _) = sorted_iter_to_ocaml_set(&mut iter, alloc, len);
        res
    }
}
impl<T: FromOcamlRep + Ord + Hash, S: BuildHasher + Default> FromOcamlRep for IndexSet<T, S> {
    fn from_ocamlrep(value: Value<'_>) -> Result<Self, FromError> {
        let set = <BTreeSet<T>>::from_ocamlrep(value)?;
        Ok(set.into_iter().collect())
    }
}
// OS strings and paths convert via their raw byte representation, which is
// only available through `OsStrExt` on Unix — hence the `#[cfg(unix)]`.
impl ToOcamlRep for OsStr {
    // TODO: A Windows implementation would be nice, but what does the OCaml
    // runtime do? If we need Windows support, we'll have to find out.
    #[cfg(unix)]
    fn to_ocamlrep<'a, A: Allocator>(&'a self, alloc: &'a A) -> Value<'a> {
        use std::os::unix::ffi::OsStrExt;
        alloc.add(self.as_bytes())
    }
}
impl ToOcamlRep for &'_ OsStr {
    #[cfg(unix)]
    fn to_ocamlrep<'a, A: Allocator>(&'a self, alloc: &'a A) -> Value<'a> {
        use std::os::unix::ffi::OsStrExt;
        alloc.add(self.as_bytes())
    }
}
impl<'a> FromOcamlRepIn<'a> for &'a OsStr {
    fn from_ocamlrep_in<'b>(value: Value<'b>, alloc: &'a Bump) -> Result<Self, FromError> {
        use std::os::unix::ffi::OsStrExt;
        Ok(std::ffi::OsStr::from_bytes(<&'a [u8]>::from_ocamlrep_in(
            value, alloc,
        )?))
    }
}
impl ToOcamlRep for OsString {
    fn to_ocamlrep<'a, A: Allocator>(&'a self, alloc: &'a A) -> Value<'a> {
        alloc.add(self.as_os_str())
    }
}
impl FromOcamlRep for OsString {
    #[cfg(unix)]
    fn from_ocamlrep(value: Value<'_>) -> Result<Self, FromError> {
        use std::os::unix::ffi::OsStrExt;
        Ok(OsString::from(std::ffi::OsStr::from_bytes(
            bytes_from_ocamlrep(value)?,
        )))
    }
}
// Paths delegate to the `OsStr` conversions above.
impl ToOcamlRep for Path {
    fn to_ocamlrep<'a, A: Allocator>(&'a self, alloc: &'a A) -> Value<'a> {
        alloc.add(self.as_os_str())
    }
}
impl ToOcamlRep for &'_ Path {
    fn to_ocamlrep<'a, A: Allocator>(&'a self, alloc: &'a A) -> Value<'a> {
        alloc.add(self.as_os_str())
    }
}
impl<'a> FromOcamlRepIn<'a> for &'a Path {
    fn from_ocamlrep_in<'b>(value: Value<'b>, alloc: &'a Bump) -> Result<Self, FromError> {
        Ok(Path::new(<&'a OsStr>::from_ocamlrep_in(value, alloc)?))
    }
}
impl ToOcamlRep for PathBuf {
    fn to_ocamlrep<'a, A: Allocator>(&'a self, alloc: &'a A) -> Value<'a> {
        alloc.add(self.as_os_str())
    }
}
impl FromOcamlRep for PathBuf {
    fn from_ocamlrep(value: Value<'_>) -> Result<Self, FromError> {
        Ok(PathBuf::from(OsString::from_ocamlrep(value)?))
    }
}
// Rust strings convert to OCaml strings (byte blocks with STRING_TAG — see
// `str_to_ocamlrep`/`bytes_to_ocamlrep` below); reading back validates UTF-8.
impl ToOcamlRep for String {
    fn to_ocamlrep<'a, A: Allocator>(&'a self, alloc: &'a A) -> Value<'a> {
        alloc.add(self.as_str())
    }
}
impl FromOcamlRep for String {
    fn from_ocamlrep(value: Value<'_>) -> Result<Self, FromError> {
        Ok(String::from(str_from_ocamlrep(value)?))
    }
}
trivial_from_in_impl!(String);
impl ToOcamlRep for Cow<'_, str> {
    fn to_ocamlrep<'a, A: Allocator>(&'a self, alloc: &'a A) -> Value<'a> {
        let s: &str = self.borrow();
        alloc.add(s)
    }
}
impl FromOcamlRep for Cow<'_, str> {
    fn from_ocamlrep(value: Value<'_>) -> Result<Self, FromError> {
        // Always produces the owned variant.
        Ok(Cow::Owned(String::from(str_from_ocamlrep(value)?)))
    }
}
impl ToOcamlRep for str {
    fn to_ocamlrep<'a, A: Allocator>(&'a self, alloc: &'a A) -> Value<'a> {
        str_to_ocamlrep(self, alloc)
    }
}
impl ToOcamlRep for &'_ str {
    fn to_ocamlrep<'a, A: Allocator>(&'a self, alloc: &'a A) -> Value<'a> {
        // Memoized on the string's address/length so aliased &strs share.
        alloc.memoized(self.as_bytes().as_ptr() as usize, self.len(), |alloc| {
            (**self).to_ocamlrep(alloc)
        })
    }
}
impl<'a> FromOcamlRepIn<'a> for &'a str {
    fn from_ocamlrep_in<'b>(value: Value<'b>, alloc: &'a Bump) -> Result<Self, FromError> {
        Ok(alloc.alloc_str(str_from_ocamlrep(value)?))
    }
}
/// Allocate an OCaml string using the given allocator and copy the given string
/// slice into it.
pub fn str_to_ocamlrep<'a, A: Allocator>(s: &str, alloc: &'a A) -> Value<'a> {
    bytes_to_ocamlrep(s.as_bytes(), alloc)
}
/// Given an OCaml string, return a string slice pointing to its contents, if
/// they are valid UTF-8.
pub fn str_from_ocamlrep<'a>(value: Value<'a>) -> Result<&'a str, FromError> {
    Ok(std::str::from_utf8(bytes_from_ocamlrep(value)?)?)
}
// Byte containers (`Vec<u8>`, `BString`/`BStr`, `[u8]`) all convert to OCaml
// strings via `bytes_to_ocamlrep`, with the borrowed forms memoized on
// address/length for sharing.
impl ToOcamlRep for Vec<u8> {
    fn to_ocamlrep<'a, A: Allocator>(&'a self, alloc: &'a A) -> Value<'a> {
        alloc.add(self.as_slice())
    }
}
impl FromOcamlRep for Vec<u8> {
    fn from_ocamlrep(value: Value<'_>) -> Result<Self, FromError> {
        Ok(Vec::from(bytes_from_ocamlrep(value)?))
    }
}
impl ToOcamlRep for BString {
    fn to_ocamlrep<'a, A: Allocator>(&'a self, alloc: &'a A) -> Value<'a> {
        alloc.add(self.as_slice())
    }
}
impl FromOcamlRep for BString {
    fn from_ocamlrep(value: Value<'_>) -> Result<Self, FromError> {
        Ok(Vec::from_ocamlrep(value)?.into())
    }
}
impl ToOcamlRep for BStr {
    fn to_ocamlrep<'a, A: Allocator>(&'a self, alloc: &'a A) -> Value<'a> {
        bytes_to_ocamlrep(self, alloc)
    }
}
impl ToOcamlRep for &'_ BStr {
    fn to_ocamlrep<'a, A: Allocator>(&'a self, alloc: &'a A) -> Value<'a> {
        alloc.memoized(self.as_ptr() as usize, self.len(), |alloc| {
            (**self).to_ocamlrep(alloc)
        })
    }
}
impl<'a> FromOcamlRepIn<'a> for &'a BStr {
    fn from_ocamlrep_in<'b>(value: Value<'b>, alloc: &'a Bump) -> Result<Self, FromError> {
        let slice: &[u8] = alloc.alloc_slice_copy(bytes_from_ocamlrep(value)?);
        Ok(slice.into())
    }
}
impl ToOcamlRep for [u8] {
    fn to_ocamlrep<'a, A: Allocator>(&'a self, alloc: &'a A) -> Value<'a> {
        bytes_to_ocamlrep(self, alloc)
    }
}
impl ToOcamlRep for &'_ [u8] {
    fn to_ocamlrep<'a, A: Allocator>(&'a self, alloc: &'a A) -> Value<'a> {
        alloc.memoized(self.as_ptr() as usize, self.len(), |alloc| {
            (**self).to_ocamlrep(alloc)
        })
    }
}
impl<'a> FromOcamlRepIn<'a> for &'a [u8] {
    fn from_ocamlrep_in<'b>(value: Value<'b>, alloc: &'a Bump) -> Result<Self, FromError> {
        Ok(alloc.alloc_slice_copy(bytes_from_ocamlrep(value)?))
    }
}
/// Allocate an OCaml string using the given allocator and copy the given byte
/// slice into it.
///
/// OCaml strings occupy a whole number of words; the last byte of the last
/// word stores the padding count so the logical length can be recovered from
/// the block size alone (see `bytes_from_ocamlrep`).
pub fn bytes_to_ocamlrep<'a, A: Allocator>(s: &[u8], alloc: &'a A) -> Value<'a> {
    // Round up to whole words, always reserving at least one trailing byte
    // for the padding-count byte.
    let words = (s.len() + 1 /*null-ending*/ + (WORD_SIZE - 1)/*rounding*/) / WORD_SIZE;
    let length = words * WORD_SIZE;
    let mut block = alloc.block_with_size_and_tag(words, block::STRING_TAG);
    let block_contents_as_slice: &mut [u8] = unsafe {
        let block = alloc.block_ptr_mut(&mut block);
        // Zero the final word, then write the padding count into its last
        // byte (OCaml's convention for string length recovery).
        *block.add(words - 1) = Value::from_bits(0);
        let block_bytes = block as *mut u8;
        *block_bytes.add(length - 1) = (length - s.len() - 1) as u8;
        std::slice::from_raw_parts_mut(block_bytes, s.len())
    };
    block_contents_as_slice.copy_from_slice(s);
    block.build()
}
/// Given an OCaml string, return a byte slice pointing to its contents.
///
/// # Errors
/// Fails if `value` is not a block, or if the block does not carry
/// `STRING_TAG`.
pub fn bytes_from_ocamlrep<'a>(value: Value<'a>) -> Result<&'a [u8], FromError> {
    let block = from::expect_block(value)?;
    from::expect_block_tag(block, block::STRING_TAG)?;
    let block_size_in_bytes = block.size() * std::mem::size_of::<Value<'_>>();
    let slice = unsafe {
        // Contents start one word past the header.
        let ptr = block.0.as_ptr().add(1) as *const u8;
        // The final byte holds the padding count written by
        // `bytes_to_ocamlrep`; subtract it (plus the count byte itself) to
        // recover the logical length.
        let padding = *ptr.add(block_size_in_bytes - 1);
        let len = block_size_in_bytes - padding as usize - 1;
        std::slice::from_raw_parts(ptr, len)
    };
    Ok(slice)
}
/// A Rust 2-tuple becomes an OCaml tuple: a block with one field per element.
impl<T0, T1> ToOcamlRep for (T0, T1)
where
    T0: ToOcamlRep,
    T1: ToOcamlRep,
{
    fn to_ocamlrep<'a, A: Allocator>(&'a self, alloc: &'a A) -> Value<'a> {
        let mut tuple = alloc.block_with_size(2);
        alloc.set_field(&mut tuple, 0, alloc.add(&self.0));
        alloc.set_field(&mut tuple, 1, alloc.add(&self.1));
        tuple.build()
    }
}
/// Converts an OCaml tuple of size 2 back into a Rust 2-tuple.
impl<T0, T1> FromOcamlRep for (T0, T1)
where
    T0: FromOcamlRep,
    T1: FromOcamlRep,
{
    fn from_ocamlrep(value: Value<'_>) -> Result<Self, FromError> {
        let block = from::expect_tuple(value, 2)?;
        Ok((from::field(block, 0)?, from::field(block, 1)?))
    }
}
/// Arena-allocating conversion from an OCaml tuple of size 2.
impl<'a, T0, T1> FromOcamlRepIn<'a> for (T0, T1)
where
    T0: FromOcamlRepIn<'a>,
    T1: FromOcamlRepIn<'a>,
{
    fn from_ocamlrep_in<'b>(value: Value<'b>, alloc: &'a Bump) -> Result<Self, FromError> {
        let block = from::expect_tuple(value, 2)?;
        Ok((
            from::field_in(block, 0, alloc)?,
            from::field_in(block, 1, alloc)?,
        ))
    }
}
/// A Rust 3-tuple becomes an OCaml tuple: a block with one field per element.
impl<T0, T1, T2> ToOcamlRep for (T0, T1, T2)
where
    T0: ToOcamlRep,
    T1: ToOcamlRep,
    T2: ToOcamlRep,
{
    fn to_ocamlrep<'a, A: Allocator>(&'a self, alloc: &'a A) -> Value<'a> {
        let mut tuple = alloc.block_with_size(3);
        alloc.set_field(&mut tuple, 0, alloc.add(&self.0));
        alloc.set_field(&mut tuple, 1, alloc.add(&self.1));
        alloc.set_field(&mut tuple, 2, alloc.add(&self.2));
        tuple.build()
    }
}
/// Converts an OCaml tuple of size 3 back into a Rust 3-tuple.
impl<T0, T1, T2> FromOcamlRep for (T0, T1, T2)
where
    T0: FromOcamlRep,
    T1: FromOcamlRep,
    T2: FromOcamlRep,
{
    fn from_ocamlrep(value: Value<'_>) -> Result<Self, FromError> {
        let block = from::expect_tuple(value, 3)?;
        Ok((
            from::field(block, 0)?,
            from::field(block, 1)?,
            from::field(block, 2)?,
        ))
    }
}
/// Arena-allocating conversion from an OCaml tuple of size 3.
impl<'a, T0, T1, T2> FromOcamlRepIn<'a> for (T0, T1, T2)
where
    T0: FromOcamlRepIn<'a>,
    T1: FromOcamlRepIn<'a>,
    T2: FromOcamlRepIn<'a>,
{
    fn from_ocamlrep_in<'b>(value: Value<'b>, alloc: &'a Bump) -> Result<Self, FromError> {
        let block = from::expect_tuple(value, 3)?;
        Ok((
            from::field_in(block, 0, alloc)?,
            from::field_in(block, 1, alloc)?,
            from::field_in(block, 2, alloc)?,
        ))
    }
}
/// A Rust 4-tuple becomes an OCaml tuple: a block with one field per element.
impl<T0, T1, T2, T3> ToOcamlRep for (T0, T1, T2, T3)
where
    T0: ToOcamlRep,
    T1: ToOcamlRep,
    T2: ToOcamlRep,
    T3: ToOcamlRep,
{
    fn to_ocamlrep<'a, A: Allocator>(&'a self, alloc: &'a A) -> Value<'a> {
        let mut tuple = alloc.block_with_size(4);
        alloc.set_field(&mut tuple, 0, alloc.add(&self.0));
        alloc.set_field(&mut tuple, 1, alloc.add(&self.1));
        alloc.set_field(&mut tuple, 2, alloc.add(&self.2));
        alloc.set_field(&mut tuple, 3, alloc.add(&self.3));
        tuple.build()
    }
}
/// Converts an OCaml tuple of size 4 back into a Rust 4-tuple.
impl<T0, T1, T2, T3> FromOcamlRep for (T0, T1, T2, T3)
where
    T0: FromOcamlRep,
    T1: FromOcamlRep,
    T2: FromOcamlRep,
    T3: FromOcamlRep,
{
    fn from_ocamlrep(value: Value<'_>) -> Result<Self, FromError> {
        let block = from::expect_tuple(value, 4)?;
        Ok((
            from::field(block, 0)?,
            from::field(block, 1)?,
            from::field(block, 2)?,
            from::field(block, 3)?,
        ))
    }
}
/// Arena-allocating conversion from an OCaml tuple of size 4.
impl<'a, T0, T1, T2, T3> FromOcamlRepIn<'a> for (T0, T1, T2, T3)
where
    T0: FromOcamlRepIn<'a>,
    T1: FromOcamlRepIn<'a>,
    T2: FromOcamlRepIn<'a>,
    T3: FromOcamlRepIn<'a>,
{
    fn from_ocamlrep_in<'b>(value: Value<'b>, alloc: &'a Bump) -> Result<Self, FromError> {
        let block = from::expect_tuple(value, 4)?;
        Ok((
            from::field_in(block, 0, alloc)?,
            from::field_in(block, 1, alloc)?,
            from::field_in(block, 2, alloc)?,
            from::field_in(block, 3, alloc)?,
        ))
    }
}
/// A Rust 5-tuple becomes an OCaml tuple: a block with one field per element.
impl<T0, T1, T2, T3, T4> ToOcamlRep for (T0, T1, T2, T3, T4)
where
    T0: ToOcamlRep,
    T1: ToOcamlRep,
    T2: ToOcamlRep,
    T3: ToOcamlRep,
    T4: ToOcamlRep,
{
    fn to_ocamlrep<'a, A: Allocator>(&'a self, alloc: &'a A) -> Value<'a> {
        let mut tuple = alloc.block_with_size(5);
        alloc.set_field(&mut tuple, 0, alloc.add(&self.0));
        alloc.set_field(&mut tuple, 1, alloc.add(&self.1));
        alloc.set_field(&mut tuple, 2, alloc.add(&self.2));
        alloc.set_field(&mut tuple, 3, alloc.add(&self.3));
        alloc.set_field(&mut tuple, 4, alloc.add(&self.4));
        tuple.build()
    }
}
/// Converts an OCaml tuple of size 5 back into a Rust 5-tuple.
impl<T0, T1, T2, T3, T4> FromOcamlRep for (T0, T1, T2, T3, T4)
where
    T0: FromOcamlRep,
    T1: FromOcamlRep,
    T2: FromOcamlRep,
    T3: FromOcamlRep,
    T4: FromOcamlRep,
{
    fn from_ocamlrep(value: Value<'_>) -> Result<Self, FromError> {
        let block = from::expect_tuple(value, 5)?;
        Ok((
            from::field(block, 0)?,
            from::field(block, 1)?,
            from::field(block, 2)?,
            from::field(block, 3)?,
            from::field(block, 4)?,
        ))
    }
}
/// Arena-allocating conversion from an OCaml tuple of size 5.
impl<'a, T0, T1, T2, T3, T4> FromOcamlRepIn<'a> for (T0, T1, T2, T3, T4)
where
    T0: FromOcamlRepIn<'a>,
    T1: FromOcamlRepIn<'a>,
    T2: FromOcamlRepIn<'a>,
    T3: FromOcamlRepIn<'a>,
    T4: FromOcamlRepIn<'a>,
{
    fn from_ocamlrep_in<'b>(value: Value<'b>, alloc: &'a Bump) -> Result<Self, FromError> {
        let block = from::expect_tuple(value, 5)?;
        Ok((
            from::field_in(block, 0, alloc)?,
            from::field_in(block, 1, alloc)?,
            from::field_in(block, 2, alloc)?,
            from::field_in(block, 3, alloc)?,
            from::field_in(block, 4, alloc)?,
        ))
    }
}
/// A Rust 6-tuple becomes an OCaml tuple: a block with one field per element.
impl<T0, T1, T2, T3, T4, T5> ToOcamlRep for (T0, T1, T2, T3, T4, T5)
where
    T0: ToOcamlRep,
    T1: ToOcamlRep,
    T2: ToOcamlRep,
    T3: ToOcamlRep,
    T4: ToOcamlRep,
    T5: ToOcamlRep,
{
    fn to_ocamlrep<'a, A: Allocator>(&'a self, alloc: &'a A) -> Value<'a> {
        let mut tuple = alloc.block_with_size(6);
        alloc.set_field(&mut tuple, 0, alloc.add(&self.0));
        alloc.set_field(&mut tuple, 1, alloc.add(&self.1));
        alloc.set_field(&mut tuple, 2, alloc.add(&self.2));
        alloc.set_field(&mut tuple, 3, alloc.add(&self.3));
        alloc.set_field(&mut tuple, 4, alloc.add(&self.4));
        alloc.set_field(&mut tuple, 5, alloc.add(&self.5));
        tuple.build()
    }
}
/// Converts an OCaml tuple of size 6 back into a Rust 6-tuple.
impl<T0, T1, T2, T3, T4, T5> FromOcamlRep for (T0, T1, T2, T3, T4, T5)
where
    T0: FromOcamlRep,
    T1: FromOcamlRep,
    T2: FromOcamlRep,
    T3: FromOcamlRep,
    T4: FromOcamlRep,
    T5: FromOcamlRep,
{
    fn from_ocamlrep(value: Value<'_>) -> Result<Self, FromError> {
        let block = from::expect_tuple(value, 6)?;
        Ok((
            from::field(block, 0)?,
            from::field(block, 1)?,
            from::field(block, 2)?,
            from::field(block, 3)?,
            from::field(block, 4)?,
            from::field(block, 5)?,
        ))
    }
}
/// Arena-allocating conversion from an OCaml tuple of size 6.
impl<'a, T0, T1, T2, T3, T4, T5> FromOcamlRepIn<'a> for (T0, T1, T2, T3, T4, T5)
where
    T0: FromOcamlRepIn<'a>,
    T1: FromOcamlRepIn<'a>,
    T2: FromOcamlRepIn<'a>,
    T3: FromOcamlRepIn<'a>,
    T4: FromOcamlRepIn<'a>,
    T5: FromOcamlRepIn<'a>,
{
    fn from_ocamlrep_in<'b>(value: Value<'b>, alloc: &'a Bump) -> Result<Self, FromError> {
        let block = from::expect_tuple(value, 6)?;
        Ok((
            from::field_in(block, 0, alloc)?,
            from::field_in(block, 1, alloc)?,
            from::field_in(block, 2, alloc)?,
            from::field_in(block, 3, alloc)?,
            from::field_in(block, 4, alloc)?,
            from::field_in(block, 5, alloc)?,
        ))
    }
}
impl<T0, T1, T2, T3, T4, T5, T6> ToOcamlRep for (T0, T1, T2, T3, T4, T5, T6)
where
T0: ToOcamlRep,
T1: ToOcamlRep,
T2: ToOcamlRep,
T3: ToOcamlRep,
T4: ToOcamlRep,
T5: ToOcamlRep,
T6: ToOcamlRep,
{
fn to_ocamlrep<'a, A: Allocator>(&'a self, alloc: &'a A) -> Value<'a> {
let mut block = alloc.block_with_size(7);
alloc.set_field(&mut block, 0, alloc.add(&self.0));
alloc.set_field(&mut block, 1, alloc.add(&self.1));
alloc.set_field(&mut block, 2, alloc.add(&self.2));
alloc.set_field(&mut block, 3, alloc.add(&self.3));
alloc.set_field(&mut block, 4, alloc.add(&self.4));
alloc.set_field(&mut block, 5, alloc.add(&self.5));
alloc.set_field(&mut block, 6, alloc.add(&self.6));
block.build()
}
}
impl<T0, T1, T2, T3, T4, T5, T6> FromOcamlRep for (T0, T1, T2, T3, T4, T5, T6)
where
T0: FromOcamlRep,
T1: FromOcamlRep,
T2: FromOcamlRep,
T3: FromOcamlRep,
T4: FromOcamlRep,
T5: FromOcamlRep,
T6: FromOcamlRep,
{
fn from_ocamlrep(value: Value<'_>) -> Result<Self, FromError> {
let block = from::expect_tuple(value, 7)?;
let f0: T0 = from::field(block, 0)?;
let f1: T1 = from::field(block, 1)?;
let f2: T2 = from::field(block, 2)?;
let f3: T3 = from::field(block, 3)?;
let f4: T4 = from::field(block, 4)?;
let f5: T5 = from::field(block, 5)?;
let f6: T6 = from::field(block, 6)?;
Ok((f0, f1, f2, f3, f4, f5, f6))
}
}
impl<'a, T0, T1, T2, T3, T4, T5, T6> FromOcamlRepIn<'a> for (T0, T1, T2, T3, T4, T5, T6)
where
T0: FromOcamlRepIn<'a>,
T1: FromOcamlRepIn<'a>,
T2: FromOcamlRepIn<'a>,
T3: FromOcamlRepIn<'a>,
T4: FromOcamlRepIn<'a>,
T5: FromOcamlRepIn<'a>,
T6: FromOcamlRepIn<'a>,
{
fn from_ocamlrep_in<'b>(value: Value<'b>, alloc: &'a Bump) -> Result<Self, FromError> {
let block = from::expect_tuple(value, 7)?;
let f0: T0 = from::field_in(block, 0, alloc)?;
let f1: T1 = from::field_in(block, 1, alloc)?;
let f2: T2 = from::field_in(block, 2, alloc)?;
let f3: T3 = from::field_in(block, 3, alloc)?;
let f4: T4 = from::field_in(block, 4, alloc)?;
let f5: T5 = from::field_in(block, 5, alloc)?;
let f6: T6 = from::field_in(block, 6, alloc)?;
Ok((f0, f1, f2, f3, f4, f5, f6))
}
}
/// A Rust 8-tuple becomes an OCaml tuple: a block with one field per element.
impl<T0, T1, T2, T3, T4, T5, T6, T7> ToOcamlRep for (T0, T1, T2, T3, T4, T5, T6, T7)
where
    T0: ToOcamlRep,
    T1: ToOcamlRep,
    T2: ToOcamlRep,
    T3: ToOcamlRep,
    T4: ToOcamlRep,
    T5: ToOcamlRep,
    T6: ToOcamlRep,
    T7: ToOcamlRep,
{
    fn to_ocamlrep<'a, A: Allocator>(&'a self, alloc: &'a A) -> Value<'a> {
        let mut tuple = alloc.block_with_size(8);
        alloc.set_field(&mut tuple, 0, alloc.add(&self.0));
        alloc.set_field(&mut tuple, 1, alloc.add(&self.1));
        alloc.set_field(&mut tuple, 2, alloc.add(&self.2));
        alloc.set_field(&mut tuple, 3, alloc.add(&self.3));
        alloc.set_field(&mut tuple, 4, alloc.add(&self.4));
        alloc.set_field(&mut tuple, 5, alloc.add(&self.5));
        alloc.set_field(&mut tuple, 6, alloc.add(&self.6));
        alloc.set_field(&mut tuple, 7, alloc.add(&self.7));
        tuple.build()
    }
}
/// Converts an OCaml tuple of size 8 back into a Rust 8-tuple.
impl<T0, T1, T2, T3, T4, T5, T6, T7> FromOcamlRep for (T0, T1, T2, T3, T4, T5, T6, T7)
where
    T0: FromOcamlRep,
    T1: FromOcamlRep,
    T2: FromOcamlRep,
    T3: FromOcamlRep,
    T4: FromOcamlRep,
    T5: FromOcamlRep,
    T6: FromOcamlRep,
    T7: FromOcamlRep,
{
    fn from_ocamlrep(value: Value<'_>) -> Result<Self, FromError> {
        let block = from::expect_tuple(value, 8)?;
        Ok((
            from::field(block, 0)?,
            from::field(block, 1)?,
            from::field(block, 2)?,
            from::field(block, 3)?,
            from::field(block, 4)?,
            from::field(block, 5)?,
            from::field(block, 6)?,
            from::field(block, 7)?,
        ))
    }
}
/// Arena-allocating conversion from an OCaml tuple of size 8.
impl<'a, T0, T1, T2, T3, T4, T5, T6, T7> FromOcamlRepIn<'a> for (T0, T1, T2, T3, T4, T5, T6, T7)
where
    T0: FromOcamlRepIn<'a>,
    T1: FromOcamlRepIn<'a>,
    T2: FromOcamlRepIn<'a>,
    T3: FromOcamlRepIn<'a>,
    T4: FromOcamlRepIn<'a>,
    T5: FromOcamlRepIn<'a>,
    T6: FromOcamlRepIn<'a>,
    T7: FromOcamlRepIn<'a>,
{
    fn from_ocamlrep_in<'b>(value: Value<'b>, alloc: &'a Bump) -> Result<Self, FromError> {
        let block = from::expect_tuple(value, 8)?;
        Ok((
            from::field_in(block, 0, alloc)?,
            from::field_in(block, 1, alloc)?,
            from::field_in(block, 2, alloc)?,
            from::field_in(block, 3, alloc)?,
            from::field_in(block, 4, alloc)?,
            from::field_in(block, 5, alloc)?,
            from::field_in(block, 6, alloc)?,
            from::field_in(block, 7, alloc)?,
        ))
    }
}
|
use aes::{Aes128, BlockEncrypt, NewBlockCipher, cipher::{generic_array::GenericArray}};
use rand_core::OsRng;
use x25519_dalek::{EphemeralSecret, PublicKey};
use super::status::Status;
use std::{fs::File, io::{self, Read, Write}, net::TcpStream};
/// An encrypted client connection: plaintext is staged in `buffer` and
/// AES-encrypted in 16-byte blocks before being written to `stream`.
pub struct Client {
    stream: TcpStream, // underlying transport
    cipher: Aes128,    // block cipher keyed from the X25519 shared secret
    buffer: Vec<u8>,   // staged plaintext awaiting flush()
}
impl Client {
    /// Perform an unauthenticated X25519 key exchange over `stream` and
    /// derive an AES-128 key from the first 16 bytes of the shared secret.
    ///
    /// # Errors
    /// Returns any I/O error raised during the handshake.
    pub fn connect(mut stream: TcpStream) -> io::Result<Self> {
        let secret = EphemeralSecret::new(OsRng);
        let public = PublicKey::from(&secret);
        // write_all/read_exact: the plain write()/read() previously used may
        // transfer only part of the 32-byte key, silently corrupting the
        // handshake on a short read/write.
        stream.write_all(public.as_bytes())?;
        let mut other_public = [0u8; 32];
        stream.read_exact(&mut other_public)?;
        let other_public = PublicKey::from(other_public);
        let shared_secret = secret.diffie_hellman(&other_public);
        // NOTE(review): the exchange is unauthenticated (MITM-able) and the
        // key is the truncated raw secret rather than the output of a KDF —
        // confirm this matches the intended threat model.
        let key = GenericArray::from_slice(&shared_secret.as_bytes()[0..16]);
        let cipher = Aes128::new(key);
        Ok(Self { stream, cipher, buffer: vec![] })
    }
    /// Stage plaintext; nothing reaches the socket until `flush`.
    fn write(&mut self, data: &[u8]) {
        self.buffer.extend_from_slice(data);
    }
    // Convert plaintext stored in buffer into 16 byte chunks w/ padding,
    // encrypt each block with AES, and then write to the socket.
    fn flush(&mut self) -> io::Result<()> {
        // Length header: the plaintext length, encrypted as its own block,
        // lets the receiver strip the zero padding.
        let length = (self.buffer.len() as u64).to_le_bytes();
        let mut block = GenericArray::clone_from_slice(&[length, [0u8; 8]].concat());
        self.cipher.encrypt_block(&mut block);
        self.stream.write_all(block.as_slice())?;
        // Zero-pad the plaintext up to a multiple of the block size.
        self.write(vec![0u8; 16 - self.buffer.len() % 16].as_slice());
        for chunk in self.buffer.chunks_mut(16) {
            self.cipher.encrypt_block(GenericArray::from_mut_slice(chunk));
        }
        self.stream.write_all(self.buffer.as_slice())?;
        // Clear the staging buffer: previously it was left holding this
        // message's ciphertext, so the next message would re-encrypt and
        // resend it as garbage.
        self.buffer.clear();
        Ok(())
    }
    /// Send a text message: a `Status::Message` tag byte followed by the
    /// UTF-8 bytes of `message`.
    pub fn write_message(&mut self, message: &str) -> io::Result<()> {
        self.write(&[Status::Message as u8]);
        self.write(message.as_bytes());
        self.flush()?;
        Ok(())
    }
    /// Send a file: a `Status::File` tag, the path, a newline separator,
    /// then the raw file contents.
    pub fn write_file(&mut self, path: &str) -> io::Result<()> {
        // Open before buffering anything so a missing file leaves the
        // buffer untouched. (The former `path.clone()` cloned the &str
        // reference — a no-op — and was dropped.)
        let mut file = File::open(path)?;
        self.write(&[Status::File as u8]);
        self.write(path.as_bytes());
        self.write(&[b'\n']);
        file.read_to_end(&mut self.buffer)?;
        self.flush()?;
        Ok(())
    }
}
|
//! Defines the `Value` type and its related constants.
/// Evaluation value in centipawns.
///
/// Positive values mean that the position is favorable for the side
/// to move. Negative values mean the position is favorable for the
/// other side (not to move). A value of `0` means that the chances
/// are equal. For example: a value of `100` might mean that the side
/// to move is a pawn ahead.
///
/// # Constants:
///
/// * `VALUE_UNKNOWN` has the special meaning of "unknown value".
///
/// * Values bigger than `VALUE_EVAL_MAX` designate a win by
/// inevitable checkmate.
///
/// * Values smaller than `VALUE_EVAL_MIN` designate a loss by
/// inevitable checkmate.
///
/// * `VALUE_MAX` designates a checkmate (a win).
///
/// * `VALUE_MAX - 1` designates an inevitable checkmate (a win) in
/// 1 half-move.
///
/// * `VALUE_MAX - 2` designates an inevitable checkmate (a win) in
/// 2 half-moves.
///
/// * and so forth.
///
/// * `VALUE_MIN` designates a checkmate (a loss).
///
/// * `VALUE_MIN + 1` designates an inevitable checkmate (a loss)
/// in 1 half-move.
///
/// * `VALUE_MIN + 2` designates an inevitable checkmate (a loss)
/// in 2 half-moves.
///
/// * and so forth.
pub type Value = i16;
// One below VALUE_MIN, i.e. outside the valid value range, so it can never
// collide with a real evaluation.
pub const VALUE_UNKNOWN: Value = VALUE_MIN - 1;
pub const VALUE_MAX: Value = ::std::i16::MAX;
// Defined as -VALUE_MAX rather than i16::MIN so the range is symmetric and
// negating a valid value can never overflow.
pub const VALUE_MIN: Value = -VALUE_MAX;
// Values beyond +/-VALUE_EVAL_MAX encode forced-checkmate distances.
pub const VALUE_EVAL_MAX: Value = 29999;
pub const VALUE_EVAL_MIN: Value = -VALUE_EVAL_MAX;
|
extern crate dotenv;
use crate::errors::ServiceError;
use crate::graphql::Context;
use crate::models::character::Character;
use diesel::prelude::*;
pub fn characters(context: &Context) -> Result<Vec<Character>, ServiceError> {
use crate::schema::characters::dsl::*;
let conn: &MysqlConnection = &context.db.lock().unwrap();
if context.user.is_none() {
return Err(ServiceError::Unauthorized);
}
let characters_data = characters.load::<Character>(conn).expect("");
Ok(characters_data)
}
|
//Temporary warning disables for development
#![allow(non_snake_case)]
#![allow(dead_code)]
#![allow(unused_variables)]
//use std::ptr;
use trees::{tr, Node};
//Struct representing a node
/// One state in the crafting search tree; identity (for PartialEq) is the
/// `test_id` alone.
#[derive(Copy, Clone)]
struct CraftingNode{
    test_id: i16, //Temporary ID for testing known graphs
    is_goal: bool, // set when progress reaches the completion threshold (100)
    progress: i16, //Simplified values
    quality: i16,
    durability: i16,
    /* FFXIV specific parts, begin with testing IDDFS
    remaining_durability: i16,
    remaining_cp: i16,
    quality_expected: i16,
    quality_max: i16,
    quality_min: i16,
    progress_expected: i16,
    progress_max: i16,
    progress_min: i16,*/
    //child_nodes: Option<Vec<Node>>,
}
impl PartialEq for CraftingNode {
    /// Nodes are identified solely by their test id.
    fn eq(&self, other: &Self) -> bool {
        self.test_id == other.test_id
    }
}
/// Upper bound on the IDDFS search depth.
const MAX_DEPTH: i16 = 300;
// parent.push_back(tr(new_node from this))
/// Build a single-node tree from `[test_id, progress, quality, durability]`.
/// The node counts as a goal once progress reaches 100.
fn make_node(traits: [i16; 4]) -> trees::Tree<CraftingNode> {
    let [test_id, progress, quality, durability] = traits;
    tr(CraftingNode {
        test_id,
        // Express the goal test as an expression instead of the previous
        // mutable bool-flag dance.
        is_goal: progress >= 100,
        progress,
        quality,
        durability,
    })
}
//Replace with struct/according to struct or something?
/// A synthesis action: +20 progress at the cost of 10 durability.
/// (The doubled id encodes the path taken through the test graph.)
fn synthesis(old: &CraftingNode) -> trees::Tree<CraftingNode> {
    make_node([
        old.test_id * 2,
        old.progress + 20,
        old.quality,
        old.durability - 10,
    ])
}
/// A touch action: +30 quality at the cost of 10 durability.
/// (The doubled id encodes the path taken through the test graph.)
fn touch(old: &CraftingNode) -> trees::Tree<CraftingNode> {
    make_node([
        old.test_id * 2,
        old.progress,
        old.quality + 30,
        old.durability - 10,
    ])
}
/// Expand each child of `root` with the available actions (touch/synthesis)
/// while enough durability remains.
///
/// NOTE(review): `iter_mut` visits only the immediate children of `root`, so
/// this does not recurse — on a freshly created root (no children) it does
/// nothing. Confirm whether it should expand `root` itself or walk deeper.
fn generate_children(root: &mut Node<CraftingNode>) {
    for mut current in root.iter_mut() {
        let current_data = current.data().clone();
        println!("\n{}", current_data.test_id);
        //children nodes according to action types
        if current_data.durability > 10 {
            current.push_back(touch(&current_data));
            current.push_back(synthesis(&current_data));
        }
        // (Removed an unused `iter_mut` binding; descending in depth belongs
        // to the search functions once the tree is built.)
    }
}
fn main() {
    println!("\n-------------");
    println!("Run Start");
    println!("-------------\n");
    // Build the tree, starting from a root with no progress and full
    // durability.
    let root_node = CraftingNode {
        test_id: 0,
        is_goal: false,
        progress: 0,
        quality: 0,
        durability: 40,
    };
    let mut root = tr(root_node);
    generate_children(&mut root);
    // IDDFS over the built tree would run here.
    println!("\nEND\n");
}
/*fn iterative_depth_first_search(root: &Node) -> Option<&Node> {
//Search to depths of increasing size
for depth in 1..MAX_DEPTH {
//Start a search from the root to the given depth
//Get tuple telling us if there are remaining deeper nodes and returning any found nodes.
//Expecting tuple with
let results:(Option<&Node>, bool) = depth_first_search(root, depth);
if results.0 != None {
return results.0;
/*
TODO: Update this so instead of returning first success node and stopping, instead
display the found crafting path and continue searching for better ones
Also collect path taken to get here
*/
} else if results.1 == false {
//Searched through entire tree, failed to find successful path
return None;
}
}
return None; //Failed to find (?), returning null node
}
//Depth limited search for directed graphs
fn depth_first_search(current_node: &Node, depth: i16) -> (Option<&Node>, bool) {
if depth == 0 {
//We've hit max depth for this iteration, treat as leaf node
if current_node.is_goal {
return (Some(current_node), true);
} else {
//There are still remaining children potentially, but they
// will be visited in the next iteration with higher depth
return (None, true);
}
} else {//if depth > 0 {
assert!(depth > 0);
let nodes_remain = false;
//Depth remaining, check the child nodes, if any
for child in /*current_node.as_ref().child_nodes.unwrap_or_else(Vec::new).iter()*/ {
//Recurse down to the next depth
let results = depth_first_search(child, depth-1);
if results.0 != None {
//We found the goal node, but there are still nodes we haven't checked
return (results.0, true);
} else if results.1 {
//Haven't found the node, but there are still nodes we haven't checked
let nodes_remain = true;
}
}
//We didn't find the goal node, return whether any nodes remain as well.
return (None, nodes_remain);
}
}*/
/*
IDDFS recreated by referencing Wikipedia
First papers on this algorithm by Richard E Korf (1985), "Depth-first iterative deepening"
https://en.wikipedia.org/wiki/Iterative_deepening_depth-first_search
function IDDFS(root) is
for depth from 0 to ∞ do
found, remaining ← DLS(root, depth)
if found ≠ null then
return found
else if not remaining then
return null
function DLS(node, depth) is
if depth = 0 then
if node is a goal then
return (node, true)
else
return (null, true) (Not found, but may have children)
else if depth > 0 then
any_remaining ← false
foreach child of node do
found, remaining ← DLS(child, depth−1)
if found ≠ null then
return (found, true)
if remaining then
any_remaining ← true (At least one node found at depth, let IDDFS deepen)
return (null, any_remaining)
*/
|
// Copyright 2014-2018 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![allow(dead_code)]
#![allow(clippy::single_match)]
#![allow(unused_variables)]
#![warn(clippy::blacklisted_name)]
fn test(toto: ()) {}
fn main() {
    // These locals deliberately use blacklisted names (`toto`, `tata`,
    // `titi`) so the `clippy::blacklisted_name` lint enabled above fires;
    // this file appears to be a lint test fixture, so the names must stay.
    let toto = 42;
    let tata = 42;
    let titi = 42;
    // Near-miss names that should NOT trigger the lint.
    let tatab = 42;
    let tatatataic = 42;
    match (42, Some(1337), Some(0)) {
        // Blacklisted names in binding patterns should be flagged too.
        (toto, Some(tata), titi @ Some(_)) => (),
        _ => (),
    }
}
|
// q0062_unique_paths
struct Solution;
impl Solution {
    /// Number of distinct monotone (right/down only) paths across an
    /// `m` x `n` grid, computed with a rolling one-dimensional DP row.
    /// Returns 0 for non-positive dimensions.
    pub fn unique_paths(m: i32, n: i32) -> i32 {
        // Guard each dimension separately: the previous `m * n == 0` check
        // mishandled negative inputs (a negative `m as usize` became a huge
        // allocation size) and `m * n` itself could overflow.
        if m <= 0 || n <= 0 {
            return 0;
        }
        let (m, n) = (m as usize, n as usize);
        // row[j] = paths from column j of the current row to the far corner;
        // the last row is all ones.
        let mut row = vec![1; m];
        for _ in 1..n {
            // Sweep right-to-left: paths = paths-from-below + paths-from-right.
            for j in (0..m - 1).rev() {
                row[j] += row[j + 1];
            }
        }
        row[0]
    }
}
#[cfg(test)]
mod tests {
    use super::Solution;
    // Examples from the problem statement plus degenerate (zero-sized) grids.
    #[test]
    fn it_works() {
        assert_eq!(3, Solution::unique_paths(3, 2));
        assert_eq!(0, Solution::unique_paths(0, 2));
        assert_eq!(0, Solution::unique_paths(0, 0));
        assert_eq!(0, Solution::unique_paths(3, 0));
        assert_eq!(1, Solution::unique_paths(1, 1));
        assert_eq!(28, Solution::unique_paths(7, 3));
    }
}
|
use std::collections::HashSet;
fn main() {
    // Puzzle input is compiled into the binary; the path is relative to
    // this source file.
    let input = include_str!("../data/2015-03.txt");
    println!("Part 1: {}", part1(input));
    println!("Part 2: {}", part2(input));
}
/// Count the distinct houses a single santa visits while following the
/// direction characters in `input`.
fn part1(input: &str) -> usize {
    let mut santa = Santa::new();
    input.chars().for_each(|m| santa.go(m));
    santa.houses.len()
}
/// Count the distinct houses visited when santa and robo-santa take the
/// direction characters in strict alternation (santa goes first).
fn part2(input: &str) -> usize {
    let mut santa = Santa::new();
    let mut robosanta = Santa::new();
    for (i, m) in input.chars().enumerate() {
        // Even indices belong to santa, odd ones to robo-santa.
        if i % 2 == 0 {
            santa.go(m);
        } else {
            robosanta.go(m);
        }
    }
    santa.houses.union(&robosanta.houses).count()
}
/// A walker on the infinite grid, remembering every house (cell) visited.
struct Santa {
    x: i32,
    y: i32,
    houses: HashSet<(i32, i32)>
}
impl Santa {
    /// Start at the origin, which counts as already visited.
    fn new() -> Santa {
        let mut visited = HashSet::new();
        visited.insert((0, 0));
        Santa { x: 0, y: 0, houses: visited }
    }
    /// Take one step (`^`, `v`, `>`, `<`) and record the new position.
    /// Any other character leaves the position unchanged (the current house
    /// is re-inserted, a no-op for the set).
    fn go(&mut self, dir: char) {
        let (dx, dy) = match dir {
            '^' => (0, 1),
            'v' => (0, -1),
            '>' => (1, 0),
            '<' => (-1, 0),
            _ => (0, 0),
        };
        self.x += dx;
        self.y += dy;
        self.houses.insert((self.x, self.y));
    }
}
// Examples from the puzzle statement; `aoc::test` is a project helper that
// runs each (input, expected) pair through the given function.
#[test]
fn test1() {
    aoc::test(part1, [
        (">", 2),
        ("^>v<", 4),
        ("^v^v^v^v^v", 2)
    ])
}
#[test]
fn test2() {
    aoc::test(part2, [
        ("^v", 3),
        ("^>v<", 3),
        ("^v^v^v^v^v", 11)
    ])
}
|
use super::ui::*;
use std::collections::HashMap;
use std::io::Error as IoError;
use std::path::Path;
use std::{fmt, fs};
use toml_document::ParserError as TomlError;
/// IRC client configuration, loaded from a TOML file (see `Config::load`).
#[derive(Debug, Default)]
pub struct Config {
    pub server: String, // server address to connect to
    pub nick: String,   // nickname
    pub user: String,   // username
    pub real: String,   // real name
    pub pass: String,   // server password
    pub keybinds: Keybinds, // UI key bindings
}
/// Reasons loading the configuration can fail.
pub enum Error {
    CannotRead(IoError),    // the file could not be opened or read
    CannotParse(TomlError), // the file is not valid TOML
}
impl fmt::Display for Error {
    /// User-facing description; the underlying cause is logged rather than
    /// shown to the user.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let msg = match self {
            Error::CannotRead(err) => {
                error!("CannotRead: {}", err);
                "cannot read the config file"
            }
            Error::CannotParse(err) => {
                error!("CannotParse: {}", err);
                "cannot parse the config file"
            }
        };
        write!(f, "{}", msg)
    }
}
impl Config {
    /// Load a `Config` from the TOML file at `path`.
    ///
    /// Expects the first table to hold the connection settings (`server`,
    /// `nick`, `user`, `real`, `pass`) and the second table to hold the
    /// keybinds — presumably the same `[irc]`/`[keybinds]` layout that
    /// `dump` writes; verify against a sample config.
    ///
    /// NOTE(review): the trailing `unwrap().unwrap()` calls panic if any of
    /// the five connection keys is missing — consider a dedicated error
    /// variant. TODO confirm this panic is intended.
    pub fn load(path: impl AsRef<Path>) -> Result<Self, Error> {
        let data = fs::read_to_string(path.as_ref()).map_err(Error::CannotRead)?;
        use toml_document::*;
        let doc = Document::parse(&data).map_err(Error::CannotParse)?;
        // Keys we expect in the first (connection) table.
        let list = vec![
            "server".to_string(),
            "nick".to_string(),
            "user".to_string(),
            "real".to_string(),
            "pass".to_string(),
        ];
        let mut map: HashMap<String, Option<String>> = HashMap::new();
        for el in list {
            map.insert(el, None);
        }
        // First container: connection settings. Unknown keys are ignored.
        for child in doc.get_container(0).iter_children() {
            if let ValueRef::String(data) = child.value() {
                if let Some(val) = map.get_mut(child.key().get()) {
                    *val = Some(data.get().to_string());
                }
            }
        }
        // Second container: keybinds. Unparseable entries are skipped.
        let mut keybinds = Keybinds::default();
        for child in doc.get_container(1).iter_children() {
            if let ValueRef::String(data) = child.value() {
                if let Some(req) = KeyRequest::parse(child.key().get().to_string()) {
                    keybinds.insert(KeyType::from(data.get().to_string()), req)
                }
            }
        }
        Ok(Config {
            server: map.remove("server").unwrap().unwrap(),
            nick: map.remove("nick").unwrap().unwrap(),
            user: map.remove("user").unwrap().unwrap(),
            real: map.remove("real").unwrap().unwrap(),
            pass: map.remove("pass").unwrap().unwrap(),
            keybinds,
        })
    }
    /// Serialize this config as TOML (an `[irc]` table followed by a
    /// `[keybinds]` table) and write it to `w`. Panics on write failure.
    pub fn dump(&self, w: &mut impl ::std::io::Write) {
        use toml_document::*;
        let mut doc = Document::new();
        let container = doc.insert_container(0, vec!["irc"].into_iter(), ContainerKind::Table);
        for (i, (k, v)) in [
            ("server", &self.server),
            ("nick", &self.nick),
            ("user", &self.user),
            ("real", &self.real),
            ("pass", &self.pass),
        ]
        .into_iter()
        .enumerate()
        {
            container.insert_string(i, k.to_string(), v.to_string());
        }
        let container = doc.insert_container(1, vec!["keybinds"].into_iter(), ContainerKind::Table);
        for (i, (v, k)) in self.keybinds.iter().enumerate() {
            let _s = container.insert_string(i, format!("{}", v), format!("{}", k));
        }
        writeln!(w, "{}", doc.to_string()).expect("to write config");
    }
    /// Write the config to `riirc.toml` in the working directory.
    /// Panics if the file cannot be created.
    pub fn save(&self) {
        let mut file = fs::File::create("riirc.toml").expect("to create file");
        self.dump(&mut file);
    }
}
|
mod env;
mod eval;
mod ops;
mod parser;
mod tree;
use env::Env;
use eval::*;
use parser::*;
fn main() {
    let mut global_env = Env::new();
    let program = "(+ (/ (- 5 2) 3) (* 2 5.1))";
    // Parse the source, then evaluate the tree in the global environment,
    // reporting which stage failed.
    if let Ok(tree) = parse(program) {
        match eval(&tree, &mut global_env) {
            Ok(val) => println!("{:?}", val),
            Err(_) => println!("eval error"),
        }
    } else {
        println!("parse error");
    }
}
#[cfg(test)]
mod test;
|
//! Methods for dumping serializable structs to a compressed binary format,
//! used to allow fast startup times
//!
//! Currently syntect serializes [`SyntaxSet`] structs with [`dump_to_uncompressed_file`]
//! into `.packdump` files and likewise [`ThemeSet`] structs to `.themedump` files with [`dump_to_file`].
//!
//! You can use these methods to manage your own caching of compiled syntaxes and
//! themes. And even your own `serde::Serialize` structures if you want to
//! be consistent with your format.
//!
//! [`SyntaxSet`]: ../parsing/struct.SyntaxSet.html
//! [`dump_to_uncompressed_file`]: fn.dump_to_uncompressed_file.html
//! [`ThemeSet`]: ../highlighting/struct.ThemeSet.html
//! [`dump_to_file`]: fn.dump_to_file.html
use bincode::Result;
#[cfg(feature = "dump-load")]
use bincode::deserialize_from;
#[cfg(feature = "dump-create")]
use bincode::serialize_into;
use std::fs::File;
#[cfg(feature = "dump-load")]
use std::io::{BufRead};
#[cfg(feature = "dump-create")]
use std::io::{BufWriter, Write};
#[cfg(all(feature = "default-syntaxes"))]
use crate::parsing::SyntaxSet;
#[cfg(all(feature = "default-themes"))]
use crate::highlighting::ThemeSet;
use std::path::Path;
#[cfg(feature = "dump-create")]
use flate2::write::ZlibEncoder;
#[cfg(feature = "dump-load")]
use flate2::bufread::ZlibDecoder;
#[cfg(feature = "dump-create")]
use flate2::Compression;
#[cfg(feature = "dump-create")]
use serde::Serialize;
#[cfg(feature = "dump-load")]
use serde::de::DeserializeOwned;
/// Dumps an object to the given writer in a compressed binary format
///
/// The writer is encoded with the `bincode` crate and compressed with `flate2`.
///
/// # Errors
/// Returns any serialization or I/O error raised while writing.
#[cfg(feature = "dump-create")]
pub fn dump_to_writer<T: Serialize, W: Write>(to_dump: &T, output: W) -> Result<()> {
    serialize_to_writer_impl(to_dump, output, true)
}
/// Dumps an object to a binary array in the same format as [`dump_to_writer`]
///
/// # Panics
/// Panics if serialization fails (writing to a `Vec` itself cannot fail).
///
/// [`dump_to_writer`]: fn.dump_to_writer.html
#[cfg(feature = "dump-create")]
pub fn dump_binary<T: Serialize>(o: &T) -> Vec<u8> {
    let mut v = Vec::new();
    dump_to_writer(o, &mut v).unwrap();
    v
}
/// Dumps an encodable object to a file at a given path, in the same format as [`dump_to_writer`]
///
/// If a file already exists at that path it will be overwritten. The files created are encoded with
/// the `bincode` crate and then compressed with the `flate2` crate.
///
/// # Errors
/// Returns any error from creating the file, serializing, or writing.
///
/// [`dump_to_writer`]: fn.dump_to_writer.html
#[cfg(feature = "dump-create")]
pub fn dump_to_file<T: Serialize, P: AsRef<Path>>(o: &T, path: P) -> Result<()> {
    let out = BufWriter::new(File::create(path)?);
    dump_to_writer(o, out)
}
/// A helper function for decoding and decompressing data from a reader
///
/// # Errors
/// Returns any decompression or deserialization error.
#[cfg(feature = "dump-load")]
pub fn from_reader<T: DeserializeOwned, R: BufRead>(input: R) -> Result<T> {
    deserialize_from_reader_impl(input, true)
}
/// Returns a fully loaded object from a binary dump.
///
/// # Panics
/// This function panics if the dump is invalid.
#[cfg(feature = "dump-load")]
pub fn from_binary<T: DeserializeOwned>(v: &[u8]) -> T {
    from_reader(v).unwrap()
}
/// Returns a fully loaded object from a binary dump file.
///
/// # Errors
/// Returns any error from reading the file (the whole file is read into
/// memory first) or from deserialization.
#[cfg(feature = "dump-load")]
pub fn from_dump_file<T: DeserializeOwned, P: AsRef<Path>>(path: P) -> Result<T> {
    let contents = std::fs::read(path)?;
    from_reader(&contents[..])
}
/// To be used when serializing a [`SyntaxSet`] to a file. A [`SyntaxSet`]
/// itself shall not be compressed, because the data for its lazy-loaded
/// syntaxes are already compressed. Compressing another time just results in
/// bad performance.
///
/// # Errors
/// Returns any error from creating the file, serializing, or writing.
#[cfg(feature = "dump-create")]
pub fn dump_to_uncompressed_file<T: Serialize, P: AsRef<Path>>(o: &T, path: P) -> Result<()> {
    let out = BufWriter::new(File::create(path)?);
    serialize_to_writer_impl(o, out, false)
}
/// To be used when deserializing a [`SyntaxSet`] that was previously written to
/// file using [dump_to_uncompressed_file].
///
/// # Errors
/// Returns any error from reading the file or from deserialization.
#[cfg(feature = "dump-load")]
pub fn from_uncompressed_dump_file<T: DeserializeOwned, P: AsRef<Path>>(path: P) -> Result<T> {
    let contents = std::fs::read(path)?;
    deserialize_from_reader_impl(&contents[..], false)
}
/// To be used when deserializing a [`SyntaxSet`] from raw data, for example
/// data that has been embedded in your own binary with the [`include_bytes!`]
/// macro.
///
/// # Errors
/// Returns any deserialization error.
#[cfg(feature = "dump-load")]
pub fn from_uncompressed_data<T: DeserializeOwned>(v: &[u8]) -> Result<T> {
    deserialize_from_reader_impl(v, false)
}
/// Private low level helper used by the public dump API: bincode-encodes
/// `to_dump` into `output`, optionally behind a zlib encoder.
#[cfg(feature = "dump-create")]
fn serialize_to_writer_impl<T: Serialize, W: Write>(to_dump: &T, output: W, use_compression: bool) -> Result<()> {
    if !use_compression {
        return serialize_into(output, to_dump);
    }
    // Maximum compression keeps the on-disk dumps small.
    let mut encoder = ZlibEncoder::new(output, Compression::best());
    serialize_into(&mut encoder, to_dump)
}
/// Private low level helper function used to implement the public API.
///
/// Deserializes a value from `input`, optionally running the bytes through a
/// zlib decoder first.
#[cfg(feature = "dump-load")]
fn deserialize_from_reader_impl<T: DeserializeOwned, R: BufRead>(input: R, use_compression: bool) -> Result<T> {
    // Guard clause: the uncompressed path reads straight through.
    if !use_compression {
        return deserialize_from(input);
    }
    let mut decoder = ZlibDecoder::new(input);
    deserialize_from(&mut decoder)
}
#[cfg(feature = "default-syntaxes")]
impl SyntaxSet {
    /// Instantiates a new syntax set from a binary dump of Sublime Text's default open source
    /// syntax definitions.
    ///
    /// These dumps are included in this library's binary for convenience.
    ///
    /// This method loads the version for parsing line strings with no `\n` characters at the end.
    /// If you're able to efficiently include newlines at the end of strings, use
    /// [`load_defaults_newlines`] since it works better. See [`SyntaxSetBuilder::add_from_folder`]
    /// for more info on this issue.
    ///
    /// This is the recommended way of creating a syntax set for non-advanced use cases. It is also
    /// significantly faster than loading the YAML files.
    ///
    /// Note that you can load additional syntaxes after doing this. If you want you can even use
    /// the fact that SyntaxDefinitions are serializable with the bincode crate to cache dumps of
    /// additional syntaxes yourself.
    ///
    /// [`load_defaults_newlines`]: #method.load_defaults_newlines
    /// [`SyntaxSetBuilder::add_from_folder`]: struct.SyntaxSetBuilder.html#method.add_from_folder
    pub fn load_defaults_nonewlines() -> SyntaxSet {
        #[cfg(feature = "metadata")]
        {
            let mut ps: SyntaxSet = from_uncompressed_data(include_bytes!("../assets/default_nonewlines.packdump")).unwrap();
            let metadata = from_binary(include_bytes!("../assets/default_metadata.packdump"));
            ps.metadata = metadata;
            ps
        }
        #[cfg(not(feature = "metadata"))]
        {
            from_uncompressed_data(include_bytes!("../assets/default_nonewlines.packdump")).unwrap()
        }
    }
    /// Same as [`load_defaults_nonewlines`] but for parsing line strings with newlines at the end.
    ///
    /// These are separate methods because thanks to linker garbage collection, only the serialized
    /// dumps for the method(s) you call will be included in the binary (each is ~200kb for now).
    ///
    /// [`load_defaults_nonewlines`]: #method.load_defaults_nonewlines
    pub fn load_defaults_newlines() -> SyntaxSet {
        #[cfg(feature = "metadata")]
        {
            let mut ps: SyntaxSet = from_uncompressed_data(include_bytes!("../assets/default_newlines.packdump")).unwrap();
            let metadata = from_binary(include_bytes!("../assets/default_metadata.packdump"));
            ps.metadata = metadata;
            ps
        }
        #[cfg(not(feature = "metadata"))]
        {
            from_uncompressed_data(include_bytes!("../assets/default_newlines.packdump")).unwrap()
        }
    }
}
// `cfg(all(...))` with a single predicate was redundant; the bare predicate is
// equivalent and clearer.
#[cfg(feature = "default-themes")]
impl ThemeSet {
    /// Loads the set of default themes
    /// Currently includes (these are the keys for the map):
    ///
    /// - `base16-ocean.dark`,`base16-eighties.dark`,`base16-mocha.dark`,`base16-ocean.light`
    /// - `InspiredGitHub` from [here](https://github.com/sethlopezme/InspiredGitHub.tmtheme)
    /// - `Solarized (dark)` and `Solarized (light)`
    pub fn load_defaults() -> ThemeSet {
        from_binary(include_bytes!("../assets/default.themedump"))
    }
}
#[cfg(test)]
mod tests {
    // Round-trips a SyntaxSet through dump_binary/from_binary and checks that
    // the syntax count survives the trip.
    #[cfg(all(feature = "yaml-load", feature = "dump-create", feature = "dump-load", feature = "parsing"))]
    #[test]
    fn can_dump_and_load() {
        use super::*;
        use crate::parsing::SyntaxSetBuilder;
        let mut builder = SyntaxSetBuilder::new();
        builder.add_from_folder("testdata/Packages", false).unwrap();
        let ss = builder.build();
        let bin = dump_binary(&ss);
        println!("{:?}", bin.len());
        let ss2: SyntaxSet = from_binary(&bin[..]);
        assert_eq!(ss.syntaxes().len(), ss2.syntaxes().len());
    }
    // Builds the same SyntaxSet twice from the same folder and verifies the
    // serialized output is byte-for-byte identical (reproducible dumps).
    #[cfg(all(feature = "yaml-load", feature = "dump-create", feature = "dump-load"))]
    #[test]
    fn dump_is_deterministic() {
        use super::*;
        use crate::parsing::SyntaxSetBuilder;
        let mut builder1 = SyntaxSetBuilder::new();
        builder1.add_from_folder("testdata/Packages", false).unwrap();
        let ss1 = builder1.build();
        let bin1 = dump_binary(&ss1);
        let mut builder2 = SyntaxSetBuilder::new();
        builder2.add_from_folder("testdata/Packages", false).unwrap();
        let ss2 = builder2.build();
        let bin2 = dump_binary(&ss2);
        // This is redundant, but assert_eq! can be really slow on a large
        // vector, so check the length first to fail faster.
        assert_eq!(bin1.len(), bin2.len());
        assert_eq!(bin1, bin2);
    }
    // Sanity-check: the bundled theme dump should contain more than the four
    // base16 themes documented on `ThemeSet::load_defaults`.
    #[cfg(feature = "default-themes")]
    #[test]
    fn has_default_themes() {
        use crate::highlighting::ThemeSet;
        let themes = ThemeSet::load_defaults();
        assert!(themes.themes.len() > 4);
    }
}
|
use crate::http::{delete_step_page, create_step_page, update_step_page, read_step_page};
use crate::storage::{
storage_actor::StorageExecutor,
storage_driver::StorageDriver,
};
use actix::{SyncArbiter, System, SystemRunner};
use actix_files::Files;
use actix_web::{
middleware,
web::{delete, get, post, put, resource},
App, HttpServer,
};
use anyhow::Result;
use proger_core::{
API_URL_V1_DELETE_PAGE, API_URL_V1_CREATE_STEP_PAGE, API_URL_V1_UPDATE_STEP_PAGE, API_URL_V1_READ_STEP_PAGE,
};
use tokio::runtime::Runtime;
/// The server instance
pub struct Server {
    /// Actix system runner driving the event loop; consumed by [`Server::start`].
    runner: SystemRunner,
}
impl Server {
    /// Builds the actor system, spawns the storage executor, and binds the
    /// HTTP server to `host`. Serving does not begin until [`Server::start`]
    /// is called on the returned instance.
    ///
    /// # Errors
    /// Fails when `host` cannot be bound.
    pub fn new<T: StorageDriver + Sync + Send + Clone>(host: String, storage: T) -> Result<Self> {
        // Build a new actor system
        let runner = System::new("backend");
        // NOTE(review): the result of `connect()` is discarded, so a failed
        // storage connection is silently ignored here — confirm this is intended.
        let _ = storage.connect();
        // One sync arbiter thread owning the storage driver; the closure builds
        // an executor with its own Tokio runtime for that thread.
        let storage_executor = SyncArbiter::start(1, move || StorageExecutor {
            driver: storage.clone(),
            // TODO how to avoid unwrap here?
            rt: Runtime::new().unwrap(),
        });
        // Create the server
        let server = HttpServer::new(move || {
            App::new()
                .wrap(middleware::Logger::default())
                .data(storage_executor.clone())
                .service(resource(API_URL_V1_CREATE_STEP_PAGE).route(post().to(create_step_page::<T>)))
                .service(resource(API_URL_V1_UPDATE_STEP_PAGE).route(put().to(update_step_page::<T>)))
                .service(
                    resource(API_URL_V1_DELETE_PAGE).route(delete().to(delete_step_page::<T>)),
                )
                .service(resource(API_URL_V1_READ_STEP_PAGE).route(get().to(read_step_page::<T>)))
                // Everything else is served from the static frontend bundle.
                .service(Files::new("/", "./static/").index_file("index.html"))
        });
        // Registers the bound listener with the actor system; the listener is
        // driven once the system runner runs in `start`.
        server.bind(host.as_str())?.run();
        Ok(Server { runner })
    }
    /// Start the server
    pub fn start(self) -> Result<()> {
        // Start the actual main server
        self.runner.run()?;
        Ok(())
    }
}
|
/// Returns `true` when `s` is a subsequence of `t`, i.e. all characters of `s`
/// appear in `t` in the same relative order (not necessarily contiguously).
///
/// The previous recursive version sliced with `&s[1..]` after taking a *char*,
/// which panics on multi-byte UTF-8 input and could overflow the stack on very
/// long inputs; this iterative two-pointer version over `chars()` fixes both.
pub fn is_subsequence(s: String, t: String) -> bool {
    let mut s_chars = s.chars();
    // The next character of `s` still waiting to be matched; `None` = done.
    let mut pending = s_chars.next();
    for tc in t.chars() {
        match pending {
            // All of `s` already matched — early exit.
            None => return true,
            // Matched one character of `s`; advance to the next.
            Some(sc) if sc == tc => pending = s_chars.next(),
            // No match; keep scanning `t`.
            _ => {}
        }
    }
    pending.is_none()
}
#[test]
fn test_is_subsequence() {
    // (needle, haystack, expected)
    let cases = [
        ("abc", "ahbgdc", true),
        ("axc", "ahbgdc", false),
    ];
    for &(s, t, expected) in cases.iter() {
        assert_eq!(is_subsequence(s.to_string(), t.to_string()), expected);
    }
}
use crate::{
automata::{
eliminate_epsilon_transitions, reachable_states, TyAlphabet, TyAutomaton, TyAutomatonData,
TypeAutomataEdgesVisitor, TypeStateData, TypeStateVisitor,
},
error::TyError,
fold::TyFoldable,
polarity::{CorrectPolarityFolder, Polarity, PolarityVisitor},
scope::{ScopeVisitor, ScopedTy, TyScope},
ty::TyBuilder,
Ty, TyDatabase,
};
use hashbrown::HashSet;
use indexmap::IndexMap;
use valis_ds::{hashed::Hashed, set::sorted_vector::VectorSet};
use valis_hir::{fold::HirFoldable, prelude as hir};
/// Builds a `Ty` from a HIR type node, then normalizes its stored polarity.
pub(crate) fn ty_from_hir(db: &impl TyDatabase, hir_node: hir::Type) -> Result<Ty, TyError> {
    let mut builder = TyBuilder::new(db);
    let raw_ty = hir_node.visit_with(&mut builder)?;
    // Recompute the overall polarity of the freshly-built type and coerce the
    // type so its recorded polarity agrees with it.
    let overall = raw_ty.recompute_overall_polarity(db);
    Ok(db.ty_coerce_polarity(raw_ty, overall))
}
/// Walks `ty` with a polarity visitor; `None` indicates a non-polar type.
pub(crate) fn ty_recompute_overall_polarity(db: &impl TyDatabase, ty: Ty) -> Option<Polarity> {
    let mut polarity_visitor = PolarityVisitor::new(db);
    ty.visit_with(&mut polarity_visitor)
}
/// Rewrites `ty` so that its stored polarity matches `assign_polarity`.
pub(crate) fn ty_coerce_polarity(
    db: &impl TyDatabase,
    ty: Ty,
    assign_polarity: Option<Polarity>,
) -> Ty {
    let mut folder = CorrectPolarityFolder::new(db, assign_polarity);
    ty.fold_with(&mut folder)
}
/// Collects the scope (identifiers, subterms, placeholders) of `ty`.
pub(crate) fn ty_scope_data(db: &impl TyDatabase, ty: Ty) -> TyScope {
    let mut scope_visitor = ScopeVisitor::new(db);
    // The visitor accumulates scope entries as a side effect of the walk.
    ty.visit_with(&mut scope_visitor);
    scope_visitor.into_scope()
}
/// Returns the set of all subterms of `ty`, derived from its scope data.
pub(crate) fn ty_subterms(db: &impl TyDatabase, ty: Ty) -> HashSet<Ty> {
    // The scope data already enumerates every subterm; unwrap the hash-consed
    // wrapper and gather the mapping entries into a set.
    let scope: TyScope = ty.scope_data(db);
    let inner = Hashed::into_inner(scope);
    inner.ty_mapping.into_iter().collect()
}
// TODO find some other method; this is an unnecessarily expensive way to check
// ty equality while ignoring polarity
pub(crate) fn ty_structual_eq(db: &impl TyDatabase, lhs: Ty, rhs: Ty) -> bool {
    // Folding both sides with the `None` (non-polar) polarity erases polarity
    // information, so the two fold results intern to the same underlying type
    // exactly when the structures match.
    let mut polarity_eraser = CorrectPolarityFolder::new(db, None);
    lhs.fold_with(&mut polarity_eraser) == rhs.fold_with(&mut polarity_eraser)
}
/// Lowers a `Ty` into a `TyAutomaton`, paired with a `TyScope` reduced to the
/// automaton's reachable states.
///
/// # Errors
/// Returns `TyError::InvalidPolarType` when `ty` has no polarity, and
/// propagates any error raised by the edge/state visitors.
pub(crate) fn ty_to_type_automaton(
    db: &impl TyDatabase,
    ty: Ty,
) -> Result<(TyAutomaton, TyScope), TyError> {
    // Idiomatic `.is_none()` instead of `None == ...` (also drops the stray
    // trailing semicolon the old `if` carried).
    if ty.polarity(db).is_none() {
        return Err(TyError::InvalidPolarType);
    }
    // generate a structure containing all the identifiers, types, and placeholders
    // used in the ty
    let scope_data = ty.scope_data(db);
    // The visit the ty again to generate sets of transitions, epsilon and not. The
    // start state falls out of this analysis
    let mut edges_visitor = TypeAutomataEdgesVisitor::new(db, &scope_data);
    let start_state = ty.visit_with(&mut edges_visitor)?;
    let (mut transitions, epsilon_transitions) = edges_visitor.into_edges();
    // This visit calculates the head constructor sets and extracts the polarity of
    // the subterms. This is the whole of the data for each state in the type
    // automata
    let mut states_visitor = TypeStateVisitor::new(db, &scope_data);
    ty.visit_with(&mut states_visitor)?;
    let mut states = states_visitor.into_states()?;
    // Then we eliminate epsilon transitions, updating head constructors and adding
    // new concrete transitions
    eliminate_epsilon_transitions(&mut states, &mut transitions, epsilon_transitions);
    // The epsilon removal often leaves some states unreachable, which indicates
    // that they can be removed without affecting the language that the automata
    // accepts. The issue with this is that ScopedTy depends on a Ty's position in
    // the ty_mapping table in the TyScope object.
    //
    // Solutions to the mapping issue:
    // - We could make the mapping of TyScope to TypeStateData in TypeAutomata and
    //   TypeSchemeAutomata sparse, i.e. using a HashMap instead of a Vec. Pros:
    //   HashMaps are easier to use and more versatile than an implicit mapping
    //   based on position. Cons: HashMaps take up more space, and there will
    //   likely be many TypeAutomata/TypeSchemeAutomata
    // - Output a new TyScope object that has been updated to remove the
    //   unreachable states (luckily placeholders and labels can remain the same
    //   because the epsilon/unreachable removal process is only condensing
    //   available information, not discarding any). Con: More TyScope objects to
    //   track, plus differentiating a TyScope derived directly from a Ty and a
    //   TyScope that has been refined in the automata conversion process is
    //   difficult. Pros: Allows us to retain the dense mapping from ScopedTy to
    //   TypeStateData
    //
    // Current choice is second option, creating new TyScope
    let reachable_states: HashSet<ScopedTy> = {
        let transitions = &transitions;
        reachable_states(Some(start_state), move |state| {
            transitions
                .get(&state)
                .cloned()
                .into_iter()
                .flat_map(|edges| edges.into_iter().map(|(_, next_state)| next_state))
        })
        .collect()
    };
    // Drop unreachable entries and obtain a remapping closure from old to new
    // ScopedTy indices.
    let (reduced_scope_data, scope_mapper) =
        scope_data.filter_tys(|state, _| reachable_states.contains(&state));
    let reduced_states: Vec<TypeStateData> = states
        .into_iter()
        .enumerate()
        .map(|(idx, val)| (ScopedTy::from(idx), val))
        .filter_map(|(scoped_ty, val)| scope_mapper(scoped_ty).map(|_| val))
        .collect();
    let reduced_transitions: IndexMap<ScopedTy, VectorSet<(TyAlphabet, ScopedTy)>> = transitions
        .into_iter()
        .filter_map(|(from_state, edges)| {
            scope_mapper(from_state).map(|new_from_state| {
                (
                    new_from_state,
                    edges
                        .into_iter()
                        .filter_map(|(alpha, to_state)| {
                            scope_mapper(to_state).map(|new_to_state| (alpha, new_to_state))
                        })
                        .collect(),
                )
            })
        })
        .collect();
    // This should never panic because the reachability should always include the
    // start state
    let new_start_state = scope_mapper(start_state).unwrap();
    let type_automaton: TyAutomaton = TyAutomatonData {
        transitions: reduced_transitions,
        states: reduced_states,
        start_state: new_start_state,
    }
    .into();
    Ok((type_automaton, reduced_scope_data))
}
|
use std::fs::File;
use std::io::Read;
/// Advent of Code 2020 day 2: counts passwords whose policy character occurs
/// a number of times within the given inclusive bounds.
fn main() {
    // `fs::read_to_string` replaces the manual File::open + read_to_string dance.
    let input = std::fs::read_to_string("d02-input").unwrap();
    let mut valid_pass = 0;
    for line in input.lines() {
        // "1-3 a: abcde" -> ["1-3", "a", "abcde"]
        let normalized = line.replace(":", " ");
        let chunks: Vec<_> = normalized.split_whitespace().collect();
        // chunks[0] = range, chunks[1] = char, chunks[2] = password
        let bounds: Vec<_> = chunks[0].split("-").collect();
        // bounds[0] = lower limit, bounds[1] = upper limit
        let lower: usize = bounds[0].parse().unwrap();
        let upper: usize = bounds[1].parse().unwrap();
        let count = chunks[2].matches(chunks[1]).count();
        // Inclusive-range containment replaces the nested `if`s.
        if (lower..=upper).contains(&count) {
            valid_pass += 1;
        }
    }
    println!("Number of valid passwords: {}", valid_pass);
}
#![doc(test(attr(deny(warnings))))]
//! Joachim Henke's basE91 encoding implementation for Rust
//! http://base91.sourceforge.net
use std::iter::Iterator;
/// Encoding table: maps a digit value in `0..=90` to its basE91 alphabet byte.
/// The ordering matches Joachim Henke's reference implementation; note that
/// `"` is the last entry (value 90).
const ENTAB: [u8; 91] = [
    b'A', b'B', b'C', b'D', b'E', b'F', b'G', b'H', b'I', b'J', b'K', b'L', b'M',
    b'N', b'O', b'P', b'Q', b'R', b'S', b'T', b'U', b'V', b'W', b'X', b'Y', b'Z',
    b'a', b'b', b'c', b'd', b'e', b'f', b'g', b'h', b'i', b'j', b'k', b'l', b'm',
    b'n', b'o', b'p', b'q', b'r', b's', b't', b'u', b'v', b'w', b'x', b'y', b'z',
    b'0', b'1', b'2', b'3', b'4', b'5', b'6', b'7', b'8', b'9', b'!', b'#', b'$',
    b'%', b'&', b'(', b')', b'*', b'+', b',', b'.', b'/', b':', b';', b'<', b'=',
    b'>', b'?', b'@', b'[', b']', b'^', b'_', b'`', b'{', b'|', b'}', b'~', b'"'
];
/// Decoding table: maps an input byte to its basE91 digit value (`0..=90`).
/// Entries holding `91` mark bytes that are not part of the alphabet; the
/// decoder skips them. This is the inverse of `ENTAB`.
const DETAB: [u8; 256] = [
    91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91,
    91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91,
    91, 62, 90, 63, 64, 65, 66, 91, 67, 68, 69, 70, 71, 91, 72, 73,
    52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 74, 75, 76, 77, 78, 79,
    80, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14,
    15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 81, 91, 82, 83, 84,
    85, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40,
    41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 86, 87, 88, 89, 91,
    91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91,
    91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91,
    91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91,
    91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91,
    91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91,
    91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91,
    91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91,
    91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91
];
/// Streaming basE91 encoder: consumes bytes from `data` and emits encoded
/// bytes through the `out` callback.
///
/// Input bits are accumulated little-endian in `rem`. Whenever more than 13
/// bits are buffered, a group of 13 or 14 bits is consumed (14 when the low
/// 13 bits are <= 88) and emitted as two alphabet bytes, low digit first.
/// This variable group size is what gives basE91 its density.
pub fn iter_encode<I, O>(data: I, mut out: O)
where
    I: Iterator<Item=u8>,
    O: FnMut(u8)
{
    let mut key: u32;
    let mut rem: u32 = 0; // bit accumulator
    let mut shift: u32 = 0; // number of valid bits currently held in `rem`
    for b in data {
        rem |= (b as u32) << shift;
        shift += 8;
        if shift > 13 {
            key = rem & 8191; // low 13 bits
            if key > 88 {
                rem >>= 13;
                shift -= 13;
            } else {
                // Low 13 bits were <= 88: take a 14-bit group instead.
                key = rem & 16383;
                rem >>= 14;
                shift -= 14;
            }
            out(ENTAB[(key % 91) as usize]);
            out(ENTAB[(key / 91) as usize]);
        }
    }
    // Flush any leftover bits: one byte always, and a second byte only when it
    // actually carries information (more than 7 pending bits, or value > 90).
    if shift > 0 {
        out(ENTAB[(rem % 91) as usize]);
        if shift > 7 || rem > 90 {
            out(ENTAB[(rem / 91) as usize]);
        }
    }
}
/// Streaming basE91 decoder: consumes encoded bytes from `data` and emits
/// decoded bytes through the `out` callback. Non-alphabet bytes are skipped.
pub fn iter_decode<I, O>(data: I, mut out: O)
where
    I: Iterator<Item=u8>,
    O: FnMut(u8)
{
    // `buf` holds the first digit of a pending pair; -1 means "no digit yet".
    let mut buf: i32 = -1;
    let mut key: i32;
    let mut rem: i32 = 0; // bit accumulator
    let mut shift: i32 = 0; // number of valid bits currently held in `rem`
    for b in data.map(|b| b as usize) {
        key = DETAB[b] as i32;
        if key == 91 {
            // 91 marks bytes outside the alphabet; ignore them.
            continue;
        }
        if buf == -1 {
            buf = key;
        } else {
            // Second digit of the pair: reconstruct the 13/14-bit group.
            buf += key * 91;
            rem |= buf << shift;
            // Mirror of the encoder's rule: a group whose low 13 bits exceed
            // 88 contributed 13 bits, all other groups contributed 14.
            shift += if (buf & 8191) > 88 { 13 } else { 14 };
            // do-while loop: emit whole bytes while at least 8 bits remain.
            while {
                out(rem as u8);
                rem >>= 8;
                shift -= 8;
                shift > 7
            } {};
            buf = -1;
        }
    }
    // A trailing unpaired digit encodes the final partial byte.
    if buf != -1 {
        out((rem | buf << shift) as u8);
    }
}
/// Encodes `value` into a freshly-allocated basE91 byte vector.
///
/// Capacity hint: basE91's worst-case expansion is about 1.23x, so `13 / 10`
/// is a safe upper bound that avoids reallocation.
pub fn slice_encode(value: &[u8]) -> Vec<u8> {
    let mut result = Vec::with_capacity(value.len() * 13 / 10);
    // `.copied()` replaces the manual `.map(|v| *v)` dereference (clippy: map_clone).
    iter_encode(value.iter().copied(), |v| result.push(v));
    result
}
/// Decodes basE91-encoded `value` into a freshly-allocated byte vector.
/// Decoded output is never longer than the input, so `value.len()` is a safe
/// capacity bound.
pub fn slice_decode(value: &[u8]) -> Vec<u8> {
    let mut result = Vec::with_capacity(value.len());
    // `.copied()` replaces the manual `.map(|v| *v)` dereference (clippy: map_clone).
    iter_decode(value.iter().copied(), |v| result.push(v));
    result
}
#[cfg(test)]
mod tests {
    use super::*;
    // Known-good (plaintext, basE91) pairs from the reference encoding.
    fn get_pairs() -> Vec<(&'static str, &'static str)> {
        let data = vec![
            ("test", "fPNKd"),
            ("vest", "hPNKd"),
            (
                "5Fq99ztBNtv+NsWSdNS04dnyiC81Qf4dsbz6Y5elKaR+KVsAWoiK0SdBiVg2hC/FXpX0Zozw8Hd4",
                "qRqgWoRZ!L0/|msb}%dHM3;BQJX%1Q$XowN0=kHTcR5<Q81jMgz1qelja%$gNQva~1;1C:Zp>I.E2*Df))Xxc>Gq_JDzbC"
            )
        ];
        data
    }
    // Each plaintext must encode to its known basE91 form.
    #[test]
    fn test_encode() {
        for pair in get_pairs() {
            assert_eq!(
                &String::from_utf8_lossy(&slice_encode(pair.0.as_bytes())[..]),
                pair.1
            );
        }
    }
    // Each known basE91 form must decode back to its plaintext.
    #[test]
    fn test_decode() {
        for pair in get_pairs() {
            assert_eq!(
                &String::from_utf8_lossy(&slice_decode(pair.1.as_bytes())[..]),
                pair.0
            );
        }
    }
    // Round-trips 10000 pseudo-random 256-byte buffers through encode/decode.
    #[test]
    fn test_integrity() {
        use std::collections::hash_map::RandomState;
        use std::hash::{BuildHasher, Hasher};
        let mut buf: [u8; 256] = [0; 256];
        for _ in 0..10000 {
            // RandomState seeds every hasher randomly, so hashing a constant
            // still produces varied 8-byte chunks to fill the buffer with.
            for i in 0..32 {
                let mut hasher = RandomState::new().build_hasher();
                hasher.write_u32(1);
                let value = hasher.finish();
                let bytes = value.to_ne_bytes();
                for j in 0 .. 8 {
                    buf[i*8 + j] = bytes[j];
                }
            }
            let encoded = slice_encode(&buf);
            let decoded = slice_decode(&encoded);
            assert_eq!(&decoded[..], &buf[..]);
        }
    }
}
|
use std::error::Error;
#[macro_use] extern crate text_io;
extern crate clap;
use clap::{Arg, App, SubCommand, value_t};
mod modules;
pub use modules::day1;
pub use modules::day2;
pub use modules::day3;
pub use modules::day4;
pub use modules::day5;
pub use modules::day6;
/// Entry point: parses the CLI and dispatches to the requested day/part solver.
///
/// Fix: the fallback message in the `day1` branch previously said
/// "unsolved day 2 challenge part" — it now names day 1 correctly.
fn main() -> Result<(), Box<dyn Error>> {
    let matches = App::new("AdventOfCode2019")
        .version("1.0")
        .author("Tomas Farias")
        .subcommand(SubCommand::with_name("day1")
            .about("Day 1 challenge")
            .arg(Arg::with_name("input")
                .short("i")
                .takes_value(true)
                .required(true)
                .help("Sets the path to the challenge input file"))
            .subcommand(SubCommand::with_name("part1"))
            .subcommand(SubCommand::with_name("part2")))
        .subcommand(SubCommand::with_name("day2")
            .about("Day 2 challenge")
            .arg(Arg::with_name("input")
                .short("i")
                .takes_value(true)
                .required(true)
                .help("Sets the path to the challenge input file"))
            .subcommand(SubCommand::with_name("part1")
                .arg(Arg::with_name("noun")
                    .short("n")
                    .takes_value(true)
                    .required(true)
                    .help("Sets the intcode machine noun"))
                .arg(Arg::with_name("verb")
                    .short("v")
                    .takes_value(true)
                    .required(true)
                    .help("Sets the intcode machine verb")))
            .subcommand(SubCommand::with_name("part2")
                .arg(Arg::with_name("target")
                    .short("t")
                    .takes_value(true)
                    .required(true)
                    .help("Sets the intcode machine target"))))
        .subcommand(SubCommand::with_name("day3")
            .about("Day 3 challenge")
            .arg(Arg::with_name("input")
                .short("i")
                .takes_value(true)
                .required(true)
                .help("Sets the path to the challenge input file"))
            .subcommand(SubCommand::with_name("part1"))
            .subcommand(SubCommand::with_name("part2")))
        .subcommand(SubCommand::with_name("day4")
            .about("Day 4 challenge")
            .arg(Arg::with_name("input")
                .short("i")
                .takes_value(true)
                .required(true)
                .help("Sets the path to the challenge input file"))
            .subcommand(SubCommand::with_name("part1"))
            .subcommand(SubCommand::with_name("part2")))
        // Day 5 has a single solver, so it takes no part subcommands.
        .subcommand(SubCommand::with_name("day5")
            .about("Day 5 challenge")
            .arg(Arg::with_name("input")
                .short("i")
                .takes_value(true)
                .required(true)
                .help("Sets the path to the challenge input file")))
        .subcommand(SubCommand::with_name("day6")
            .about("Day 6 challenge")
            .arg(Arg::with_name("input")
                .short("i")
                .takes_value(true)
                .required(true)
                .help("Sets the path to the challenge input file"))
            .subcommand(SubCommand::with_name("part1"))
            .subcommand(SubCommand::with_name("part2")))
        .get_matches();
    let result = match matches.subcommand() {
        ("day1", Some(day1_matches)) => {
            let input = day1_matches.value_of("input").unwrap();
            match day1_matches.subcommand() {
                ("part1", _) => {
                    day1::run(input, 1).unwrap()
                },
                ("part2", _) => {
                    day1::run(input, 2).unwrap()
                },
                _ => {
                    // Was "day 2" by copy-paste mistake; this is the day 1 branch.
                    println!("Unrecognized command or unsolved day 1 challenge part");
                    return Ok(());
                },
            }
        },
        ("day2", Some(day2_matches)) => {
            let input = day2_matches.value_of("input").unwrap();
            match day2_matches.subcommand() {
                ("part1", Some(day2_part1_matches)) => {
                    let noun = value_t!(day2_part1_matches.value_of("noun"), i32).unwrap();
                    let verb = value_t!(day2_part1_matches.value_of("verb"), i32).unwrap();
                    day2::run_part1(input, noun, verb).unwrap()
                },
                ("part2", Some(day2_part2_matches)) => {
                    let target = value_t!(day2_part2_matches.value_of("target"), i32).unwrap();
                    day2::run_part2(input, target).unwrap()
                },
                _ => {
                    println!("Unrecognized command or unsolved day 2 challenge part");
                    return Ok(());
                },
            }
        },
        ("day3", Some(day3_matches)) => {
            let input = day3_matches.value_of("input").unwrap();
            match day3_matches.subcommand() {
                ("part1", _) => {
                    day3::run_part1(input).unwrap()
                },
                ("part2", _) => {
                    day3::run_part2(input).unwrap()
                },
                _ => {
                    println!("Unrecognized command or unsolved day 3 challenge part");
                    return Ok(());
                },
            }
        },
        ("day4", Some(day4_matches)) => {
            let input = day4_matches.value_of("input").unwrap();
            match day4_matches.subcommand() {
                ("part1", _) => {
                    day4::run_part1(input).unwrap()
                },
                ("part2", _) => {
                    day4::run_part2(input).unwrap()
                },
                _ => {
                    println!("Unrecognized command or unsolved day 4 challenge part");
                    return Ok(());
                },
            }
        },
        ("day5", Some(day5_matches)) => {
            let input = day5_matches.value_of("input").unwrap();
            day5::run(input).unwrap()
        },
        ("day6", Some(day6_matches)) => {
            let input = day6_matches.value_of("input").unwrap();
            match day6_matches.subcommand() {
                ("part1", _) => {
                    day6::run_part1(input).unwrap()
                },
                ("part2", _) => {
                    day6::run_part2(input).unwrap()
                },
                _ => {
                    println!("Unrecognized command or unsolved day 6 challenge part");
                    return Ok(());
                },
            }
        },
        _ => {
            println!("Unrecognized command or unsolved day challenge");
            return Ok(());
        },
    };
    println!("{}", result);
    Ok(())
}
|
use failure::Error;
use rocket::State;
use rocket_contrib::json::{Json, JsonValue};
use crate::Context;
use crate::model::link::{ LinkDAO, Link };
/// Returns the latest links as JSON: `{ "links": [...] }`.
#[get("/all")]
pub fn all(ctx: State<Context>) -> Result<JsonValue, Error> {
    // A DAO is built per request from a cloned handle to the shared database.
    let link_dao = LinkDAO::new(ctx.db.clone())?;
    match link_dao.get_latest() {
        Ok(links) => Ok(json!({ "links": links })),
        // Replace the placeholder "foo" with a message that identifies the
        // failing operation. NOTE(review): consider attaching the underlying
        // error as the cause if its type supports it.
        Err(_) => Err(format_err!("failed to fetch latest links")),
    }
}
|
// Code generated by software.amazon.smithy.rust.codegen.smithy-rs. DO NOT EDIT.
pub fn deser_header_add_layer_version_permission_retry_after_seconds(
header_map: &http::HeaderMap,
) -> Result<std::option::Option<std::string::String>, smithy_http::header::ParseError> {
let headers = header_map.get_all("Retry-After").iter();
let var_1: Vec<std::string::String> = smithy_http::header::read_many(headers)?;
if var_1.len() > 1 {
Err(smithy_http::header::ParseError)
} else {
let mut var_1 = var_1;
Ok(var_1.pop())
}
}
pub fn deser_header_add_permission_retry_after_seconds(
header_map: &http::HeaderMap,
) -> Result<std::option::Option<std::string::String>, smithy_http::header::ParseError> {
let headers = header_map.get_all("Retry-After").iter();
let var_2: Vec<std::string::String> = smithy_http::header::read_many(headers)?;
if var_2.len() > 1 {
Err(smithy_http::header::ParseError)
} else {
let mut var_2 = var_2;
Ok(var_2.pop())
}
}
pub fn deser_header_create_alias_retry_after_seconds(
header_map: &http::HeaderMap,
) -> Result<std::option::Option<std::string::String>, smithy_http::header::ParseError> {
let headers = header_map.get_all("Retry-After").iter();
let var_3: Vec<std::string::String> = smithy_http::header::read_many(headers)?;
if var_3.len() > 1 {
Err(smithy_http::header::ParseError)
} else {
let mut var_3 = var_3;
Ok(var_3.pop())
}
}
pub fn deser_header_create_event_source_mapping_retry_after_seconds(
header_map: &http::HeaderMap,
) -> Result<std::option::Option<std::string::String>, smithy_http::header::ParseError> {
let headers = header_map.get_all("Retry-After").iter();
let var_4: Vec<std::string::String> = smithy_http::header::read_many(headers)?;
if var_4.len() > 1 {
Err(smithy_http::header::ParseError)
} else {
let mut var_4 = var_4;
Ok(var_4.pop())
}
}
pub fn deser_header_create_function_retry_after_seconds(
header_map: &http::HeaderMap,
) -> Result<std::option::Option<std::string::String>, smithy_http::header::ParseError> {
let headers = header_map.get_all("Retry-After").iter();
let var_5: Vec<std::string::String> = smithy_http::header::read_many(headers)?;
if var_5.len() > 1 {
Err(smithy_http::header::ParseError)
} else {
let mut var_5 = var_5;
Ok(var_5.pop())
}
}
pub fn deser_header_delete_alias_retry_after_seconds(
header_map: &http::HeaderMap,
) -> Result<std::option::Option<std::string::String>, smithy_http::header::ParseError> {
let headers = header_map.get_all("Retry-After").iter();
let var_6: Vec<std::string::String> = smithy_http::header::read_many(headers)?;
if var_6.len() > 1 {
Err(smithy_http::header::ParseError)
} else {
let mut var_6 = var_6;
Ok(var_6.pop())
}
}
pub fn deser_header_delete_event_source_mapping_retry_after_seconds(
header_map: &http::HeaderMap,
) -> Result<std::option::Option<std::string::String>, smithy_http::header::ParseError> {
let headers = header_map.get_all("Retry-After").iter();
let var_7: Vec<std::string::String> = smithy_http::header::read_many(headers)?;
if var_7.len() > 1 {
Err(smithy_http::header::ParseError)
} else {
let mut var_7 = var_7;
Ok(var_7.pop())
}
}
pub fn deser_header_delete_function_retry_after_seconds(
header_map: &http::HeaderMap,
) -> Result<std::option::Option<std::string::String>, smithy_http::header::ParseError> {
let headers = header_map.get_all("Retry-After").iter();
let var_8: Vec<std::string::String> = smithy_http::header::read_many(headers)?;
if var_8.len() > 1 {
Err(smithy_http::header::ParseError)
} else {
let mut var_8 = var_8;
Ok(var_8.pop())
}
}
pub fn deser_header_delete_function_code_signing_config_retry_after_seconds(
header_map: &http::HeaderMap,
) -> Result<std::option::Option<std::string::String>, smithy_http::header::ParseError> {
let headers = header_map.get_all("Retry-After").iter();
let var_9: Vec<std::string::String> = smithy_http::header::read_many(headers)?;
if var_9.len() > 1 {
Err(smithy_http::header::ParseError)
} else {
let mut var_9 = var_9;
Ok(var_9.pop())
}
}
pub fn deser_header_delete_function_concurrency_retry_after_seconds(
header_map: &http::HeaderMap,
) -> Result<std::option::Option<std::string::String>, smithy_http::header::ParseError> {
let headers = header_map.get_all("Retry-After").iter();
let var_10: Vec<std::string::String> = smithy_http::header::read_many(headers)?;
if var_10.len() > 1 {
Err(smithy_http::header::ParseError)
} else {
let mut var_10 = var_10;
Ok(var_10.pop())
}
}
pub fn deser_header_delete_function_event_invoke_config_retry_after_seconds(
header_map: &http::HeaderMap,
) -> Result<std::option::Option<std::string::String>, smithy_http::header::ParseError> {
let headers = header_map.get_all("Retry-After").iter();
let var_11: Vec<std::string::String> = smithy_http::header::read_many(headers)?;
if var_11.len() > 1 {
Err(smithy_http::header::ParseError)
} else {
let mut var_11 = var_11;
Ok(var_11.pop())
}
}
pub fn deser_header_delete_layer_version_retry_after_seconds(
header_map: &http::HeaderMap,
) -> Result<std::option::Option<std::string::String>, smithy_http::header::ParseError> {
let headers = header_map.get_all("Retry-After").iter();
let var_12: Vec<std::string::String> = smithy_http::header::read_many(headers)?;
if var_12.len() > 1 {
Err(smithy_http::header::ParseError)
} else {
let mut var_12 = var_12;
Ok(var_12.pop())
}
}
pub fn deser_header_delete_provisioned_concurrency_config_retry_after_seconds(
header_map: &http::HeaderMap,
) -> Result<std::option::Option<std::string::String>, smithy_http::header::ParseError> {
let headers = header_map.get_all("Retry-After").iter();
let var_13: Vec<std::string::String> = smithy_http::header::read_many(headers)?;
if var_13.len() > 1 {
Err(smithy_http::header::ParseError)
} else {
let mut var_13 = var_13;
Ok(var_13.pop())
}
}
pub fn deser_header_get_account_settings_retry_after_seconds(
header_map: &http::HeaderMap,
) -> Result<std::option::Option<std::string::String>, smithy_http::header::ParseError> {
let headers = header_map.get_all("Retry-After").iter();
let var_14: Vec<std::string::String> = smithy_http::header::read_many(headers)?;
if var_14.len() > 1 {
Err(smithy_http::header::ParseError)
} else {
let mut var_14 = var_14;
Ok(var_14.pop())
}
}
pub fn deser_header_get_alias_retry_after_seconds(
header_map: &http::HeaderMap,
) -> Result<std::option::Option<std::string::String>, smithy_http::header::ParseError> {
let headers = header_map.get_all("Retry-After").iter();
let var_15: Vec<std::string::String> = smithy_http::header::read_many(headers)?;
if var_15.len() > 1 {
Err(smithy_http::header::ParseError)
} else {
let mut var_15 = var_15;
Ok(var_15.pop())
}
}
pub fn deser_header_get_event_source_mapping_retry_after_seconds(
header_map: &http::HeaderMap,
) -> Result<std::option::Option<std::string::String>, smithy_http::header::ParseError> {
let headers = header_map.get_all("Retry-After").iter();
let var_16: Vec<std::string::String> = smithy_http::header::read_many(headers)?;
if var_16.len() > 1 {
Err(smithy_http::header::ParseError)
} else {
let mut var_16 = var_16;
Ok(var_16.pop())
}
}
pub fn deser_header_get_function_retry_after_seconds(
header_map: &http::HeaderMap,
) -> Result<std::option::Option<std::string::String>, smithy_http::header::ParseError> {
let headers = header_map.get_all("Retry-After").iter();
let var_17: Vec<std::string::String> = smithy_http::header::read_many(headers)?;
if var_17.len() > 1 {
Err(smithy_http::header::ParseError)
} else {
let mut var_17 = var_17;
Ok(var_17.pop())
}
}
pub fn deser_header_get_function_code_signing_config_retry_after_seconds(
header_map: &http::HeaderMap,
) -> Result<std::option::Option<std::string::String>, smithy_http::header::ParseError> {
let headers = header_map.get_all("Retry-After").iter();
let var_18: Vec<std::string::String> = smithy_http::header::read_many(headers)?;
if var_18.len() > 1 {
Err(smithy_http::header::ParseError)
} else {
let mut var_18 = var_18;
Ok(var_18.pop())
}
}
pub fn deser_header_get_function_concurrency_retry_after_seconds(
header_map: &http::HeaderMap,
) -> Result<std::option::Option<std::string::String>, smithy_http::header::ParseError> {
let headers = header_map.get_all("Retry-After").iter();
let var_19: Vec<std::string::String> = smithy_http::header::read_many(headers)?;
if var_19.len() > 1 {
Err(smithy_http::header::ParseError)
} else {
let mut var_19 = var_19;
Ok(var_19.pop())
}
}
pub fn deser_header_get_function_configuration_retry_after_seconds(
header_map: &http::HeaderMap,
) -> Result<std::option::Option<std::string::String>, smithy_http::header::ParseError> {
let headers = header_map.get_all("Retry-After").iter();
let var_20: Vec<std::string::String> = smithy_http::header::read_many(headers)?;
if var_20.len() > 1 {
Err(smithy_http::header::ParseError)
} else {
let mut var_20 = var_20;
Ok(var_20.pop())
}
}
pub fn deser_header_get_function_event_invoke_config_retry_after_seconds(
header_map: &http::HeaderMap,
) -> Result<std::option::Option<std::string::String>, smithy_http::header::ParseError> {
let headers = header_map.get_all("Retry-After").iter();
let var_21: Vec<std::string::String> = smithy_http::header::read_many(headers)?;
if var_21.len() > 1 {
Err(smithy_http::header::ParseError)
} else {
let mut var_21 = var_21;
Ok(var_21.pop())
}
}
/// Reads the optional `Retry-After` response header for `GetLayerVersion`.
/// Returns `Ok(None)` if absent, the value if present once, and an error if repeated.
pub fn deser_header_get_layer_version_retry_after_seconds(
    header_map: &http::HeaderMap,
) -> Result<std::option::Option<std::string::String>, smithy_http::header::ParseError> {
    let mut values: Vec<std::string::String> =
        smithy_http::header::read_many(header_map.get_all("Retry-After").iter())?;
    match values.len() {
        0 | 1 => Ok(values.pop()),
        _ => Err(smithy_http::header::ParseError),
    }
}
/// Reads the optional `Retry-After` response header for `GetLayerVersionByArn`.
/// Returns `Ok(None)` if absent, the value if present once, and an error if repeated.
pub fn deser_header_get_layer_version_by_arn_retry_after_seconds(
    header_map: &http::HeaderMap,
) -> Result<std::option::Option<std::string::String>, smithy_http::header::ParseError> {
    let mut values: Vec<std::string::String> =
        smithy_http::header::read_many(header_map.get_all("Retry-After").iter())?;
    match values.len() {
        0 | 1 => Ok(values.pop()),
        _ => Err(smithy_http::header::ParseError),
    }
}
/// Reads the optional `Retry-After` response header for `GetLayerVersionPolicy`.
/// Returns `Ok(None)` if absent, the value if present once, and an error if repeated.
pub fn deser_header_get_layer_version_policy_retry_after_seconds(
    header_map: &http::HeaderMap,
) -> Result<std::option::Option<std::string::String>, smithy_http::header::ParseError> {
    let mut values: Vec<std::string::String> =
        smithy_http::header::read_many(header_map.get_all("Retry-After").iter())?;
    match values.len() {
        0 | 1 => Ok(values.pop()),
        _ => Err(smithy_http::header::ParseError),
    }
}
/// Reads the optional `Retry-After` response header for `GetPolicy`.
/// Returns `Ok(None)` if absent, the value if present once, and an error if repeated.
pub fn deser_header_get_policy_retry_after_seconds(
    header_map: &http::HeaderMap,
) -> Result<std::option::Option<std::string::String>, smithy_http::header::ParseError> {
    let mut values: Vec<std::string::String> =
        smithy_http::header::read_many(header_map.get_all("Retry-After").iter())?;
    match values.len() {
        0 | 1 => Ok(values.pop()),
        _ => Err(smithy_http::header::ParseError),
    }
}
/// Reads the optional `Retry-After` response header for `GetProvisionedConcurrencyConfig`.
/// Returns `Ok(None)` if absent, the value if present once, and an error if repeated.
pub fn deser_header_get_provisioned_concurrency_config_retry_after_seconds(
    header_map: &http::HeaderMap,
) -> Result<std::option::Option<std::string::String>, smithy_http::header::ParseError> {
    let mut values: Vec<std::string::String> =
        smithy_http::header::read_many(header_map.get_all("Retry-After").iter())?;
    match values.len() {
        0 | 1 => Ok(values.pop()),
        _ => Err(smithy_http::header::ParseError),
    }
}
/// Reads the optional `Retry-After` response header for `Invoke`.
/// Returns `Ok(None)` if absent, the value if present once, and an error if repeated.
pub fn deser_header_invoke_retry_after_seconds(
    header_map: &http::HeaderMap,
) -> Result<std::option::Option<std::string::String>, smithy_http::header::ParseError> {
    let mut values: Vec<std::string::String> =
        smithy_http::header::read_many(header_map.get_all("Retry-After").iter())?;
    match values.len() {
        0 | 1 => Ok(values.pop()),
        _ => Err(smithy_http::header::ParseError),
    }
}
pub fn deser_header_invoke_function_error(
header_map: &http::HeaderMap,
) -> Result<std::option::Option<std::string::String>, smithy_http::header::ParseError> {
let headers = header_map.get_all("X-Amz-Function-Error").iter();
let var_28: Vec<std::string::String> = smithy_http::header::read_many(headers)?;
if var_28.len() > 1 {
Err(smithy_http::header::ParseError)
} else {
let mut var_28 = var_28;
Ok(var_28.pop())
}
}
/// Reads the optional `X-Amz-Log-Result` response header for `Invoke`.
/// Returns `Ok(None)` if absent, the value if present once, and an error if repeated.
pub fn deser_header_invoke_log_result(
    header_map: &http::HeaderMap,
) -> Result<std::option::Option<std::string::String>, smithy_http::header::ParseError> {
    let mut values: Vec<std::string::String> =
        smithy_http::header::read_many(header_map.get_all("X-Amz-Log-Result").iter())?;
    match values.len() {
        0 | 1 => Ok(values.pop()),
        _ => Err(smithy_http::header::ParseError),
    }
}
/// Wraps a non-empty response body in a `Blob`; an empty body yields `None`.
pub fn deser_payload_invoke_payload(
    body: &[u8],
) -> Result<std::option::Option<smithy_types::Blob>, crate::error::InvokeError> {
    if body.is_empty() {
        Ok(None)
    } else {
        Ok(Some(smithy_types::Blob::new(body)))
    }
}
/// Reads the optional `X-Amz-Executed-Version` response header for `Invoke`.
/// Returns `Ok(None)` if absent, the value if present once, and an error if repeated.
pub fn deser_header_invoke_executed_version(
    header_map: &http::HeaderMap,
) -> Result<std::option::Option<std::string::String>, smithy_http::header::ParseError> {
    let mut values: Vec<std::string::String> =
        smithy_http::header::read_many(header_map.get_all("X-Amz-Executed-Version").iter())?;
    match values.len() {
        0 | 1 => Ok(values.pop()),
        _ => Err(smithy_http::header::ParseError),
    }
}
/// Reads the optional `Retry-After` response header for `ListAliases`.
/// Returns `Ok(None)` if absent, the value if present once, and an error if repeated.
pub fn deser_header_list_aliases_retry_after_seconds(
    header_map: &http::HeaderMap,
) -> Result<std::option::Option<std::string::String>, smithy_http::header::ParseError> {
    let mut values: Vec<std::string::String> =
        smithy_http::header::read_many(header_map.get_all("Retry-After").iter())?;
    match values.len() {
        0 | 1 => Ok(values.pop()),
        _ => Err(smithy_http::header::ParseError),
    }
}
/// Reads the optional `Retry-After` response header for `ListEventSourceMappings`.
/// Returns `Ok(None)` if absent, the value if present once, and an error if repeated.
pub fn deser_header_list_event_source_mappings_retry_after_seconds(
    header_map: &http::HeaderMap,
) -> Result<std::option::Option<std::string::String>, smithy_http::header::ParseError> {
    let mut values: Vec<std::string::String> =
        smithy_http::header::read_many(header_map.get_all("Retry-After").iter())?;
    match values.len() {
        0 | 1 => Ok(values.pop()),
        _ => Err(smithy_http::header::ParseError),
    }
}
/// Reads the optional `Retry-After` response header for `ListFunctionEventInvokeConfigs`.
/// Returns `Ok(None)` if absent, the value if present once, and an error if repeated.
pub fn deser_header_list_function_event_invoke_configs_retry_after_seconds(
    header_map: &http::HeaderMap,
) -> Result<std::option::Option<std::string::String>, smithy_http::header::ParseError> {
    let mut values: Vec<std::string::String> =
        smithy_http::header::read_many(header_map.get_all("Retry-After").iter())?;
    match values.len() {
        0 | 1 => Ok(values.pop()),
        _ => Err(smithy_http::header::ParseError),
    }
}
/// Reads the optional `Retry-After` response header for `ListFunctions`.
/// Returns `Ok(None)` if absent, the value if present once, and an error if repeated.
pub fn deser_header_list_functions_retry_after_seconds(
    header_map: &http::HeaderMap,
) -> Result<std::option::Option<std::string::String>, smithy_http::header::ParseError> {
    let mut values: Vec<std::string::String> =
        smithy_http::header::read_many(header_map.get_all("Retry-After").iter())?;
    match values.len() {
        0 | 1 => Ok(values.pop()),
        _ => Err(smithy_http::header::ParseError),
    }
}
/// Reads the optional `Retry-After` response header for `ListLayers`.
/// Returns `Ok(None)` if absent, the value if present once, and an error if repeated.
pub fn deser_header_list_layers_retry_after_seconds(
    header_map: &http::HeaderMap,
) -> Result<std::option::Option<std::string::String>, smithy_http::header::ParseError> {
    let mut values: Vec<std::string::String> =
        smithy_http::header::read_many(header_map.get_all("Retry-After").iter())?;
    match values.len() {
        0 | 1 => Ok(values.pop()),
        _ => Err(smithy_http::header::ParseError),
    }
}
/// Reads the optional `Retry-After` response header for `ListLayerVersions`.
/// Returns `Ok(None)` if absent, the value if present once, and an error if repeated.
pub fn deser_header_list_layer_versions_retry_after_seconds(
    header_map: &http::HeaderMap,
) -> Result<std::option::Option<std::string::String>, smithy_http::header::ParseError> {
    let mut values: Vec<std::string::String> =
        smithy_http::header::read_many(header_map.get_all("Retry-After").iter())?;
    match values.len() {
        0 | 1 => Ok(values.pop()),
        _ => Err(smithy_http::header::ParseError),
    }
}
/// Reads the optional `Retry-After` response header for `ListProvisionedConcurrencyConfigs`.
/// Returns `Ok(None)` if absent, the value if present once, and an error if repeated.
pub fn deser_header_list_provisioned_concurrency_configs_retry_after_seconds(
    header_map: &http::HeaderMap,
) -> Result<std::option::Option<std::string::String>, smithy_http::header::ParseError> {
    let mut values: Vec<std::string::String> =
        smithy_http::header::read_many(header_map.get_all("Retry-After").iter())?;
    match values.len() {
        0 | 1 => Ok(values.pop()),
        _ => Err(smithy_http::header::ParseError),
    }
}
/// Reads the optional `Retry-After` response header for `ListTags`.
/// Returns `Ok(None)` if absent, the value if present once, and an error if repeated.
pub fn deser_header_list_tags_retry_after_seconds(
    header_map: &http::HeaderMap,
) -> Result<std::option::Option<std::string::String>, smithy_http::header::ParseError> {
    let mut values: Vec<std::string::String> =
        smithy_http::header::read_many(header_map.get_all("Retry-After").iter())?;
    match values.len() {
        0 | 1 => Ok(values.pop()),
        _ => Err(smithy_http::header::ParseError),
    }
}
/// Reads the optional `Retry-After` response header for `ListVersionsByFunction`.
/// Returns `Ok(None)` if absent, the value if present once, and an error if repeated.
pub fn deser_header_list_versions_by_function_retry_after_seconds(
    header_map: &http::HeaderMap,
) -> Result<std::option::Option<std::string::String>, smithy_http::header::ParseError> {
    let mut values: Vec<std::string::String> =
        smithy_http::header::read_many(header_map.get_all("Retry-After").iter())?;
    match values.len() {
        0 | 1 => Ok(values.pop()),
        _ => Err(smithy_http::header::ParseError),
    }
}
/// Reads the optional `Retry-After` response header for `PublishLayerVersion`.
/// Returns `Ok(None)` if absent, the value if present once, and an error if repeated.
pub fn deser_header_publish_layer_version_retry_after_seconds(
    header_map: &http::HeaderMap,
) -> Result<std::option::Option<std::string::String>, smithy_http::header::ParseError> {
    let mut values: Vec<std::string::String> =
        smithy_http::header::read_many(header_map.get_all("Retry-After").iter())?;
    match values.len() {
        0 | 1 => Ok(values.pop()),
        _ => Err(smithy_http::header::ParseError),
    }
}
/// Reads the optional `Retry-After` response header for `PublishVersion`.
/// Returns `Ok(None)` if absent, the value if present once, and an error if repeated.
pub fn deser_header_publish_version_retry_after_seconds(
    header_map: &http::HeaderMap,
) -> Result<std::option::Option<std::string::String>, smithy_http::header::ParseError> {
    let mut values: Vec<std::string::String> =
        smithy_http::header::read_many(header_map.get_all("Retry-After").iter())?;
    match values.len() {
        0 | 1 => Ok(values.pop()),
        _ => Err(smithy_http::header::ParseError),
    }
}
/// Reads the optional `Retry-After` response header for `PutFunctionCodeSigningConfig`.
/// Returns `Ok(None)` if absent, the value if present once, and an error if repeated.
pub fn deser_header_put_function_code_signing_config_retry_after_seconds(
    header_map: &http::HeaderMap,
) -> Result<std::option::Option<std::string::String>, smithy_http::header::ParseError> {
    let mut values: Vec<std::string::String> =
        smithy_http::header::read_many(header_map.get_all("Retry-After").iter())?;
    match values.len() {
        0 | 1 => Ok(values.pop()),
        _ => Err(smithy_http::header::ParseError),
    }
}
/// Reads the optional `Retry-After` response header for `PutFunctionConcurrency`.
/// Returns `Ok(None)` if absent, the value if present once, and an error if repeated.
pub fn deser_header_put_function_concurrency_retry_after_seconds(
    header_map: &http::HeaderMap,
) -> Result<std::option::Option<std::string::String>, smithy_http::header::ParseError> {
    let mut values: Vec<std::string::String> =
        smithy_http::header::read_many(header_map.get_all("Retry-After").iter())?;
    match values.len() {
        0 | 1 => Ok(values.pop()),
        _ => Err(smithy_http::header::ParseError),
    }
}
/// Reads the optional `Retry-After` response header for `PutFunctionEventInvokeConfig`.
/// Returns `Ok(None)` if absent, the value if present once, and an error if repeated.
pub fn deser_header_put_function_event_invoke_config_retry_after_seconds(
    header_map: &http::HeaderMap,
) -> Result<std::option::Option<std::string::String>, smithy_http::header::ParseError> {
    let mut values: Vec<std::string::String> =
        smithy_http::header::read_many(header_map.get_all("Retry-After").iter())?;
    match values.len() {
        0 | 1 => Ok(values.pop()),
        _ => Err(smithy_http::header::ParseError),
    }
}
/// Reads the optional `Retry-After` response header for `PutProvisionedConcurrencyConfig`.
/// Returns `Ok(None)` if absent, the value if present once, and an error if repeated.
pub fn deser_header_put_provisioned_concurrency_config_retry_after_seconds(
    header_map: &http::HeaderMap,
) -> Result<std::option::Option<std::string::String>, smithy_http::header::ParseError> {
    let mut values: Vec<std::string::String> =
        smithy_http::header::read_many(header_map.get_all("Retry-After").iter())?;
    match values.len() {
        0 | 1 => Ok(values.pop()),
        _ => Err(smithy_http::header::ParseError),
    }
}
/// Reads the optional `Retry-After` response header for `RemoveLayerVersionPermission`.
/// Returns `Ok(None)` if absent, the value if present once, and an error if repeated.
pub fn deser_header_remove_layer_version_permission_retry_after_seconds(
    header_map: &http::HeaderMap,
) -> Result<std::option::Option<std::string::String>, smithy_http::header::ParseError> {
    let mut values: Vec<std::string::String> =
        smithy_http::header::read_many(header_map.get_all("Retry-After").iter())?;
    match values.len() {
        0 | 1 => Ok(values.pop()),
        _ => Err(smithy_http::header::ParseError),
    }
}
/// Reads the optional `Retry-After` response header for `RemovePermission`.
/// Returns `Ok(None)` if absent, the value if present once, and an error if repeated.
pub fn deser_header_remove_permission_retry_after_seconds(
    header_map: &http::HeaderMap,
) -> Result<std::option::Option<std::string::String>, smithy_http::header::ParseError> {
    let mut values: Vec<std::string::String> =
        smithy_http::header::read_many(header_map.get_all("Retry-After").iter())?;
    match values.len() {
        0 | 1 => Ok(values.pop()),
        _ => Err(smithy_http::header::ParseError),
    }
}
/// Reads the optional `Retry-After` response header for `TagResource`.
/// Returns `Ok(None)` if absent, the value if present once, and an error if repeated.
pub fn deser_header_tag_resource_retry_after_seconds(
    header_map: &http::HeaderMap,
) -> Result<std::option::Option<std::string::String>, smithy_http::header::ParseError> {
    let mut values: Vec<std::string::String> =
        smithy_http::header::read_many(header_map.get_all("Retry-After").iter())?;
    match values.len() {
        0 | 1 => Ok(values.pop()),
        _ => Err(smithy_http::header::ParseError),
    }
}
/// Reads the optional `Retry-After` response header for `UntagResource`.
/// Returns `Ok(None)` if absent, the value if present once, and an error if repeated.
pub fn deser_header_untag_resource_retry_after_seconds(
    header_map: &http::HeaderMap,
) -> Result<std::option::Option<std::string::String>, smithy_http::header::ParseError> {
    let mut values: Vec<std::string::String> =
        smithy_http::header::read_many(header_map.get_all("Retry-After").iter())?;
    match values.len() {
        0 | 1 => Ok(values.pop()),
        _ => Err(smithy_http::header::ParseError),
    }
}
/// Reads the optional `Retry-After` response header for `UpdateAlias`.
/// Returns `Ok(None)` if absent, the value if present once, and an error if repeated.
pub fn deser_header_update_alias_retry_after_seconds(
    header_map: &http::HeaderMap,
) -> Result<std::option::Option<std::string::String>, smithy_http::header::ParseError> {
    let mut values: Vec<std::string::String> =
        smithy_http::header::read_many(header_map.get_all("Retry-After").iter())?;
    match values.len() {
        0 | 1 => Ok(values.pop()),
        _ => Err(smithy_http::header::ParseError),
    }
}
/// Reads the optional `Retry-After` response header for `UpdateEventSourceMapping`.
/// Returns `Ok(None)` if absent, the value if present once, and an error if repeated.
pub fn deser_header_update_event_source_mapping_retry_after_seconds(
    header_map: &http::HeaderMap,
) -> Result<std::option::Option<std::string::String>, smithy_http::header::ParseError> {
    let mut values: Vec<std::string::String> =
        smithy_http::header::read_many(header_map.get_all("Retry-After").iter())?;
    match values.len() {
        0 | 1 => Ok(values.pop()),
        _ => Err(smithy_http::header::ParseError),
    }
}
/// Reads the optional `Retry-After` response header for `UpdateFunctionCode`.
/// Returns `Ok(None)` if absent, the value if present once, and an error if repeated.
pub fn deser_header_update_function_code_retry_after_seconds(
    header_map: &http::HeaderMap,
) -> Result<std::option::Option<std::string::String>, smithy_http::header::ParseError> {
    let mut values: Vec<std::string::String> =
        smithy_http::header::read_many(header_map.get_all("Retry-After").iter())?;
    match values.len() {
        0 | 1 => Ok(values.pop()),
        _ => Err(smithy_http::header::ParseError),
    }
}
/// Reads the optional `Retry-After` response header for `UpdateFunctionConfiguration`.
/// Returns `Ok(None)` if absent, the value if present once, and an error if repeated.
pub fn deser_header_update_function_configuration_retry_after_seconds(
    header_map: &http::HeaderMap,
) -> Result<std::option::Option<std::string::String>, smithy_http::header::ParseError> {
    let mut values: Vec<std::string::String> =
        smithy_http::header::read_many(header_map.get_all("Retry-After").iter())?;
    match values.len() {
        0 | 1 => Ok(values.pop()),
        _ => Err(smithy_http::header::ParseError),
    }
}
/// Reads the optional `Retry-After` response header for `UpdateFunctionEventInvokeConfig`.
/// Returns `Ok(None)` if absent, the value if present once, and an error if repeated.
pub fn deser_header_update_function_event_invoke_config_retry_after_seconds(
    header_map: &http::HeaderMap,
) -> Result<std::option::Option<std::string::String>, smithy_http::header::ParseError> {
    let mut values: Vec<std::string::String> =
        smithy_http::header::read_many(header_map.get_all("Retry-After").iter())?;
    match values.len() {
        0 | 1 => Ok(values.pop()),
        _ => Err(smithy_http::header::ParseError),
    }
}
|
use crate::decoder::decoder::{Decoder, DecoderResult};
use crate::decoder::instructions::decode_expression;
use crate::decoder::types::{
decode_function_type, decode_global_type, decode_limits, decode_memory_type, decode_value_type,
};
use crate::decoder::values::{decode_name, decode_u32};
use crate::structure::*;
// Section ids as assigned by the WebAssembly binary format:
// https://webassembly.github.io/spec/core/binary/modules.html#sections
const SECTION_ID_CUSTOM: u8 = 0;
const SECTION_ID_TYPE: u8 = 1;
const SECTION_ID_IMPORT: u8 = 2;
const SECTION_ID_FUNCTION: u8 = 3;
const SECTION_ID_TABLE: u8 = 4;
const SECTION_ID_MEMORY: u8 = 5;
const SECTION_ID_GLOBAL: u8 = 6;
const SECTION_ID_EXPORT: u8 = 7;
const SECTION_ID_START: u8 = 8;
const SECTION_ID_ELEMENT: u8 = 9;
const SECTION_ID_CODE: u8 = 10;
const SECTION_ID_DATA: u8 = 11;
/// If the next byte is `section_id`, decodes that section: reads the declared
/// payload size, runs `callback` to parse the payload, and verifies the parser
/// consumed exactly the declared number of bytes. A non-matching id is a no-op.
fn decode_section<F, R>(decoder: &mut Decoder, section_id: u8, mut callback: F) -> DecoderResult<()>
where
    F: FnMut(&mut Decoder) -> DecoderResult<R>,
{
    if decoder.match_byte(section_id) {
        let size = decode_u32(decoder)?;
        // Checked add: a hostile size must not overflow the offset arithmetic.
        let end_offset = decoder
            .offset
            .checked_add(size as usize)
            .ok_or_else(|| decoder.produce_error("Invalid section size"))?;
        // Run the payload parser directly on the decoder. The previous version
        // cloned the decoder, ran the callback on the clone, and copied the
        // offset back — a round-trip with no benefit, removed here.
        callback(decoder)?;
        if decoder.offset != end_offset {
            return Err(decoder.produce_error("Invalid section size"));
        }
    }
    Ok(())
}
// https://webassembly.github.io/spec/core/binary/modules.html#custom-section
/// Consumes every custom section at the current position, collecting each one's
/// name and raw payload bytes into `custom_sections`.
fn decode_custom_sections<'a>(
    decoder: &mut Decoder<'a>,
    custom_sections: &mut Vec<CustomSection<'a>>,
) -> DecoderResult<()> {
    while decoder.match_byte(SECTION_ID_CUSTOM) {
        let size = decode_u32(decoder)?;
        let end_offset = decoder.offset + size as usize;
        let name = decode_name(decoder)?;
        // Before creating a new slice we need to make sure that the custom section bytes slice is
        // within the boundary of the original slice and also that the current offset is not
        // greater than the section size.
        if decoder.offset > decoder.bytes.len()
            || end_offset > decoder.bytes.len()
            || decoder.offset > end_offset
        {
            return Err(decoder.produce_error("Invalid section size"));
        }
        let bytes = &decoder.bytes[decoder.offset..end_offset];
        custom_sections.push((name, bytes));
        // BUG FIX: skip past the payload we just sliced. Previously the offset
        // was left pointing inside the custom section, so its payload bytes
        // were re-parsed as if they were the next section.
        decoder.offset = end_offset;
    }
    Ok(())
}
// https://webassembly.github.io/spec/core/binary/modules.html#type-section
/// Decodes the type section into the module's list of function signatures.
fn decode_function_type_section(decoder: &mut Decoder) -> DecoderResult<Vec<FunctionType>> {
    let mut function_types = Vec::new();
    decode_section(decoder, SECTION_ID_TYPE, |d| {
        let count = decode_u32(d)?;
        for _ in 0..count {
            function_types.push(decode_function_type(d)?);
        }
        Ok(())
    })?;
    Ok(function_types)
}
// https://webassembly.github.io/spec/core/binary/modules.html#binary-importsec
/// Decodes the import section: each entry is a module name, an item name, and
/// a descriptor byte selecting function/table/memory/global.
fn decode_import_section(decoder: &mut Decoder) -> DecoderResult<Vec<Import>> {
    let mut imports = Vec::new();
    decode_section(decoder, SECTION_ID_IMPORT, |d| {
        let count = decode_u32(d)?;
        for _ in 0..count {
            let module = decode_name(d)?;
            let name = decode_name(d)?;
            let descriptor = match d.eat_byte()? {
                0x00 => ImportDescriptor::Function(decode_u32(d)?),
                0x01 => ImportDescriptor::Table(decode_table_type(d)?),
                0x02 => ImportDescriptor::Memory(decode_memory_type(d)?),
                0x03 => ImportDescriptor::Global(decode_global_type(d)?),
                _ => return Err(d.produce_error("Invalid import descriptor")),
            };
            imports.push(Import {
                module,
                name,
                descriptor,
            });
        }
        Ok(())
    })?;
    Ok(imports)
}
// https://webassembly.github.io/spec/core/binary/modules.html#binary-funcsec
/// Decodes the function section: one type index per locally-defined function.
fn decode_function_section(decoder: &mut Decoder) -> DecoderResult<Vec<u32>> {
    let mut type_indexes = Vec::new();
    decode_section(decoder, SECTION_ID_FUNCTION, |d| {
        let count = decode_u32(d)?;
        for _ in 0..count {
            type_indexes.push(decode_u32(d)?);
        }
        Ok(())
    })?;
    Ok(type_indexes)
}
// https://webassembly.github.io/spec/core/binary/types.html#binary-tabletype
/// Decodes a table type: the element type byte (only `funcref`, 0x70, is
/// valid in this version) followed by the table's limits.
fn decode_table_type(decoder: &mut Decoder) -> DecoderResult<TableType> {
    match decoder.eat_byte()? {
        0x70 => Ok(TableType {
            element_type: ElementType::FuncRef,
            limits: decode_limits(decoder)?,
        }),
        _ => Err(decoder.produce_error("Invalid element type")),
    }
}
// https://webassembly.github.io/spec/core/binary/modules.html#binary-tablesec
/// Decodes the table section into a vector of tables.
fn decode_table_section(decoder: &mut Decoder) -> DecoderResult<Vec<Table>> {
    let mut tables = Vec::new();
    decode_section(decoder, SECTION_ID_TABLE, |d| {
        let count = decode_u32(d)?;
        for _ in 0..count {
            tables.push(Table {
                table_type: decode_table_type(d)?,
            });
        }
        Ok(())
    })?;
    Ok(tables)
}
// https://webassembly.github.io/spec/core/binary/modules.html#binary-memsec
/// Decodes the memory section into a vector of memories.
fn decode_memory_section(decoder: &mut Decoder) -> DecoderResult<Vec<Memory>> {
    let mut memories = Vec::new();
    decode_section(decoder, SECTION_ID_MEMORY, |d| {
        let count = decode_u32(d)?;
        for _ in 0..count {
            let memory_type = decode_memory_type(d)?;
            memories.push(Memory { memory_type });
        }
        Ok(())
    })?;
    Ok(memories)
}
// https://webassembly.github.io/spec/core/binary/modules.html#binary-globalsec
/// Decodes the global section: each entry is a global type followed by its
/// constant initializer expression.
fn decode_global_section(decoder: &mut Decoder) -> DecoderResult<Vec<Global>> {
    let mut globals = Vec::new();
    decode_section(decoder, SECTION_ID_GLOBAL, |d| {
        let count = decode_u32(d)?;
        for _ in 0..count {
            let global_type = decode_global_type(d)?;
            let init = decode_expression(d)?;
            globals.push(Global { global_type, init });
        }
        Ok(())
    })?;
    Ok(globals)
}
// https://webassembly.github.io/spec/core/binary/modules.html#binary-exportsec
/// Decodes the export section: each entry is a name plus a descriptor byte
/// selecting the kind of index (function/table/memory/global) that follows.
fn decode_export_section(decoder: &mut Decoder) -> DecoderResult<Vec<Export>> {
    let mut exports = Vec::new();
    decode_section(decoder, SECTION_ID_EXPORT, |d| {
        let count = decode_u32(d)?;
        for _ in 0..count {
            let name = decode_name(d)?;
            let descriptor = match d.eat_byte()? {
                0x00 => ExportDescriptor::Function(decode_u32(d)?),
                0x01 => ExportDescriptor::Table(decode_u32(d)?),
                0x02 => ExportDescriptor::Memory(decode_u32(d)?),
                0x03 => ExportDescriptor::Global(decode_u32(d)?),
                _ => return Err(d.produce_error("Invalid export descriptor")),
            };
            exports.push(Export { name, descriptor });
        }
        Ok(())
    })?;
    Ok(exports)
}
// https://webassembly.github.io/spec/core/binary/modules.html#start-section
/// Decodes the optional start section, which holds the index of the function
/// to run at instantiation time.
fn decode_start_section(decoder: &mut Decoder) -> DecoderResult<Option<StartFunction>> {
    if decoder.match_byte(SECTION_ID_START) {
        // BUG FIX: the section size is a LEB128-encoded u32, not a single raw
        // byte; `eat_byte` would misparse any multi-byte encoding. Also verify
        // the payload consumed exactly the declared size, as other sections do.
        let size = decode_u32(decoder)?;
        let end_offset = decoder.offset + size as usize;
        let start = StartFunction {
            function: decode_u32(decoder)?,
        };
        if decoder.offset != end_offset {
            return Err(decoder.produce_error("Invalid section size"));
        }
        Ok(Some(start))
    } else {
        Ok(None)
    }
}
// https://webassembly.github.io/spec/core/binary/modules.html#element-section
/// Decodes the element section: each segment names a table, an offset
/// expression, and a vector of function indices to place there.
fn decode_element_section(decoder: &mut Decoder) -> DecoderResult<Vec<Element>> {
    let mut elements = Vec::new();
    decode_section(decoder, SECTION_ID_ELEMENT, |d| {
        let segment_count = decode_u32(d)?;
        for _ in 0..segment_count {
            let table = decode_u32(d)?;
            let offset = decode_expression(d)?;
            let init_count = decode_u32(d)?;
            let mut init = Vec::new();
            for _ in 0..init_count {
                init.push(decode_u32(d)?);
            }
            elements.push(Element {
                table,
                offset,
                init,
            });
        }
        Ok(())
    })?;
    Ok(elements)
}
// https://webassembly.github.io/spec/core/binary/modules.html#code-section
/// Decodes the code section: for each function body, its run-length-encoded
/// local declarations and its instruction expression. Each entry's declared
/// byte size is verified against what was actually consumed.
fn decode_code_section(
    decoder: &mut Decoder,
) -> DecoderResult<Vec<(Vec<(u32, ValueType)>, Expression)>> {
    let mut codes = Vec::new();
    decode_section(decoder, SECTION_ID_CODE, |d| {
        let entry_count = decode_u32(d)?;
        for _ in 0..entry_count {
            let code_size = decode_u32(d)?;
            let end_offset = d.offset + code_size as usize;
            // Locals are run-length encoded as (count, value type) pairs.
            let mut locals = Vec::new();
            let mut total_local_count: u64 = 0;
            let group_count = decode_u32(d)?;
            for _ in 0..group_count {
                let run_length = decode_u32(d)?;
                let value_type = decode_value_type(d)?;
                total_local_count += u64::from(run_length);
                locals.push((run_length, value_type));
            }
            // The spec caps a function at 2^32 locals.
            if total_local_count > 1u64 << 32 {
                return Err(d.produce_error("Too many locals"));
            }
            let expression = decode_expression(d)?;
            if d.offset != end_offset {
                return Err(d.produce_error("Invalid code size"));
            }
            codes.push((locals, expression));
        }
        Ok(())
    })?;
    Ok(codes)
}
// https://webassembly.github.io/spec/core/binary/modules.html#data-section
/// Decodes the data section: each segment names a memory, an offset
/// expression, and the raw bytes to initialize it with.
fn decode_data_section(decoder: &mut Decoder) -> DecoderResult<Vec<Data>> {
    let mut datas = Vec::new();
    decode_section(decoder, SECTION_ID_DATA, |d| {
        let segment_count = decode_u32(d)?;
        for _ in 0..segment_count {
            let data = decode_u32(d)?;
            let offset = decode_expression(d)?;
            let byte_count = decode_u32(d)?;
            let mut init = Vec::new();
            for _ in 0..byte_count {
                init.push(d.eat_byte()?);
            }
            datas.push(Data { data, offset, init });
        }
        Ok(())
    })?;
    Ok(datas)
}
// https://webassembly.github.io/spec/core/binary/modules.html
/// Decodes a complete WebAssembly module from `bytes`: validates the preamble,
/// then parses each known section in spec order, accepting custom sections
/// between any two of them, and finally pairs function type indices with their
/// code entries.
pub fn decode(bytes: &[u8]) -> DecoderResult<Module> {
    let decoder = &mut Decoder::new(bytes);
    let mut custom_sections = Vec::new();
    // Preamble: magic string "\0asm" followed by version 1 (little-endian u32).
    for &expected in b"\0asm" {
        if decoder.eat_byte()? != expected {
            return Err(decoder.produce_error("Invalid magic string"));
        }
    }
    for &expected in &[0x01, 0x00, 0x00, 0x00] {
        if decoder.eat_byte()? != expected {
            return Err(decoder.produce_error("Invalid version number"));
        }
    }
    decode_custom_sections(decoder, &mut custom_sections)?;
    let function_types = decode_function_type_section(decoder)?;
    decode_custom_sections(decoder, &mut custom_sections)?;
    let imports = decode_import_section(decoder)?;
    decode_custom_sections(decoder, &mut custom_sections)?;
    let function_type_indexes = decode_function_section(decoder)?;
    decode_custom_sections(decoder, &mut custom_sections)?;
    let tables = decode_table_section(decoder)?;
    decode_custom_sections(decoder, &mut custom_sections)?;
    let memories = decode_memory_section(decoder)?;
    decode_custom_sections(decoder, &mut custom_sections)?;
    let globals = decode_global_section(decoder)?;
    decode_custom_sections(decoder, &mut custom_sections)?;
    let exports = decode_export_section(decoder)?;
    decode_custom_sections(decoder, &mut custom_sections)?;
    let start = decode_start_section(decoder)?;
    decode_custom_sections(decoder, &mut custom_sections)?;
    let elements = decode_element_section(decoder)?;
    decode_custom_sections(decoder, &mut custom_sections)?;
    let codes = decode_code_section(decoder)?;
    decode_custom_sections(decoder, &mut custom_sections)?;
    let data = decode_data_section(decoder)?;
    decode_custom_sections(decoder, &mut custom_sections)?;
    if decoder.offset != decoder.bytes.len() {
        return Err(decoder.produce_error("Unexpected end of file"));
    }
    // The function and code sections must describe the same set of functions.
    if function_type_indexes.len() != codes.len() {
        return Err(decoder.produce_error("Function indexes and codes size mismatch"));
    }
    // Pair each type index with its code entry. Consuming both vectors lets us
    // move the locals and body into the Function, resolving the old TODO about
    // unnecessary clones.
    let functions = function_type_indexes
        .into_iter()
        .zip(codes)
        .map(|(function_type, (locals, body))| Function {
            function_type,
            locals,
            body,
        })
        .collect();
    Ok(Module {
        custom_sections,
        function_types,
        functions,
        tables,
        memories,
        globals,
        elements,
        data,
        start,
        imports,
        exports,
    })
}
|
extern crate extended_collections;
extern crate rand;
use extended_collections::bp_tree::{BpMap, Result};
use self::rand::{thread_rng, Rng};
use std::fs;
use std::panic;
use std::vec::Vec;
/// Deletes the `<test_name>.dat` file a test may have created, ignoring any
/// error (the file might not exist if the test failed before creating it).
fn teardown(test_name: &str) {
    let _ = fs::remove_file(format!("{}.dat", test_name));
}
/// Runs `test`, always removes the test's data file afterwards (even when the
/// test returned an error), and propagates the test's result.
fn run_test<T>(test: T, test_name: &str) -> Result<()>
where
    T: FnOnce() -> Result<()>,
{
    let outcome = test();
    teardown(test_name);
    outcome
}
#[test]
fn int_test_bp_map() -> Result<()> {
    let test_name = "int_test_bp_map";
    let file_name = &format!("{}.dat", test_name);
    run_test(
        || {
            // Deterministic keys/values from a fixed seed so the run is reproducible.
            let mut rng: rand::XorShiftRng = rand::SeedableRng::from_seed([1, 1, 1, 1]);
            let mut map = BpMap::with_degrees(file_name, 4, 8, 3, 3)?;
            let mut expected = Vec::new();
            for _ in 0..10_000 {
                let key = rng.gen::<u32>();
                let val = rng.gen::<u64>();
                map.insert(key, val)?;
                expected.push((key, val));
            }
            // Reverse before the stable sort so that, for duplicate keys, the
            // LAST inserted value (the one the map keeps) survives dedup.
            expected.reverse();
            expected.sort_by(|l, r| l.0.cmp(&r.0));
            expected.dedup_by_key(|pair| pair.0);
            // Reopen from disk to verify the data was durably persisted.
            map = BpMap::open(&format!("{}.dat", test_name))?;
            assert_eq!(map.len(), expected.len());
            assert_eq!(map.min()?, Some(expected[0].0));
            assert_eq!(map.max()?, Some(expected[expected.len() - 1].0));
            for entry in &expected {
                assert!(map.contains_key(&entry.0)?);
                assert_eq!(map.get(&entry.0)?, Some(entry.1));
            }
            // Remove in random order, checking each removal and the shrinking length.
            thread_rng().shuffle(&mut expected);
            let mut expected_len = expected.len();
            for entry in expected {
                let old_entry = map.remove(&entry.0)?;
                expected_len -= 1;
                assert_eq!(old_entry, Some((entry.0, entry.1)));
                assert_eq!(map.len(), expected_len);
            }
            Ok(())
        },
        test_name,
    )
}
|
use std::sync::Arc;
use vulkano::device::{Device, DeviceExtensions, QueuesIter};
use vulkano::instance::{self, Features, Instance, InstanceExtensions, PhysicalDevice, QueueFamily,
debug::DebugCallback};
use vulkano::swapchain::Surface;
use vulkano_win::{self, VkSurfaceBuild};
use winit;
/// Creates the winit events loop that will drive window input and resize events.
pub fn init_events_loop() -> winit::EventsLoop {
    let events_loop = winit::EventsLoop::new();
    events_loop
}
/// Initializes Vulkan: creates the instance, installs debug callbacks, probes
/// for a device, and builds a window surface bound to the given events loop.
pub fn init_vulkan(events_loop: &winit::EventsLoop) -> Arc<Surface<winit::Window>> {
    let instance = init_vulkan_instance();
    init_vulkan_debug_callbacks(instance.clone());
    // NOTE(review): the device and queues returned here are dropped right away;
    // presumably only the selection/logging side effects matter at this stage — confirm.
    init_device(instance.clone());
    let window_builder = init_window();
    window_builder
        .build_vk_surface(events_loop, instance)
        .unwrap()
}
/// Configures an 800x600 window titled "Vulkan" (not yet created).
fn init_window() -> winit::WindowBuilder {
    let builder = winit::WindowBuilder::new();
    builder.with_dimensions(800, 600).with_title("Vulkan")
}
/// Creates the Vulkan instance with the required extensions and the layers
/// selected by `init_vulkan_layers`.
fn init_vulkan_instance() -> Arc<Instance> {
    //INFO (danny): https://github.com/vulkano-rs/vulkano/issues/336
    let layer_names = init_vulkan_layers();
    let layer_refs: Vec<&str> = layer_names.iter().map(String::as_str).collect();
    Instance::new(
        None,
        &init_vulkan_instance_extensions(),
        layer_refs.iter(),
    ).expect("failed to create Vulkan instance")
}
#[cfg(feature = "vk_debug")]
/// Debug build: required window-system extensions plus `ext_debug_report`,
/// logging which of the requested extensions the core supports (✔️) and
/// which it lacks (❌).
fn init_vulkan_instance_extensions() -> InstanceExtensions {
    println!("Instance Extensions:");
    let mut extensions = vulkano_win::required_extensions();
    extensions.ext_debug_report = true;
    let supported = InstanceExtensions::supported_by_core().unwrap();
    print!("  ✔️ ");
    println!("{:?}", supported.intersection(&extensions));
    print!("  ❌ ");
    println!("{:?}", supported.difference(&extensions));
    extensions
}
#[cfg(not(feature = "vk_debug"))]
/// Release build: only the extensions the window system requires.
fn init_vulkan_instance_extensions() -> InstanceExtensions {
    vulkano_win::required_extensions()
}
#[cfg(feature = "vk_debug")]
/// Debug build: selects the RenderDoc and LUNARG validation layers from the
/// installed layer list, logging every layer with an activation marker.
fn init_vulkan_layers() -> Vec<String> {
    println!("Layers:");
    instance::layers_list()
        .unwrap()
        .filter(|layer| {
            let name = layer.name();
            // Activate only RenderDoc / LunarG (validation) layers.
            let to_activate = name.contains("RENDERDOC") || name.contains("LUNARG");
            if to_activate {
                print!("  ✔️ ");
            } else {
                print!("  ❌ ");
            }
            println!(
                "{} @ {} - {}",
                layer.name(),
                layer.implementation_version(),
                layer.description()
            );
            to_activate
        })
        .map(|l| String::from(l.name()))
        .collect()
}
#[cfg(not(feature = "vk_debug"))]
/// Release build: no validation or capture layers.
fn init_vulkan_layers() -> Vec<String> {
    Vec::new()
}
#[cfg(feature = "vk_debug")]
/// Debug build: registers a callback that prints validation errors and
/// warnings; registration failure is ignored.
fn init_vulkan_debug_callbacks(instance: Arc<Instance>) {
    println!("Setting Up Debug Callbacks.");
    let _ = DebugCallback::errors_and_warnings(&instance, |msg| {
        println!("Debug callback: {:?}", msg.description);
    });
}
#[cfg(not(feature = "vk_debug"))]
/// Release build: no debug callbacks. The parameter is underscore-prefixed to
/// silence the unused-variable warning the old name produced.
fn init_vulkan_debug_callbacks(_instance: Arc<Instance>) {}
fn init_device(instance: Arc<Instance>) -> (Arc<Device>, QueuesIter) {
println!("Picking PhysicalDevice");
let physical_device = instance::PhysicalDevice::enumerate(&instance)
.find(|&physical_device| is_device_suitable(physical_device))
.expect("No suitable physical device found!");
println!("Picking Queue Family");
let queue_family = physical_device
.queue_families()
.find(|qf| is_queue_suitable(qf))
.expect("No suitable queue family found!");
let features = Features::none();
Device::new(
physical_device,
&features,
&init_vulkan_device_extensions(physical_device),
Some((queue_family, 1.0)),
).expect("Couldn't build device")
}
/// Returns whether `device` supports the minimal feature set (geometry
/// shaders), logging the verdict and the device's identity either way.
fn is_device_suitable(device: PhysicalDevice) -> bool {
    // Geometry shader support is the only hard requirement right now.
    let minimal_features = Features {
        geometry_shader: true,
        ..Features::none()
    };
    let suitable = device.supported_features().superset_of(&minimal_features);
    if suitable {
        print!("  ✔️ ");
    } else {
        print!("  ❌ ");
    }
    println!(
        "{}, type: {:?}\n  supports: {}, driver: {}",
        device.name(),
        device.ty(),
        device.api_version(),
        device.driver_version(),
    );
    suitable
}
/// Returns whether `queue_family` can do graphics work, logging its
/// capabilities and the verdict either way.
fn is_queue_suitable(queue_family: &QueueFamily) -> bool {
    let suitable = queue_family.supports_graphics();
    if suitable {
        print!("  ✔️ ");
    } else {
        print!("  ❌ ");
    }
    println!(
        "  id: {}, queues_count: {}, graphics: {}, compute: {}, transfers: {}, sparse_binding: {}",
        queue_family.id(),
        queue_family.queues_count(),
        queue_family.supports_graphics(),
        queue_family.supports_compute(),
        queue_family.supports_transfers(),
        queue_family.supports_sparse_binding(),
    );
    suitable
}
#[cfg(feature = "vk_debug")]
/// Debug build: requests `ext_debug_marker`, logging which of the requested
/// device extensions are supported (✔️) and which are missing (❌).
fn init_vulkan_device_extensions(physical_device: PhysicalDevice) -> DeviceExtensions {
    println!("Device Extensions:");
    let mut extensions = DeviceExtensions::none();
    extensions.ext_debug_marker = true;
    let supported = DeviceExtensions::supported_by_device(physical_device);
    print!("  ✔️ ");
    println!("{:?}", supported.intersection(&extensions));
    print!("  ❌ ");
    println!("{:?}", supported.difference(&extensions));
    extensions
}
#[cfg(not(feature = "vk_debug"))]
fn init_vulkan_device_extensions() -> DeviceExtensions {
DeviceExtensions::none()
}
|
use std::collections::HashMap;
use std::ffi::c_void;
use std::os::raw::c_char;
use std::slice;
use opendp::data::Column;
use crate::core::{FfiObject, FfiOwnership, FfiResult, FfiSlice, FfiError};
use crate::util;
use crate::util::{Type, TypeContents, c_bool, parse_type_args};
use std::fmt::Debug;
use opendp::error::Fallible;
use opendp::{fallible, err};
#[no_mangle]
/// FFI entry point: copies the data behind `raw` into a freshly allocated
/// `FfiObject` of the type named by `type_args` (e.g. `"<Vec<i32>>"`).
/// The returned object is library-owned; release it with
/// `opendp_data__object_free`.
pub extern "C" fn opendp_data__slice_as_object(type_args: *const c_char, raw: *const FfiSlice) -> FfiResult<*mut FfiObject> {
    // Clones a single scalar of type T out of the slice; requires raw.len == 1.
    fn raw_to_plain<T: Clone>(raw: &FfiSlice) -> Fallible<*const c_void> {
        if raw.len != 1 {
            return fallible!(FFI, "The slice length must be one when creating a scalar from FfiSlice")
        }
        let plain = util::as_ref(raw.ptr as *const T)
            .ok_or_else(|| err!(FFI, "Attempted to follow a null pointer to create an object"))?.clone();
        Ok(util::into_raw(plain) as *const c_void)
    }
    // Copies a NUL-terminated C string into an owned Rust `String`.
    fn raw_to_string(raw: &FfiSlice) -> Fallible<*const c_void> {
        let string = util::to_str(raw.ptr as *const c_char)?.to_owned();
        Ok(util::into_raw(string) as *const c_void)
    }
    fn raw_to_slice<T: Clone>(_raw: &FfiSlice) -> Fallible<*const c_void> {
        // TODO: Need to do some extra wrapping to own the slice here.
        unimplemented!()
    }
    // Copies `raw.len` elements of T into an owned Vec<T>.
    #[allow(clippy::unnecessary_wraps)]
    fn raw_to_vec<T: Clone>(raw: &FfiSlice) -> Fallible<*const c_void> {
        // Caller guarantees raw.ptr points at raw.len valid, initialized T values.
        let slice = unsafe { slice::from_raw_parts(raw.ptr as *const T, raw.len) };
        let vec = slice.to_vec();
        Ok(util::into_raw(vec) as *const c_void)
    }
    // Builds an owned (T0, T1) from a length-2 slice of element pointers.
    fn raw_to_tuple<T0: Clone + Debug, T1: Clone + Debug>(raw: &FfiSlice) -> Fallible<*const c_void> {
        if raw.len != 2 {
            return fallible!(FFI, "The slice length must be two when creating a tuple from FfiSlice");
        }
        let slice = unsafe {slice::from_raw_parts(raw.ptr as *const *const c_void, 2)};
        // `zip` collapses to None (-> error) if either element pointer is null.
        let tuple = util::as_ref(slice[0] as *const T0).cloned()
            .zip(util::as_ref(slice[1] as *const T1).cloned())
            .ok_or_else(|| err!(FFI, "Attempted to follow a null pointer to create a tuple"))?;
        // println!("rust: {:?}", tuple);
        Ok(util::into_raw(tuple) as *const c_void)
    }
    let type_args = try_!(parse_type_args(type_args, 1));
    let type_ = type_args[0].clone();
    let raw = try_as_ref!(raw);
    // Dispatch on the parsed type descriptor to the matching converter above.
    let val = try_!(match type_.contents {
        TypeContents::PLAIN("String") => {
            raw_to_string(raw)
        },
        TypeContents::SLICE(element_id) => {
            let element = try_!(Type::of_id(&element_id));
            dispatch!(raw_to_slice, [(element, @primitives)], (raw))
        },
        TypeContents::VEC(element_id) => {
            let element = try_!(Type::of_id(&element_id));
            dispatch!(raw_to_vec, [(element, @primitives)], (raw))
        },
        TypeContents::TUPLE(ref element_ids) => {
            if element_ids.len() != 2 {
                return fallible!(FFI, "Only tuples of length 2 are supported").into();
            }
            if let Ok(types) = element_ids.iter().map(Type::of_id).collect::<Fallible<Vec<_>>>() {
                // primitively typed tuples
                dispatch!(raw_to_tuple, [(types[0], @primitives), (types[1], @primitives)], (raw))
            } else {
                // boxy tuples
                // NOTE(review): dispatching a tuple descriptor against @primitives
                // looks like it can only yield an error here — confirm intent.
                dispatch!(raw_to_plain, [(type_, @primitives)], (raw))
            }
        }
        _ => { dispatch!(raw_to_plain, [(type_, @primitives)], (raw)) }
    });
    // Re-box the type-erased pointer so FfiObject takes ownership of the value.
    let val = unsafe { Box::from_raw(val as *mut ()) };
    FfiResult::Ok(util::into_raw(FfiObject::new(type_, val, FfiOwnership::LIBRARY)))
}
#[no_mangle]
/// FFI entry point: returns the object's type descriptor as a newly
/// allocated C string (caller frees it with `opendp_data__str_free`).
pub extern "C" fn opendp_data__object_type(this: *mut FfiObject) -> FfiResult<*mut c_char> {
    let obj = try_as_ref!(this);
    let descriptor = obj.type_.descriptor.to_string();
    match util::into_c_char_p(descriptor) {
        Ok(char_p) => FfiResult::Ok(char_p),
        Err(err) => err.into(),
    }
}
#[no_mangle]
/// FFI entry point: exposes the data inside `obj` as an `FfiSlice`. For most
/// types the slice borrows `obj`'s memory (valid only while `obj` lives);
/// strings and tuples allocate fresh memory and currently leak (see FIXMEs).
pub extern "C" fn opendp_data__object_as_slice(obj: *const FfiObject) -> FfiResult<*mut FfiSlice> {
    // Scalar: a length-1 slice pointing directly at the object's value.
    fn plain_to_raw(obj: &FfiObject) -> FfiResult<*mut FfiSlice> {
        let plain: &c_void = obj.as_ref();
        FfiResult::Ok(FfiSlice::new_raw(plain as *const c_void as *mut c_void, 1))
    }
    fn string_to_raw(obj: &FfiObject) -> FfiResult<*mut FfiSlice> {
        // // FIXME: There's no way to get a CString without copying, so this leaks.
        let string: &String = obj.as_ref();
        // len + 1 accounts for the trailing NUL byte of the C string.
        FfiResult::Ok(try_!(util::into_c_char_p(string.clone())
            .map(|char_p| FfiSlice::new_raw(char_p as *mut c_void, string.len() + 1))))
    }
    fn slice_to_raw<T>(_obj: &FfiObject) -> FfiResult<*mut FfiSlice> {
        // TODO: Need to get a reference to the slice here.
        unimplemented!()
    }
    // Vec: borrow the vector's buffer directly, no copy.
    fn vec_to_raw<T: 'static>(obj: &FfiObject) -> FfiResult<*mut FfiSlice> {
        let vec: &Vec<T> = obj.as_ref();
        FfiResult::Ok(FfiSlice::new_raw(vec.as_ptr() as *mut c_void, vec.len()))
    }
    // Tuple: a freshly allocated length-2 array of pointers into the object.
    fn tuple_to_raw<T0: 'static + Clone + Debug, T1: 'static + Clone + Debug>(obj: &FfiObject) -> FfiResult<*mut FfiSlice> {
        let tuple: &(T0, T1) = obj.as_ref();
        FfiResult::Ok(FfiSlice::new_raw(util::into_raw([
            &tuple.0 as *const T0 as *const c_void,
            &tuple.1 as *const T1 as *const c_void
        ]) as *mut c_void, 2))
    }
    let obj = try_as_ref!(obj);
    // Dispatch on the object's type descriptor, mirroring slice_as_object.
    match &obj.type_.contents {
        TypeContents::PLAIN("String") => {
            string_to_raw(obj)
        },
        TypeContents::SLICE(element_id) => {
            let element = try_!(Type::of_id(element_id));
            dispatch!(slice_to_raw, [(element, @primitives)], (obj))
        },
        TypeContents::VEC(element_id) => {
            let element = try_!(Type::of_id(element_id));
            dispatch!(vec_to_raw, [(element, @primitives)], (obj))
        },
        TypeContents::TUPLE(element_ids) => {
            if element_ids.len() != 2 {
                return fallible!(FFI, "Only tuples of length 2 are supported").into();
            }
            if let Ok(types) = element_ids.iter().map(Type::of_id).collect::<Fallible<Vec<_>>>() {
                // primitively typed tuples
                dispatch!(tuple_to_raw, [(types[0], @primitives), (types[1], @primitives)], (obj))
            } else {
                // boxy tuples
                plain_to_raw(obj)
            }
        }
        _ => plain_to_raw(obj)
    }
}
#[no_mangle]
/// Frees an `FfiObject` previously returned by this library, dropping the
/// value it owns. Passing an invalid pointer surfaces as an error result.
pub extern "C" fn opendp_data__object_free(this: *mut FfiObject) -> FfiResult<*mut ()> {
    util::into_owned(this).map(|_| ()).into()
}
#[no_mangle]
/// Frees the slice, but not what the slice references!
/// (The pointed-to data stays owned by its `FfiObject`.)
pub extern "C" fn opendp_data__slice_free(this: *mut FfiSlice) -> FfiResult<*mut ()> {
    util::into_owned(this).map(|_| ()).into()
}
#[no_mangle]
/// Frees a C string previously allocated by this library
/// (e.g. by `opendp_data__object_type` or `opendp_data__to_string`).
pub extern "C" fn opendp_data__str_free(this: *mut c_char) -> FfiResult<*mut ()> {
    util::into_owned(this).map(|_| ()).into()
}
#[no_mangle]
/// Frees a heap-allocated boolean previously returned across the FFI boundary.
pub extern "C" fn opendp_data__bool_free(this: *mut c_bool) -> FfiResult<*mut ()> {
    util::into_owned(this).map(|_| ()).into()
}
// TODO: Remove this function once we have composition and/or tuples sorted out.
#[no_mangle]
/// Debug-formats the object's value into a newly allocated C string.
/// Only the monomorphizations explicitly listed below are supported; any
/// other type descriptor produces an error result.
pub extern "C" fn opendp_data__to_string(this: *const FfiObject) -> FfiResult<*mut c_char> {
    fn monomorphize<T: 'static + std::fmt::Debug>(this: &FfiObject) -> Fallible<*mut c_char> {
        let this = this.as_ref::<T>();
        // FIXME: Figure out how to implement general to_string().
        let string = format!("{:?}", this);
        // FIXME: Leaks string.
        util::into_c_char_p(string)
    }
    let this = try_as_ref!(this);
    let type_arg = &this.type_;
    dispatch!(monomorphize, [(type_arg, [
        u32, u64, i32, i64, f32, f64, bool, String, u8, Column,
        Vec<u32>, Vec<u64>, Vec<i32>, Vec<i64>, Vec<f32>, Vec<f64>, Vec<bool>, Vec<String>, Vec<u8>, Vec<Column>, Vec<Vec<String>>,
        HashMap<String, Column>,
        // FIXME: The following are for Python demo use of compositions. Need to figure this out!!!
        (Box<i32>, Box<f64>),
        (Box<i32>, Box<u32>),
        (Box<(Box<f64>, Box<f64>)>, Box<f64>)
    ])], (this)).map_or_else(
        |e| FfiResult::Err(util::into_raw(FfiError::from(e))),
        FfiResult::Ok)
}
#[no_mangle]
/// Returns a static JSON manifest describing this module's FFI surface,
/// consumed by language-binding generators.
pub extern "C" fn opendp_data__bootstrap() -> *const c_char {
    // The spec mirrors the `opendp_data__*` functions defined above.
    let spec =
r#"{
"functions": [
{ "name": "to_string", "args": [ ["const FfiObject *", "this"] ], "ret": "FfiResult<const char *>" },
{ "name": "slice_as_object", "args": [ ["const char *", "type_args"], ["const void *", "raw"] ], "ret": "FfiResult<const FfiObject *>" },
{ "name": "object_type", "args": [ ["const FfiObject *", "this"] ], "ret": "FfiResult<const char *>" },
{ "name": "object_as_slice", "args": [ ["const FfiObject *", "this"] ], "ret": "FfiResult<const FfiSlice *>" },
{ "name": "object_free", "args": [ ["FfiObject *", "this"] ], "ret": "FfiResult<void *>" },
{ "name": "slice_free", "args": [ ["FfiSlice *", "this"] ], "ret": "FfiResult<void *>" },
{ "name": "str_free", "args": [ ["const char *", "this"] ], "ret": "FfiResult<void *>" },
{ "name": "bool_free", "args": [ ["bool *", "this"] ], "ret": "FfiResult<void *>" }
]
}"#;
    util::bootstrap(spec)
}
#[cfg(test)]
mod tests {
    use crate::util;
    use opendp::error::*;
    use super::*;
    /// Round-trips a scalar i32 through `slice_as_object`.
    #[test]
    fn test_data_new_number() {
        let val_in = 999;
        let raw_ptr = util::into_raw(val_in) as *mut c_void;
        let raw_len = 1;
        let raw = FfiSlice::new_raw(raw_ptr, raw_len);
        let res = opendp_data__slice_as_object(util::into_c_char_p("<i32>".to_owned()).unwrap_test(), raw);
        match res {
            FfiResult::Ok(obj) => {
                let obj = util::as_ref(obj).unwrap_test();
                let val_out: &i32 = obj.as_ref();
                assert_eq!(&val_in, val_out);
                if let FfiResult::Err(_) = opendp_data__object_free(obj as *const FfiObject as *mut FfiObject) {
                    panic!("Got Err!")
                }
            },
            FfiResult::Err(_) => panic!("Got Err!"),
        }
    }
    /// Round-trips an owned String through `slice_as_object`.
    #[test]
    fn test_data_new_string() {
        let val_in = "Hello".to_owned();
        let raw_ptr = util::into_c_char_p(val_in.clone()).unwrap_test() as *mut c_void;
        // len + 1 for the trailing NUL of the C string.
        let raw_len = val_in.len() + 1;
        let raw = FfiSlice::new_raw(raw_ptr, raw_len);
        let res = opendp_data__slice_as_object(util::into_c_char_p("<String>".to_owned()).unwrap_test(), raw);
        match res {
            FfiResult::Ok(obj) => {
                let obj = util::as_ref(obj).unwrap_test();
                let val_out: &String = obj.as_ref();
                assert_eq!(&val_in, val_out);
                if let FfiResult::Err(_) = opendp_data__object_free(obj as *const FfiObject as *mut FfiObject) {
                    panic!("Got Err!")
                }
            },
            FfiResult::Err(_) => panic!("Got Err!"),
        }
    }
    /// Round-trips a Vec<i32>; `slice_as_object` copies, so `val_in`
    /// keeps ownership of its own buffer.
    #[test]
    fn test_data_new_vec() {
        let val_in = vec![1, 2, 3];
        let raw_ptr = val_in.as_ptr() as *mut c_void;
        let raw_len = val_in.len();
        let raw = FfiSlice::new_raw(raw_ptr, raw_len);
        match opendp_data__slice_as_object(util::into_c_char_p("<Vec<i32>>".to_owned()).unwrap_test(), raw) {
            FfiResult::Ok(obj) => {
                let obj = util::as_ref(obj).unwrap_test();
                let val_out: &Vec<i32> = obj.as_ref();
                assert_eq!(&val_in, val_out);
                if let FfiResult::Err(_) = opendp_data__object_free(obj as *const FfiObject as *mut FfiObject) {
                    panic!("Got Err!")
                }
            },
            FfiResult::Err(_) => panic!("Got Err!"),
        }
    }
    /// Exposes a scalar object as a length-1 borrowed slice.
    #[test]
    fn test_data_as_raw_number() {
        let val_in = 999;
        let obj = FfiObject::new_raw_from_type(val_in);
        match opendp_data__object_as_slice(obj) {
            FfiResult::Ok(obj) => {
                let raw = util::as_ref(obj).unwrap_test();
                assert_eq!(raw.len, 1);
                let val_out = util::as_ref(raw.ptr as *const i32).unwrap_test();
                assert_eq!(&val_in, val_out);
                if let FfiResult::Err(_) = opendp_data__slice_free(raw as *const FfiSlice as *mut FfiSlice) {
                    panic!("Got Err!")
                }
            },
            FfiResult::Err(_) => panic!("Got Err!"),
        }
        if let FfiResult::Err(_) = opendp_data__object_free(obj) {
            panic!("Got Err!")
        }
    }
    /// Exposes a String object as a NUL-terminated C string slice.
    #[test]
    fn test_data_as_raw_string() {
        let val_in = "Hello".to_owned();
        let obj = FfiObject::new_raw_from_type(val_in.clone());
        match opendp_data__object_as_slice(obj) {
            FfiResult::Ok(obj) => {
                let raw = util::as_ref(obj).unwrap_test();
                assert_eq!(raw.len, val_in.len() + 1);
                let val_out = util::to_str(raw.ptr as *const c_char).unwrap_test().to_owned();
                assert_eq!(val_in, val_out);
                if let FfiResult::Err(_) = opendp_data__slice_free(raw as *const FfiSlice as *mut FfiSlice) {
                    panic!("Got Err!")
                }
            },
            FfiResult::Err(_) => panic!("Got Err!"),
        }
        if let FfiResult::Err(_) = opendp_data__object_free(obj) {
            panic!("Got Err!")
        }
    }
    /// Exposes a Vec<i32> object's buffer as a borrowed slice.
    #[test]
    fn test_data_as_raw_vec() {
        let val_in = vec![1, 2, 3];
        let obj = FfiObject::new_raw_from_type(val_in.clone());
        match opendp_data__object_as_slice(obj) {
            FfiResult::Ok(obj) => {
                let raw = util::as_ref(obj).unwrap_test();
                assert_eq!(raw.len, val_in.len());
                // View the object's buffer as a *borrowed* slice. The previous
                // `Vec::from_raw_parts` rebuilt an owning Vec over memory that
                // `obj` still owns, so both drops freed the same buffer
                // (double free / UB).
                let val_out = unsafe { std::slice::from_raw_parts(raw.ptr as *const i32, raw.len) };
                assert_eq!(&val_in[..], val_out);
                if let FfiResult::Err(_) = opendp_data__slice_free(raw as *const FfiSlice as *mut FfiSlice) {
                    panic!("Got Err!")
                }
            },
            FfiResult::Err(_) => panic!("Got Err!"),
        }
        if let FfiResult::Err(_) = opendp_data__object_free(obj) {
            panic!("Got Err!")
        }
    }
}
use std::process;
use colored::*;
pub fn error_bomb( arg : &str )
{
println!( "{}", "\n!!! ERROR !!!\n".red() );
match arg {
"seq_title_not_same" => println!( "Inadequate format in Multi-FASTA file." ),
"seq_len_not_same" => println!( "The length of all the sequences must be same." ),
"site_ent_len_not_same" => println!( "Length of ( *site_list ) != Length of ( *cons_re_list )" ),
"non_standard_residue" => println!( "Non-standard residue was observed in the input file." ),
"unexpected_symbol" => println!( "Unexpected symbol was observed in the input file." ),
"mot_not_20*20" => println!( "The normalized substitution matrix is not 20 × 20." ),
"mat_not_diag" => println!( "The normalized substitution matrix is not diagonal." ),
_ => (),
}
println!( "{}", "\n!!! Program halted !!!\n".red() );
process::exit( 1 );
}
|
mod collada;
mod image_namer;
mod gltf;
use cli::Args;
use errors::Result;
use std::fs::File;
use std::io::Write;
use std::path::PathBuf;
use util::namers::UniqueNamer;
use util::OutDir;
use db::Database;
use convert::image_namer::ImageNamer;
use connection::{Connection, ConnectionOptions};
/// CLI entry point: converts every model in the database into DAE/GLB/glTF
/// files under the output directory, then dumps each referenced texture as a
/// PNG. Per-model write failures are logged and skipped, not fatal.
pub fn main(args: &Args) -> Result<()> {
    let out_dir_path = PathBuf::from(args.get_opt("output").unwrap());
    let mut out_dir = OutDir::new(out_dir_path)?;
    let db = Database::from_cli_args(args)?;
    db.print_status();
    let conn_options = ConnectionOptions::from_cli_args(args);
    let conn = Connection::build(&db, conn_options);
    // Output format defaults to COLLADA when --format is not given.
    let format = args.get_opt("format").map(|s| s.to_str().unwrap())
        .unwrap_or("dae");
    let mut image_namer = ImageNamer::build(&db, &conn);
    if args.flags.contains(&"more-textures") {
        image_namer.add_more_images(&db);
    }
    let mut models_written = 0;
    let mut pngs_written = 0;
    // Gives unique names to each model file to avoid name clashes.
    let mut model_file_namer = UniqueNamer::new();
    for (model_id, model) in db.models.iter().enumerate() {
        debug!("Converting model {} ({})...", model.name, model_id);
        let name = model_file_namer.get_fresh_name(format!("{}", model.name.print_safe()));
        let mut f = out_dir.create_file(&format!("{}.{}", name, format))?;
        let res = if format == "dae" {
            let s = collada::write(&db, &conn, &image_namer, model_id);
            f.write_all(s.as_bytes()).and_then(|_| f.flush())
        } else if format == "glb" || format == "gltf" {
            let gltf = gltf::to_gltf(&db, &conn, &image_namer, model_id);
            if format == "glb" {
                gltf.write_glb(&mut f)
            } else {
                // .gltf splits the binary payload into a sibling .bin file.
                let bin_file_name = format!("{}.bin", name);
                let mut bin_f = out_dir.create_file(&bin_file_name)?;
                gltf.write_gltf_bin(&mut f, &mut bin_f, &bin_file_name)
            }
        } else {
            // NOTE(review): an unrecognized --format value reaches this
            // `unreachable!` and panics — confirm it is validated upstream.
            unreachable!()
        };
        match res {
            Ok(()) => { models_written += 1; },
            Err(e) => error!("failed to write {}: {}", name, e),
        }
    }
    // Save PNGs for all the images
    for ((texture_id, palette_id), image_name) in image_namer.names.drain() {
        let texture = &db.textures[texture_id];
        let palette = palette_id.map(|id| &db.palettes[id]);
        use nds::decode_texture;
        // A texture that fails to decode is logged and skipped.
        let rgba = match decode_texture(texture, palette) {
            Ok(rgba) => rgba,
            Err(e) => {
                error!("error generating image {}, error: {}", image_name, e);
                continue;
            }
        };
        let dim = (texture.params.width(), texture.params.height());
        let mut png_file = out_dir.create_file(&format!("{}.png", image_name))?;
        match write_rgba(&mut png_file, &rgba.0[..], dim) {
            Ok(()) => { pngs_written += 1; }
            Err(e) => error!("failed writing PNG: {}", e),
        }
    }
    // Print results
    let plural = |x| if x != 1 { "s" } else { "" };
    // Human-readable label for the chosen output format.
    let model_file_name = match format {
        "dae" => "DAE",
        "glb" => "GLB",
        "gltf" => "glTF",
        _ => unreachable!(),
    };
    println!("Wrote {} {}{}, {} PNG{}.",
        models_written, model_file_name, plural(models_written),
        pngs_written, plural(pngs_written));
    Ok(())
}
/// Encodes `rgba` (8-bit RGBA pixels, row-major) into `f` as a PNG of
/// dimensions `dim` = (width, height).
pub fn write_rgba(f: &mut File, rgba: &[u8], dim: (u32, u32)) -> Result<()> {
    use png::{BitDepth, ColorType, Encoder};
    let (width, height) = dim;
    let mut encoder = Encoder::new(f, width, height);
    encoder.set_color(ColorType::RGBA);
    encoder.set_depth(BitDepth::Eight);
    encoder.write_header()?.write_image_data(rgba)?;
    Ok(())
}
|
#![feature(cfg_target_feature)]
#![cfg_attr(feature = "strict", deny(warnings))]
#![cfg_attr(feature = "cargo-clippy", allow(option_unwrap_used))]
extern crate cupid;
#[macro_use]
#[cfg(any(target_arch = "x86", target_arch = "x86_64"))]
extern crate stdsimd;
#[test]
#[cfg(any(target_arch = "x86", target_arch = "x86_64"))]
fn works() {
    // Cross-check stdsimd's runtime feature detection against the `cupid`
    // CPUID crate: both sources must agree on every queried x86 feature flag.
    let information = cupid::master().unwrap();
    assert_eq!(cfg_feature_enabled!("sse"), information.sse());
    assert_eq!(cfg_feature_enabled!("sse2"), information.sse2());
    assert_eq!(cfg_feature_enabled!("sse3"), information.sse3());
    assert_eq!(cfg_feature_enabled!("ssse3"), information.ssse3());
    assert_eq!(cfg_feature_enabled!("sse4.1"), information.sse4_1());
    assert_eq!(cfg_feature_enabled!("sse4.2"), information.sse4_2());
    assert_eq!(cfg_feature_enabled!("avx"), information.avx());
    assert_eq!(cfg_feature_enabled!("avx2"), information.avx2());
    assert_eq!(cfg_feature_enabled!("fma"), information.fma());
    // stdsimd calls it "bmi"; cupid exposes the same bit as bmi1().
    assert_eq!(cfg_feature_enabled!("bmi"), information.bmi1());
    assert_eq!(cfg_feature_enabled!("bmi2"), information.bmi2());
    assert_eq!(cfg_feature_enabled!("popcnt"), information.popcnt());
    // TODO: tbm, abm, lzcnt
}
|
// TODO: Import those from libc, see https://github.com/rust-lang/libc/pull/1638
/// ELF auxiliary-vector tag selecting the hardware-capability bitmask.
const AT_HWCAP: u64 = 16;
/// Bit inside the `AT_HWCAP` mask that signals SHA-2 instruction support.
const HWCAP_SHA2: u64 = 64;
#[inline(always)]
/// Returns `true` when the kernel reports SHA-2 instructions via `getauxval`.
pub fn sha2_supported() -> bool {
    // SAFETY: getauxval has no preconditions; unknown tags simply return 0.
    let caps = unsafe { libc::getauxval(AT_HWCAP) };
    caps & HWCAP_SHA2 != 0
}
|
use async_trait::async_trait;
use common::result::Result;
use crate::domain::contract::{Contract, ContractId};
use crate::domain::publication::PublicationId;
#[async_trait]
/// Persistence boundary for `Contract` aggregates.
pub trait ContractRepository {
    /// Generates a fresh, unused contract id.
    async fn next_id(&self) -> Result<ContractId>;
    /// Looks up a contract by id; errors if it does not exist.
    async fn find_by_id(&self, contract_id: &ContractId) -> Result<Contract>;
    /// Finds the contract attached to a publication.
    /// NOTE(review): returns a single `Contract`, implying at most one
    /// contract per publication — confirm that invariant.
    async fn find_by_publication_id(&self, publication_id: &PublicationId) -> Result<Contract>;
    /// Lists all contracts whose status matches the given string.
    async fn find_by_status(&self, status: &str) -> Result<Vec<Contract>>;
    /// Inserts or updates the contract.
    /// NOTE(review): takes `&mut` — presumably so the implementation can
    /// mutate the aggregate (e.g. drain domain events); confirm.
    async fn save(&self, contract: &mut Contract) -> Result<()>;
}
|
use log::*;
use std::collections::{BTreeMap, BTreeSet};
use std::fmt;
use std::ops::{Index, IndexMut};
use crate::row::Row;
/// A set of state ids, used as a partition block during minimization.
pub type State = BTreeSet<usize>;
/// Indices of the transition symbols (one entry per alphabet symbol).
pub type Alphabet = Vec<usize>;
/// A DFA transition table plus a mapping from original row ids to their
/// current positions after rows have been removed or merged.
pub struct Table {
    /// `row_assignments[id]` is the current index of the row originally
    /// known as `id`; updated as rows are merged/removed.
    row_assignments: Vec<usize>,
    rows: Vec<Row>,
}
impl Table {
    /// Builds a table from `rows`, with identity assignments `0..num_rows`.
    pub fn new(rows: Vec<Row>, num_rows: usize) -> Self {
        let row_assignments = (0..num_rows).collect();
        Self {
            rows,
            row_assignments,
        }
    }
    /// An empty table whose assignment vector is pre-filled with the
    /// identity over `0..size_of_alpha`.
    /// NOTE(review): the identity here runs over the *alphabet* size rather
    /// than a row count — confirm callers rely on that.
    pub fn blank_table(size_of_alpha: usize) -> Self {
        let row_assignments = (0..size_of_alpha).collect();
        Self {
            rows: Vec::new(),
            row_assignments,
        }
    }
    /// Immutable view of all rows.
    pub fn rows(&self) -> &[Row] {
        &self.rows
    }
    /// Mutable view of all rows.
    pub fn rows_mut(&mut self) -> &mut [Row] {
        &mut self.rows
    }
    /// Appends a row to the table.
    pub fn push_row(&mut self, row: Row) {
        self.rows.push(row);
    }
    /// Runs `input` through the automaton, translating characters into
    /// transition indices via `mapping`.
    ///
    /// Returns `None` on a successful match. On failure returns `Some(i)`
    /// marking the failure point: `0` for an empty table or an empty input
    /// that the start state rejects, `n + 1` for a failure on the character
    /// at byte position `n`, and `input.len() + 1` when the whole input is
    /// consumed but the final state is not accepting.
    pub fn does_match(&self, input: &str, mapping: &BTreeMap<char, usize>) -> Option<usize> {
        debug!("running does match on input: {:?}", input);
        if self.rows.is_empty() {
            debug!("rows are empty");
            return Some(0);
        }
        // Empty input only matches when the start state accepts.
        // (The debug text below is misleading: this is the *reject* path.)
        if input.is_empty() && !self.rows[0].is_accepting() {
            debug!("accepting empty state");
            return Some(0);
        }
        let mut current_state = 0;
        let mut chars = input.char_indices();
        while let Some((n, character)) = chars.next() {
            debug!("{:?}", (n, character));
            let transition = mapping.get(&character);
            if let Some(&transition) = transition {
                // If the current character matches some transition,
                // the option will be some:
                if let Some(next_state) = self[current_state][transition] {
                    current_state = next_state;
                } else {
                    // Match failed, return the character that caused it to fail:
                    return Some(n + 1);
                }
            } else {
                // The character does not appear in the alphabet at all.
                return Some(n + 1);
            }
        }
        // If we end at an accepting state we have matched the characters.
        if self[current_state].is_accepting() {
            None
        } else {
            Some(input.len() + 1)
        }
    }
    /// Minimizes the table in place, then rewrites every stored row id and
    /// transition target through `row_assignments` so indices match reality.
    pub fn optimize(&mut self) {
        info!("un-optimized table: \n{}", self);
        // Optimize until completed
        while self.optimize_step() {}
        // self.remove_dead_state_simple();
        // self.remove_dead_states();
        // Deal with borrows.
        let Self {
            row_assignments,
            rows,
        } = self;
        debug!("Alpha assigns after optimize {:?}", row_assignments);
        // Update all assignments to reflect reality.
        for row in rows {
            for idx in row.transitions_mut().iter_mut().flatten() {
                *idx = row_assignments[*idx];
            }
            row.id = row_assignments[row.id];
        }
        info!("Optimized Table: \n{}", self);
    }
    /// One minimization round: prunes dead branches, splits states into
    /// accepting/non-accepting blocks, then refines each block symbol by
    /// symbol; blocks that survive all symbols are merged. Returns `true`
    /// while progress is made so `optimize` can loop to a fixed point.
    fn optimize_step(&mut self) -> bool {
        info!("optimize step");
        info!("remove dead states");
        // self.remove_dead_states();
        self.remove_dead_branches();
        // Alpha is just a lookup table for our index optimization.
        let alpha: Alphabet = (0..self[0].transitions().len()).collect();
        info!("Alphabet: {:?}", alpha);
        // The stack of states
        let mut stack: Vec<(State, usize)> = Vec::with_capacity(10);
        // The set of all states that we need to merge together
        let mut merge_set: BTreeSet<State> = BTreeSet::new();
        // Starting state, just partition based off of accepting
        // and not accepting states.
        info!("Partitioning states");
        info!("Alpha assignments: {:?}", self.row_assignments);
        let (accepting_states, na_states): (State, State) = self
            .rows()
            .iter()
            .map(|r| r.id)
            .partition(|index| self[self.row_assignments[*index]].is_accepting());
        stack.push((accepting_states, 0));
        stack.push((na_states, 0));
        while let Some((state, idx)) = stack.pop() {
            debug!("char: {}, state: {:?}", idx, state);
            // Group the block's states by where symbol `idx` sends them.
            let mut character_aggregate: BTreeMap<Option<usize>, State> = BTreeMap::new();
            debug!("Aggregating States on: {}", idx);
            for s in state {
                let transition =
                    self[self.row_assignments[s]][idx].map(|i| self.row_assignments[i]);
                character_aggregate.entry(transition).or_default().insert(s);
            }
            debug!("Aggregates:");
            for (key, value) in character_aggregate.iter() {
                debug!("{:?} => {:?}", key, value);
            }
            // Blocks still larger than one either get split on the next
            // symbol, or merged once every symbol has been examined.
            for (_, state) in character_aggregate.into_iter().filter(|(_, s)| s.len() > 1) {
                if idx + 1 >= alpha.len() {
                    debug!("Merging: {:?}", state);
                    merge_set.insert(state);
                } else {
                    debug!("Pushing: {:?}", state);
                    stack.push((state, idx + 1));
                }
            }
            debug!("");
        }
        info!("DFS dead_state removal");
        let ret = !merge_set.is_empty(); // || self.remove_dead_states();
        debug!("ret: {}", ret);
        for state in merge_set {
            self.merge(state);
        }
        ret
    }
    /// Removes every state from which no accepting state is reachable,
    /// then re-compacts indices via `make_indexable`.
    pub fn remove_dead_branches(&mut self) {
        let mut marked: Vec<usize> = Vec::new();
        for row in 0..self.rows.len() {
            if marked.contains(&row) {
                continue;
            }
            self.dead_bfs(row, &mut marked, &mut BTreeSet::new());
        }
        // Remove from highest index to lowest so earlier removals do not
        // shift indices that are still pending removal.
        marked.sort();
        while let Some(row) = marked.pop() {
            self.rows.remove(row);
        }
        self.make_indexable();
    }
    /// Recursive reachability check: returns `true` if an accepting state is
    /// reachable from `row`; dead rows are appended to `marked`.
    /// (Despite the name, this is a depth-first search, not a BFS.)
    fn dead_bfs(
        &self,
        row: usize,
        mut marked: &mut Vec<usize>,
        mut seen: &mut BTreeSet<usize>,
    ) -> bool {
        if self.rows[row].is_accepting() {
            return true;
        }
        // Already on the current path (cycle) — treat as not (yet) alive.
        if seen.contains(&row) {
            return false;
        }
        if marked.contains(&row) {
            return false;
        }
        let mut is_alive = false;
        seen.insert(row);
        for transition in self.rows[row].transitions() {
            match transition {
                // Non-short-circuiting `|` on purpose: every successor is
                // visited so all dead descendants get marked too.
                Some(t) => is_alive = is_alive | self.dead_bfs(*t, &mut marked, &mut seen),
                None => {}
            }
        }
        if !is_alive {
            marked.push(row);
        }
        is_alive
    }
    /// Renumbers row ids densely (the start state stays 0) and rewrites
    /// transitions; transitions to rows that no longer exist become `None`.
    fn make_indexable(&mut self) {
        let mut state_map: BTreeMap<usize, usize> = BTreeMap::new();
        state_map.insert(0, 0); // Start node is ALWAYS 0
        for row in self.rows.iter() {
            // Populate a map
            let id = row.id;
            if !state_map.contains_key(&id) {
                state_map.insert(id, state_map.len());
            }
        }
        for row in self.rows_mut() {
            // Change all the transitions to correct index
            if let Some(id) = state_map.get(&row.id) {
                row.id = *id;
            }
            for transition in row.transitions_mut() {
                match transition {
                    Some(t) => {
                        if let Some(trans) = state_map.get(t) {
                            *transition = Some(*trans);
                        } else {
                            // Get rid of transitions to nodes that do not exist
                            *transition = None;
                        }
                    }
                    None => (),
                };
            }
        }
    }
    /// Deletes the row currently assigned to `row_id`, clears transitions
    /// referencing it, and shifts later assignments down by one.
    /// NOTE(review): transitions are compared against `row_id` while the
    /// assignment shift compares against `row_idx` — confirm both index
    /// spaces agree for all callers.
    pub fn remove_row_id(&mut self, row_id: usize) {
        let row_idx = self.row_assignments[row_id];
        self.rows.remove(row_idx);
        for row in self.rows_mut() {
            // Remove all transitions that reference this state
            for transition in row.transitions_mut() {
                if *transition == Some(row_id) {
                    *transition = None;
                }
            }
        }
        for t in self.row_assignments.iter_mut() {
            if *t > row_idx {
                *t -= 1;
            }
        }
    }
    /// Collapses a set of equivalent states into one by repeatedly merging
    /// pairs until a single representative remains.
    pub fn merge(&mut self, state: State) {
        debug!("Merging state: {:#?}", state);
        debug!("Alpha assigns before merge: {:?}", self.row_assignments);
        debug!("Table before merge: \n{}", self);
        let mut states: Vec<usize> = state.into_iter().collect();
        while states.len() > 1 {
            let to_remove = states.pop().unwrap();
            let to_keep = states.pop().unwrap();
            let to_remove_idx = self.row_assignments[to_remove];
            let to_keep_idx = self.row_assignments[to_keep];
            debug!("To Keep: {} => {}", to_keep, to_keep_idx);
            debug!("To Remove: {} => {}", to_remove, to_remove_idx);
            self.merge_two(to_keep, to_remove);
            debug!("Table after merging {}, {}:\n{}", to_keep, to_remove, self);
            debug!(
                "Alpha assigns after merging of two states: {:?}",
                self.row_assignments
            );
            // The survivor goes back on the list so the next pair merges
            // into the same representative.
            states.push(to_keep);
        }
        debug!("Alpha assigns after merge: {:?}", self.row_assignments);
        debug!("Table after merge: \n{}", self);
    }
    /// Merges `to_remove` into `to_keep`: the survivor accepts if either
    /// state did, the removed row is dropped, inbound transitions are
    /// redirected to the survivor, and indices are re-compacted.
    pub fn merge_two(&mut self, to_keep: usize, to_remove: usize) {
        debug!("Keep: {} , Remove {}", to_keep, to_remove);
        debug!("Self at the start of merge_two \n{}", *self);
        let is_accepting = self[to_keep].is_accepting() || self[to_remove].is_accepting();
        self.rows[to_keep].set_accepting(is_accepting);
        self.rows.remove(to_remove);
        for row in self.rows_mut() {
            for trans in row.transitions_mut() {
                if let Some(t) = trans {
                    if *t == to_remove {
                        *t = to_keep;
                    }
                }
            }
        }
        self.make_indexable();
    }
}
impl From<Vec<Row>> for Table {
fn from(rows: Vec<Row>) -> Self {
let len = rows.len();
Table::new(rows, len)
}
}
/// Direct positional access to rows: `table[i]` is `table.rows[i]`.
impl Index<usize> for Table {
    type Output = Row;
    fn index(&self, index: usize) -> &Self::Output {
        self.rows.index(index)
    }
}
/// Mutable positional access to rows: `&mut table[i]`.
impl IndexMut<usize> for Table {
    fn index_mut(&mut self, index: usize) -> &mut Self::Output {
        self.rows.index_mut(index)
    }
}
/// Renders the table one row per line, delegating to `Row`'s `Display`.
impl fmt::Display for Table {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        self.rows()
            .iter()
            .try_for_each(|row| writeln!(f, "{}", row))
    }
}
|
use crate::{
grid::{
config::{Border as GBorder, ColoredConfig, Entity},
records::{ExactRecords, Records},
},
settings::CellOption,
};
/// Border represents a border of a Cell.
///
/// (The right-hand labels in the original diagram were wrong: both said
/// "corner top left" / "corner bottom right"; corrected below.)
///
/// ```text
///                         top border
///                             |
///                             V
/// corner top left ------> +_______+ <---- corner top right
///                         |       |
/// left border ----------> |  cell | <---- right border
///                         |       |
/// corner bottom left ---> +_______+ <---- corner bottom right
///                             ^
///                             |
///                        bottom border
/// ```
///
/// ```rust,no_run
/// # use tabled::{Table, settings::{Modify, style::{Style, Border}, object::Rows}};
/// # let data: Vec<&'static str> = Vec::new();
/// let table = Table::new(&data)
///     .with(Style::ascii())
///     .with(Modify::new(Rows::single(0)).with(Border::default().top('x')));
/// ```
#[derive(Debug, Default, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
pub struct Border(GBorder<char>);
impl Border {
    /// This function constructs a cell border with all sides set.
    #[allow(clippy::too_many_arguments)]
    pub const fn full(
        top: char,
        bottom: char,
        left: char,
        right: char,
        top_left: char,
        top_right: char,
        bottom_left: char,
        bottom_right: char,
    ) -> Self {
        Self(GBorder::full(
            top,
            bottom,
            left,
            right,
            top_left,
            top_right,
            bottom_left,
            bottom_right,
        ))
    }
    /// This function constructs a cell border with every side set to the
    /// given character. It behaves like [`Border::full`] with the same
    /// character on each side.
    pub const fn filled(c: char) -> Self {
        Self::full(c, c, c, c, c, c, c, c)
    }
    /// Returns an [`EmptyBorder`], which removes a cell's existing borders
    /// when applied.
    pub const fn empty() -> EmptyBorder {
        EmptyBorder
    }
    /// Set a top border character.
    pub const fn top(mut self, c: char) -> Self {
        self.0.top = Some(c);
        self
    }
    /// Set a bottom border character.
    pub const fn bottom(mut self, c: char) -> Self {
        self.0.bottom = Some(c);
        self
    }
    /// Set a left border character.
    pub const fn left(mut self, c: char) -> Self {
        self.0.left = Some(c);
        self
    }
    /// Set a right border character.
    pub const fn right(mut self, c: char) -> Self {
        self.0.right = Some(c);
        self
    }
    /// Set a top left intersection character.
    pub const fn corner_top_left(mut self, c: char) -> Self {
        self.0.left_top_corner = Some(c);
        self
    }
    /// Set a top right intersection character.
    pub const fn corner_top_right(mut self, c: char) -> Self {
        self.0.right_top_corner = Some(c);
        self
    }
    /// Set a bottom left intersection character.
    pub const fn corner_bottom_left(mut self, c: char) -> Self {
        self.0.left_bottom_corner = Some(c);
        self
    }
    /// Set a bottom right intersection character.
    pub const fn corner_bottom_right(mut self, c: char) -> Self {
        self.0.right_bottom_corner = Some(c);
        self
    }
}
/// Applying a `Border` sets it on every cell selected by the entity.
impl<R> CellOption<R, ColoredConfig> for Border
where
    R: Records + ExactRecords,
{
    fn change(self, records: &mut R, cfg: &mut ColoredConfig, entity: Entity) {
        let (count_rows, count_cols) = (records.count_rows(), records.count_columns());
        for pos in entity.iter(count_rows, count_cols) {
            cfg.set_border(pos, self.0);
        }
    }
}
impl From<GBorder<char>> for Border {
fn from(b: GBorder<char>) -> Border {
Border(b)
}
}
impl From<Border> for GBorder<char> {
fn from(value: Border) -> Self {
value.0
}
}
/// A marker option that removes all borders from the selected cells;
/// obtained via [`Border::empty`].
#[derive(Debug, Default, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
pub struct EmptyBorder;
/// Applying `EmptyBorder` removes the border of every selected cell.
impl<R> CellOption<R, ColoredConfig> for EmptyBorder
where
    R: Records + ExactRecords,
{
    fn change(self, records: &mut R, cfg: &mut ColoredConfig, entity: Entity) {
        let shape = (records.count_rows(), records.count_columns());
        let (count_rows, count_cols) = shape;
        for pos in entity.iter(count_rows, count_cols) {
            cfg.remove_border(pos, shape);
        }
    }
}
|
pub mod captcha;
pub mod extitem;
pub mod item;
pub mod search;
use serde::{Deserialize, Serialize};
use std::fs::File;
use std::io::Write;
/// Application configuration, persisted as JSON at
/// `$HOME/.config/rarbg/config` (see `Config::dump_config`).
#[derive(Clone, Deserialize, Serialize)]
pub struct Config {
    /// Session cookie attached to outgoing requests.
    pub cookie: String,
    /// Base URL used for requests.
    pub base_url: String,
}
impl Config {
    /// Serializes this config as JSON and writes it to
    /// `$HOME/.config/rarbg/config`, creating the directory if needed.
    ///
    /// # Panics
    /// Panics if `HOME` is unset, serialization fails, or any I/O step fails.
    pub fn dump_config(&self) {
        let json = serde_json::to_string(self).unwrap();
        let home = std::env::var("HOME").unwrap();
        let path = format!("{}/.config/rarbg", home);
        std::fs::create_dir_all(&path).unwrap();
        // `write_all` (not `write`): a bare `write` may perform a short
        // write and silently drop the tail of the config.
        File::create(format!("{}/config", path))
            .and_then(|mut x| x.write_all(json.as_bytes()))
            .unwrap();
    }
}
|
use super::pherepherial::gpioa;
use super::pherepherial::rcc;
use super::pherepherial::usart1;
/// Configures USART1 on PA9 (TX) / PA10 (RX) and sends a first frame.
/// NOTE(review): several of the original comments were attached to the wrong
/// register writes; they are re-placed below, code unchanged.
pub fn init() {
    // PA9, PA10
    rcc::apb2enr::iopaen::set(1);   // enable GPIOA clock
    rcc::apb2enr::afioen::set(1);   // enable alternate-function I/O clock
    gpioa::crh::mode9::set(1);      // PA9: output mode (STM32F1 MODE=01 -> 10 MHz; confirm vs RM0008)
    gpioa::crh::cnf9::set(2);       // PA9: alternate-function push-pull (CNF=10)
    gpioa::crh::mode10::set(0);     // PA10: input mode
    gpioa::crh::cnf10::set(1);      // PA10: floating input (CNF=01)
    rcc::apb2enr::usart1en::set(1); // enable USART1 clock
    // Enable the USART by writing the UE bit in USART_CR1 register to 1.
    usart1::cr1::ue::set(1);
    // Program the number of stop bits in USART_CR2 (STOP=0b00 -> 1 stop bit).
    // NOTE(review): the M bit (word length) is never written here, so it
    // stays at its reset value (8 data bits) — confirm that is intended.
    usart1::cr2::stop::set(0);
    // Select the desired baud rate using the USART_BRR register
    usart1::brr::div_mantissa::set(0xEA);
    usart1::brr::div_fraction::set(0x6);
    // Set the TE bit in USART_CR1 to send an idle frame as first transmission
    usart1::cr1::te::set(1);
    // Write the data to send in the USART_DR register (this clears the TXE bit).
    // Repeat this for each data to be transmitted in case of single buffer.
    // NOTE(review): 0xfff exceeds one data byte — presumably a dummy first
    // write to kick off transmission; confirm intent.
    usart1::dr::dr::set(0xfff);
}
/// Transmits one byte over USART1, blocking until the transmit data
/// register is empty.
pub fn send(byte: u8) {
    // Busy-wait for TXE. The original loop body contained a `break`, which
    // exited after a single poll — effectively no wait at all — so a byte
    // still in the data register could be overwritten and lost.
    while usart1::sr::txe::get() == 0 {}
    usart1::dr::dr::set(byte as u32);
}
|
use azure_core::prelude::*;
use azure_identity::token_credentials::DefaultAzureCredential;
use azure_identity::token_credentials::TokenCredential;
use azure_storage::core::prelude::*;
use azure_storage::data_lake::prelude::*;
use chrono::Utc;
use std::error::Error;
#[tokio::main]
/// Example: create a Data Lake file system, create two files, demonstrate
/// both rename flavours (the "if not exists" one is expected to fail), then
/// delete the file system.
async fn main() -> Result<(), Box<dyn Error + Send + Sync>> {
    // `?` instead of `.unwrap()`: main already returns a boxed error, so
    // client-construction failures should propagate, not panic.
    let data_lake_client = create_data_lake_client().await?;
    // Timestamp suffix keeps repeated example runs from colliding.
    let file_system_name = format!("azurerustsdk-datalake-example01-{}", Utc::now().timestamp());
    let file_system_client = data_lake_client
        .clone()
        .into_file_system_client(file_system_name.clone());
    println!("creating file system '{}'...", &file_system_name);
    let create_fs_response = file_system_client.create().execute().await?;
    println!("create file system response == {:?}\n", create_fs_response);
    let file_path1 = "some/path/example-file1.txt";
    let file_path2 = "some/path/example-file2.txt";
    println!("creating file '{}'...", file_path1);
    let create_file_response1 = file_system_client
        .create_file(Context::default(), file_path1, FileCreateOptions::default())
        .await?;
    println!("create file response == {:?}\n", create_file_response1);
    println!("creating file '{}'...", file_path2);
    let create_file_response2 = file_system_client
        .create_file(Context::default(), file_path2, FileCreateOptions::default())
        .await?;
    println!("create file response == {:?}\n", create_file_response2);
    println!(
        "renaming file '{}' to '{}' if not exists...",
        file_path1, file_path2
    );
    // Destination already exists, so this variant is expected to fail;
    // the result is printed rather than propagated.
    let rename_file_if_not_exists_result = file_system_client
        .rename_file_if_not_exists(Context::default(), file_path1, file_path2)
        .await;
    println!(
        "rename file result (should fail) == {:?}\n",
        rename_file_if_not_exists_result
    );
    println!("renaming file '{}' to '{}'...", file_path1, file_path2);
    let rename_file_response = file_system_client
        .rename_file(
            Context::default(),
            file_path1,
            file_path2,
            FileRenameOptions::default(),
        )
        .await?;
    println!("rename file response == {:?}\n", rename_file_response);
    println!("deleting file system...");
    let delete_fs_response = file_system_client.delete().execute().await?;
    println!("delete file system response == {:?}\n", delete_fs_response);
    Ok(())
}
/// Builds a `DataLakeClient` from environment variables.
///
/// Reads `ADLSGEN2_STORAGE_ACCOUNT` and `ADLSGEN2_STORAGE_MASTER_KEY`
/// (panics with a setup hint when either is unset), then obtains a bearer
/// token for the Azure Storage resource via the default credential chain.
async fn create_data_lake_client() -> Result<DataLakeClient, Box<dyn Error + Send + Sync>> {
    let account = std::env::var("ADLSGEN2_STORAGE_ACCOUNT")
        .expect("Set env variable ADLSGEN2_STORAGE_ACCOUNT first!");
    let master_key = std::env::var("ADLSGEN2_STORAGE_MASTER_KEY")
        .expect("Set env variable ADLSGEN2_STORAGE_MASTER_KEY first!");
    let http_client = new_http_client();
    let storage_account_client =
        StorageAccountClient::new_access_key(http_client.clone(), &account, &master_key);
    // Fixed resource id for Azure Storage token requests.
    let resource_id = "https://storage.azure.com/";
    println!("getting bearer token for '{}'...", resource_id);
    let bearer_token = DefaultAzureCredential::default()
        .get_token(resource_id)
        .await?;
    println!("token expires on {}\n", bearer_token.expires_on);
    let storage_client = storage_account_client.as_storage_client();
    Ok(DataLakeClient::new(
        storage_client,
        account,
        bearer_token.token.secret().to_owned(),
        None,
    ))
}
|
use std::{fmt::Display, ops::Sub};
use derive_more::From;
use serde::{Deserialize, Serialize};
/// Messages sent from a client to the server.
#[derive(Debug, Clone, Serialize, Deserialize, From)]
pub enum ClientMessage {
    Init(ClientInit),
    Update(ClientUpdate),
}
/// Initial handshake payload identifying the video and the user.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ClientInit {
    pub video_hash: String,
    pub name: String,
}
/// Incremental player-state notifications from a client.
#[derive(Debug, Clone, Copy, Serialize, Deserialize)]
pub enum ClientUpdate {
    // Periodic position report vs. explicit user actions below.
    Timestamp { time: Time },
    Seek { time: Time },
    Pause { time: Time },
    Resume { time: Time },
    SpeedChange { factor: f64 },
}
/// Reasons the server may terminate a client connection.
#[derive(Debug, Clone, Copy, Serialize, Deserialize)]
pub enum ServerDisconnect {
    // Presumably the client's `video_hash` did not match the session's
    // video — confirm against the server-side handshake logic.
    IncorrectHash,
}
/// Why a `PlayerUpdate` was emitted.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum UpdateCause {
    UserAction(String), // Name of client responsible.
    Synchronize,
}
/// Partial player state; `None` fields are omitted from serialization.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PlayerState {
    #[serde(skip_serializing_if = "Option::is_none")]
    pub time: Option<Time>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub speed: Option<f64>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub paused: Option<bool>,
}
/// A player-state change together with its cause.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PlayerUpdate {
    pub state: PlayerState,
    pub cause: UpdateCause,
}
/// Presence notifications, carrying the affected user's name.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum UserUpdate {
    Connected(String),
    Disconnected(String),
}
/// Snapshot sent to a client right after it connects.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ServerInit {
    pub player_state: PlayerState,
    pub users: Vec<String>,
}
/// Messages sent from the server to clients; serialized untagged, so the
/// variant is inferred from the payload shape on the wire.
#[derive(Debug, Clone, From, Serialize, Deserialize)]
#[serde(untagged)]
pub enum ServerMessage {
    Init(ServerInit),
    UserUpdate(UserUpdate),
    PlayerUpdate(PlayerUpdate),
    Disconnect(ServerDisconnect),
}
impl PlayerUpdate {
    /// Creates an update with an entirely empty player state.
    pub fn new(cause: UpdateCause) -> Self {
        let state = PlayerState {
            time: None,
            speed: None,
            paused: None,
        };
        Self { state, cause }
    }
    /// Builder: sets the playback position.
    pub fn with_time(mut self, time: Time) -> Self {
        self.state.time = Some(time);
        self
    }
    /// Builder: sets the paused flag.
    pub fn with_pause(mut self, paused: bool) -> Self {
        self.state.paused = Some(paused);
        self
    }
    /// Builder: sets the playback speed factor.
    pub fn with_speed(mut self, speed: f64) -> Self {
        self.state.speed = Some(speed);
        self
    }
}
/// Struct for storing the time of a played video.
/// Used for better formatting.
/// It cannot store NaN.
#[derive(Debug, PartialEq, PartialOrd, Clone, Copy, Serialize, Deserialize)]
pub struct Time {
    // Invariant: always finite (enforced by `from_seconds`), which makes
    // the `Ord` impl below total.
    seconds: f64,
}
impl Time {
    /// Creates a `Time` from a number of seconds.
    ///
    /// # Panics
    /// Panics when `seconds` is NaN or infinite, preserving the invariant
    /// that every stored value is finite.
    pub fn from_seconds(seconds: f64) -> Self {
        // `is_finite()` is false exactly for NaN and the infinities.
        if !seconds.is_finite() {
            panic!("Time cannot be NaN or Infinite.");
        }
        Self { seconds }
    }
    /// The zero timestamp (00:00:00).
    pub fn zero() -> Self {
        Self::from_seconds(0.0)
    }
    /// Returns the stored value in seconds.
    pub fn as_seconds(&self) -> f64 {
        self.seconds
    }
}
// `Time` never stores NaN (see `from_seconds`), so its partial order is
// in fact total.
impl Eq for Time {}
impl Ord for Time {
    fn cmp(&self, other: &Self) -> std::cmp::Ordering {
        // The derived `PartialOrd` compares the single `seconds` field.
        self.partial_cmp(other)
            .expect("Cannot fail, because time can never store NaN.")
    }
}
impl Display for Time {
    /// Formats the time as `HH:MM:SS` (fractional seconds truncated).
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let total_seconds = self.seconds.floor() as u32;
        let seconds = total_seconds % 60;
        let minutes = (total_seconds / 60) % 60;
        // Bug fix: hours were previously taken `% 60`, which silently wrapped
        // the display for durations of 60 hours or more. Hours have no
        // natural modulus here, so show the full count.
        let hours = total_seconds / (60 * 60);
        write!(f, "{:02}:{:02}:{:02}", hours, minutes, seconds)
    }
}
impl Sub for Time {
    type Output = f64;
    /// Signed difference between two timestamps, in seconds.
    fn sub(self, other: Self) -> Self::Output {
        self.as_seconds() - other.as_seconds()
    }
}
|
use crate::set::Set;
use std::collections::btree_set::BTreeSet;
// `Set` adapter over the standard library's ordered set.
impl<A> Set<A> for BTreeSet<A>
where
    A: Ord,
{
    // Number of elements in the set.
    fn size(&self) -> usize {
        self.len()
    }
    // Fully-qualified call disambiguates the inherent `BTreeSet::contains`
    // from the `Set::contains` being defined here.
    fn contains(&self, value: &A) -> bool {
        BTreeSet::contains(self, value)
    }
    // True when every element of `self` is also in `other`.
    fn is_subset<R: Set<A>>(&self, other: R) -> bool {
        self.iter().all(|item| other.contains(item))
    }
    // Deep copy; only available when the element type is `Clone`.
    fn cloned(&self) -> Self
    where
        A: Clone,
    {
        self.clone()
    }
}
|
extern crate tplinker;
use std::net::SocketAddr;
use clap::{App, Arg, SubCommand};
use tplinker::{
capabilities::Switch,
datatypes::DeviceData,
devices::Device,
};
fn command_discover(json: bool) {
for (addr, data) in tplinker::discover().unwrap() {
let device = Device::from_data(addr, &data);
if json {
discover_print_json(addr, data, device);
} else {
discover_print_human(addr, data, device);
}
}
}
/// Prints one tab-separated line of device info, followed by the device's
/// switch state when it is a switchable model.
fn discover_print_human(addr: SocketAddr, data: DeviceData, device: Device) {
    let info = data.sysinfo();
    println!(
        "{}\t{}\t{}\t{}\t{}",
        addr,
        pad(&info.alias, 18),
        pad(&info.hw_type, 20),
        pad(&info.dev_name, 40),
        info.model,
    );
    match device {
        Device::HS100(d) => is_on(&d),
        Device::HS110(d) => is_on(&d),
        Device::LB110(d) => is_on(&d),
        _ => println!("{} not switchable", info.alias),
    }
}
/// Right-pads `value` with spaces to at least `padding` bytes; values that
/// are already long enough are returned unchanged.
fn pad(value: &str, padding: usize) -> String {
    let mut padded = String::with_capacity(padding.max(value.len()));
    padded.push_str(value);
    while padded.len() < padding {
        padded.push(' ');
    }
    padded
}
// Prints the switch-state query result (Debug-formatted, so either
// `Ok(bool)` or an error) for any device with the `Switch` capability.
fn is_on<T: Switch>(device: &T) {
    println!("{:?}", device.is_on());
}
// Prints the raw device data as a single JSON line; address and typed
// device handle are unused in this format.
fn discover_print_json(_addr: SocketAddr, data: DeviceData, _device: Device) {
    println!("{}", serde_json::to_string(&data).unwrap());
}
/// CLI entry point: parses arguments and dispatches to the `discover`
/// subcommand when given.
fn main() {
    let app = App::new("TPLink smart device CLI")
        .version("0.1")
        .author("Rob Young <rob@robyoung.digital>")
        .about("Discover and interact with TPLink smart devices on the local network.")
        .arg(
            Arg::with_name("json")
                .long("json")
                .takes_value(false)
                .help("Respond with JSON."),
        )
        .subcommand(
            SubCommand::with_name("discover").about("Discover devices on the local network"),
        );
    let matches = app.get_matches();
    let as_json = matches.is_present("json");
    if matches.subcommand_matches("discover").is_some() {
        command_discover(as_json);
    }
}
|
extern crate app_dirs;
extern crate clap;
extern crate rand;
#[macro_use]
extern crate log;
#[macro_use]
extern crate serde_derive;
#[macro_use]
extern crate serde;
pub mod shared;
pub mod librarian;
pub mod planner;
pub mod cli;
|
/// Sum of row `n` of the odd-number triangle (1; 3,5; 7,9,11; ...).
///
/// Row `n` contains `n` consecutive odd numbers centered on `n^2`, so the
/// sum collapses to `n^3`.
fn row_sum_odd_numbers(n: i64) -> i64 {
    // Expression form instead of an explicit `return`.
    n * n * n
}
|
mod framework;
mod app;
/// Configures the window, then hands control to the framework runner.
fn main() {
    let mut window = framework::WindowSettings::new();
    window.title = String::from("Main");
    window.window_size = (1920, 1080);
    window.gl_version = (4, 5);
    framework::Runner::new(app::App::default(), window).run();
}
|
use bitwise::bitwiseops;
use bitwise::hex_rep::ToHexRep;
use challengeinfo::challenge::{Challenge, ChallengeInfo};
// Metadata for the "Fixed XOR" cryptopals challenge.
// NOTE(review): `set_number` is 2 but the URL points at set 1 — confirm
// which is intended.
pub const INFO2: ChallengeInfo<'static> = ChallengeInfo {
    set_number: 2,
    challenge_number: 2,
    title: "Fixed XOR",
    description: "",
    url: "http://cryptopals.com/sets/1/challenges/2",
};
// Descriptor binding the metadata to its implementation function.
pub const CHALLENGE2: Challenge<'static> = Challenge {
    info: INFO2,
    func: execute,
};
/// Solves the challenge: XOR two equal-length hex strings and return the
/// resulting plaintext. Asserts against the published expected output.
fn execute() -> String {
    let lhs = String::from("1c0111001f010100061a024b53535009181c").to_hex().unwrap();
    let rhs = String::from("686974207468652062756c6c277320657965").to_hex().unwrap();
    let xored = bitwiseops::exact_block_xor(lhs.as_slice(), rhs.as_slice()).unwrap();
    // Built-in sanity check against the documented answer.
    let expected = String::from("746865206b696420646f6e277420706c6179").to_hex().unwrap();
    assert_eq!(expected, xored);
    String::from_utf8(xored).unwrap()
}
|
pub mod api_error;
pub mod client;
pub mod downloads;
pub mod query;
pub mod request_counter;
pub mod sso;
pub mod update_checker;
pub use api_error::*;
pub use client::*;
pub use downloads::*;
pub use query::*;
pub use request_counter::RequestCounter;
pub use update_checker::*;
|
use clap::{Arg, App};
use err_derive::Error;
use serde::{Serialize, Deserialize};
use shell_macro::shell;
use std::ffi::OsString;
use std::env;
use std::fs::File;
use std::path::Path;
// Build metadata embedded via the `shell!` proc-macro — presumably the
// commands run at compile time and their stdout is inlined; confirm
// against the `shell_macro` crate.
pub const BUILD_VERSION: &str = shell!("git describe --tags $(git rev-list --tags --max-count=1)");
pub const BUILD_COMMIT_ID: &str = shell!("git log --format=\"%h\" -n 1");
pub const BUILD_TIME: &str = shell!("date +%F");
pub const AUTHORS: &str = shell!("git log --pretty=\"%an <%ae>\" | sort | uniq");
/// Errors produced while loading or validating the configuration.
#[derive(Debug, Error)]
pub enum ConfigError {
    /// I/O failure while opening/reading the config file.
    #[error(display = "{}", _0)]
    Io(#[error(source)] #[error(from)] std::io::Error),
    /// Malformed JSON in the config file.
    #[error(display = "{}", _0)]
    Json(#[error(source)] #[error(from)] serde_json::Error),
    /// No recognized subcommand was supplied on the command line.
    #[error(display = "invalid subcommand")]
    InvalidSubcommand,
    /// A required setting was absent from both CLI and config file.
    #[error(display = "missing {} argument", _0)]
    MissingArgument(String),
}
/// Options for the `init` subcommand (database bootstrap).
#[derive(Debug, Clone)]
pub struct InitConfig {
    pub db: String,
    pub redis: String,
    // When true, all databases are wiped before initialization.
    pub reset: bool,
    pub superuser_username: Option<String>,
    // None means the password is prompted for on the CLI (per the help text).
    pub superuser_password: Option<String>,
}
/// Resolved settings for user-uploaded media.
#[derive(Debug, Clone)]
pub struct MediaConfig {
    pub root: String,
    pub url: String,
    pub serve: bool,
}
/// Resolved SMTP settings; credentials are optional.
#[derive(Debug, Clone)]
pub struct SmtpConfig {
    pub server: String,
    pub sender: String,
    pub username: Option<String>,
    pub password: Option<String>,
}
/// Optional paths to the GeoIP (ASN/City, v4/v6) database files.
#[derive(Debug, Clone)]
pub struct GeoIpConfig {
    pub asn: Option<String>,
    pub asn_v6: Option<String>,
    pub city: Option<String>,
    pub city_v6: Option<String>,
}
/// Options for the `start` subcommand (run the server).
#[derive(Debug, Clone)]
pub struct StartConfig {
    pub db: String,
    pub redis: String,
    pub bind: String,
    pub site: String,
    pub media: MediaConfig,
    pub smtp: SmtpConfig,
    pub geoip: GeoIpConfig,
}
/// Fully-resolved program configuration, one variant per subcommand.
#[derive(Debug, Clone)]
pub enum Config {
    Init(InitConfig),
    // Boxed to keep the enum small; StartConfig is the large variant.
    Start(Box<StartConfig>),
}
/// On-disk (JSON) counterpart of `MediaConfig`; every field is optional so
/// CLI flags can override file values.
///
/// `Default` is derived: the hand-written all-`None` impl it replaces was
/// identical to what the derive produces for `Option` fields.
#[derive(Serialize, Deserialize, Default)]
pub struct MediaConfigFile {
    root: Option<String>,
    url: Option<String>,
    serve: Option<bool>,
}
/// On-disk (JSON) counterpart of `SmtpConfig`; all fields optional.
///
/// `Default` is derived, replacing an equivalent hand-written all-`None`
/// impl.
#[derive(Serialize, Deserialize, Default)]
pub struct SmtpConfigFile {
    pub server: Option<String>,
    pub sender: Option<String>,
    pub username: Option<String>,
    pub password: Option<String>,
}
/// On-disk (JSON, camelCase keys) counterpart of `GeoIpConfig`.
///
/// `Default` is derived, replacing an equivalent hand-written all-`None`
/// impl.
#[derive(Serialize, Deserialize, Default)]
#[serde(rename_all = "camelCase")]
pub struct GeoIpConfigFile {
    pub asn: Option<String>,
    pub asn_v6: Option<String>,
    pub city: Option<String>,
    pub city_v6: Option<String>,
}
/// Root of the JSON config file (camelCase keys). Every field is optional;
/// CLI flags take precedence during merging in `Config::from`.
///
/// `Default` is derived, replacing an equivalent hand-written all-`None`
/// impl.
#[derive(Serialize, Deserialize, Default)]
#[serde(rename_all = "camelCase")]
pub struct ConfigFile {
    db: Option<String>,
    redis: Option<String>,
    bind: Option<String>,
    site: Option<String>,
    media: Option<MediaConfigFile>,
    smtp: Option<SmtpConfigFile>,
    geoip: Option<GeoIpConfigFile>,
}
impl ConfigFile {
    /// Reads and deserializes a JSON config file from `path`.
    ///
    /// # Errors
    /// `ConfigError::Io` when the file cannot be opened, `ConfigError::Json`
    /// when it does not parse.
    pub fn load<P: AsRef<Path>>(path: P) -> Result<Self, ConfigError> {
        let file = File::open(path)?;
        let parsed = serde_json::from_reader(&file)?;
        Ok(parsed)
    }
}
impl Config {
    /// Builds the configuration from the process's command-line arguments.
    pub fn from_env() -> Result<Self, ConfigError> {
        Self::from(&mut env::args_os())
    }
    /// Parses configuration from an arbitrary argument iterator.
    ///
    /// Precedence: command-line flags override values from the optional JSON
    /// config file (`--config FILE`).
    ///
    /// # Errors
    /// `MissingArgument` when a required value is absent from both sources;
    /// `InvalidSubcommand` when neither `init` nor `start` was given; I/O and
    /// JSON errors from loading the config file.
    pub fn from<I, T>(itr: I) -> Result<Self, ConfigError>
    where
        I: IntoIterator<Item = T>,
        T: Into<OsString> + Clone,
    {
        let matches = App::new("cashier-server")
            .version(&format!("{} ({} {})", BUILD_VERSION.trim(),
                BUILD_COMMIT_ID.trim(), BUILD_TIME.trim())[..])
            .author(&AUTHORS.trim().split('\n').collect::<Vec<&str>>().join(", ")[..])
            .about("Rust implementation for cashier server")
            .arg(Arg::with_name("config")
                .short('c')
                .long("config")
                .value_name("FILE")
                .about("Sets a custom config file")
                .takes_value(true))
            .arg(Arg::with_name("db")
                .long("db")
                .value_name("URL")
                .about("Sets the PostgreSQL connection")
                .takes_value(true))
            .arg(Arg::with_name("redis")
                .long("redis")
                .value_name("URL")
                .about("Sets the redis connection")
                .takes_value(true))
            .arg(Arg::with_name("bind")
                .long("bind")
                .value_name("ADDR:PORT")
                .about("Address to bind")
                .takes_value(true))
            .arg(Arg::with_name("site")
                .long("site")
                .about("Site for front-end")
                .takes_value(true))
            .arg(Arg::with_name("media-root")
                .long("media-root")
                .value_name("PATH")
                .about("Path to user-uploaded contents")
                .takes_value(true))
            .arg(Arg::with_name("media-url")
                .long("media-url")
                .value_name("URL")
                .about("URL prefix for user-uploaded contents")
                .takes_value(true))
            .arg(Arg::with_name("media-serve")
                .long("media-serve")
                .about("Serves user-uploaded contents"))
            .arg(Arg::with_name("smtp-server")
                .long("smtp-server")
                .about("SMTP server used to send e-mail")
                .takes_value(true))
            .arg(Arg::with_name("smtp-sender")
                .long("smtp-sender")
                .about("SMTP sender information in the \"From\" header")
                .takes_value(true))
            .arg(Arg::with_name("smtp-username")
                .long("smtp-username")
                .about("SMTP username for authentication")
                .takes_value(true))
            .arg(Arg::with_name("smtp-password")
                .long("smtp-password")
                .about("SMTP password for authentication")
                .takes_value(true))
            .arg(Arg::with_name("geoip-asn")
                .long("geoip-asn")
                .about("IPv4 ASN info database")
                .takes_value(true))
            .arg(Arg::with_name("geoip-asn-v6")
                .long("geoip-asn-v6")
                .about("IPv6 ASN info database")
                .takes_value(true))
            .arg(Arg::with_name("geoip-city")
                .long("geoip-city")
                .about("IPv4 City info database")
                .takes_value(true))
            .arg(Arg::with_name("geoip-city-v6")
                .long("geoip-city-v6")
                .about("IPv6 City info database")
                .takes_value(true))
            .subcommand(App::new("init")
                .about("Initializes all databases")
                .arg(Arg::with_name("reset")
                    .long("reset")
                    .about("Clears all databases before initialization. THIS IS DANGEROUS"))
                .arg(Arg::with_name("superuser-username")
                    .long("superuser-username")
                    .value_name("USERNAME")
                    .about("Creates a superuser and sets superuser's username")
                    .takes_value(true))
                .arg(Arg::with_name("superuser-password")
                    .long("superuser-password")
                    .value_name("PASSWORD")
                    .about("Sets superuser's password. Leaves empty to prompt from CLI")
                    .takes_value(true)))
            .subcommand(App::new("start")
                .about("Starts the server"))
            .get_matches_from(itr);
        // Start from the config file (if any), then let CLI flags override.
        let mut config_file = ConfigFile::default();
        if let Some(path) = matches.value_of("config") {
            config_file = ConfigFile::load(path)?;
        }
        config_file.db = matches.value_of("db").map(String::from).or(config_file.db);
        config_file.redis = matches.value_of("redis").map(String::from).or(config_file.redis);
        config_file.bind = matches.value_of("bind").map(String::from).or(config_file.bind);
        config_file.site = matches.value_of("site").map(String::from).or(config_file.site);
        // Merge each nested section in place, falling back to a default
        // (all-None) section when the file did not provide one.
        let mut default_media_config_file = MediaConfigFile::default();
        let media_config_file = config_file.media.as_mut()
            .unwrap_or(&mut default_media_config_file);
        media_config_file.root = matches.value_of("media-root").map(String::from)
            .or_else(|| media_config_file.root.clone());
        media_config_file.url = matches.value_of("media-url").map(String::from)
            .or_else(|| media_config_file.url.clone());
        if matches.is_present("media-serve") {
            media_config_file.serve = Some(true);
        }
        let mut default_smtp_config_file = SmtpConfigFile::default();
        let smtp_config_file = config_file.smtp.as_mut()
            .unwrap_or(&mut default_smtp_config_file);
        smtp_config_file.server = matches.value_of("smtp-server").map(String::from)
            .or_else(|| smtp_config_file.server.clone());
        smtp_config_file.sender = matches.value_of("smtp-sender").map(String::from)
            .or_else(|| smtp_config_file.sender.clone());
        smtp_config_file.username = matches.value_of("smtp-username").map(String::from)
            .or_else(|| smtp_config_file.username.clone());
        smtp_config_file.password = matches.value_of("smtp-password").map(String::from)
            .or_else(|| smtp_config_file.password.clone());
        let mut default_geoip_config_file = GeoIpConfigFile::default();
        let geoip_config_file = config_file.geoip.as_mut()
            .unwrap_or(&mut default_geoip_config_file);
        geoip_config_file.asn = matches.value_of("geoip-asn").map(String::from)
            .or_else(|| geoip_config_file.asn.clone());
        geoip_config_file.asn_v6 = matches.value_of("geoip-asn-v6").map(String::from)
            .or_else(|| geoip_config_file.asn_v6.clone());
        geoip_config_file.city = matches.value_of("geoip-city").map(String::from)
            .or_else(|| geoip_config_file.city.clone());
        geoip_config_file.city_v6 = matches.value_of("geoip-city-v6").map(String::from)
            .or_else(|| geoip_config_file.city_v6.clone());
        match matches.subcommand() {
            ("init", Some(sub_matches)) => Ok(Config::Init(InitConfig {
                db: config_file.db
                    .ok_or_else(|| ConfigError::MissingArgument("database".into()))?,
                redis: config_file.redis
                    .ok_or_else(|| ConfigError::MissingArgument("redis".into()))?,
                reset: sub_matches.is_present("reset"),
                superuser_username: sub_matches.value_of("superuser-username").map(String::from),
                superuser_password: sub_matches.value_of("superuser-password").map(String::from),
            })),
            ("start", Some(_)) => Ok(Config::Start(Box::new(StartConfig {
                db: config_file.db
                    .ok_or_else(|| ConfigError::MissingArgument("database".into()))?,
                redis: config_file.redis
                    .ok_or_else(|| ConfigError::MissingArgument("redis".into()))?,
                bind: config_file.bind
                    .ok_or_else(|| ConfigError::MissingArgument("bind".into()))?,
                site: config_file.site
                    .ok_or_else(|| ConfigError::MissingArgument("site".into()))?,
                media: MediaConfig {
                    root: media_config_file.root.clone()
                        .ok_or_else(|| ConfigError::MissingArgument("media.root".into()))?,
                    url: media_config_file.url.clone()
                        .ok_or_else(|| ConfigError::MissingArgument("media.url".into()))?,
                    // Fix: `Option::contains(&true)` was never stabilized on
                    // stable Rust; `== Some(true)` is the equivalent check.
                    serve: media_config_file.serve == Some(true),
                },
                smtp: SmtpConfig {
                    server: smtp_config_file.server.clone()
                        .ok_or_else(|| ConfigError::MissingArgument("smtp.server".into()))?,
                    sender: smtp_config_file.sender.clone()
                        .ok_or_else(|| ConfigError::MissingArgument("smtp.sender".into()))?,
                    username: smtp_config_file.username.clone(),
                    password: smtp_config_file.password.clone(),
                },
                geoip: GeoIpConfig {
                    asn: geoip_config_file.asn.clone(),
                    asn_v6: geoip_config_file.asn_v6.clone(),
                    city: geoip_config_file.city.clone(),
                    city_v6: geoip_config_file.city_v6.clone(),
                },
            }))),
            _ => Err(ConfigError::InvalidSubcommand)
        }
    }
}
|
//! This module contains a collection of various tools to use to manipulate
//! and control messages and data associated with raft.
// Copyright 2017 PingCAP, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// See the License for the specific language governing permissions and
// limitations under the License.
use std::u64;
use protobuf::Message;
/// A number to represent that there is no limit.
pub const NO_LIMIT: u64 = u64::MAX;
/// Truncates the list of entries down to a specific byte-length of
/// all entries together.
///
/// # Examples
///
/// ```
/// use raft::{util::limit_size, prelude::*};
///
/// let template = {
/// let mut entry = Entry::new();
/// entry.set_data("*".repeat(100).into_bytes());
/// entry
/// };
///
/// // Make a bunch of entries that are ~100 bytes long
/// let mut entries = vec![
/// template.clone(),
/// template.clone(),
/// template.clone(),
/// template.clone(),
/// template.clone(),
/// ];
///
/// assert_eq!(entries.len(), 5);
/// limit_size(&mut entries, 220);
/// assert_eq!(entries.len(), 2);
/// ```
pub fn limit_size<T: Message + Clone>(entries: &mut Vec<T>, max: u64) {
    // Nothing to do when unlimited; a list of zero or one entries is kept
    // as-is even if that single entry exceeds `max`.
    if max == NO_LIMIT || entries.len() <= 1 {
        return;
    }
    let mut size = 0;
    let limit = entries
        .iter()
        .take_while(|&e| {
            if size == 0 {
                // First entry: always accepted (result is never empty),
                // but its size still counts toward the budget.
                size += u64::from(Message::compute_size(e));
                true
            } else {
                // Subsequent entries: accept while the running total
                // (including this entry) stays within `max`.
                size += u64::from(Message::compute_size(e));
                size <= max
            }
        })
        .count();
    entries.truncate(limit);
}
|
/// Container-With-Most-Water, pruned brute force: for each left line, inner
/// scan starts where the pair could still beat the current best.
///
/// Returns 0 for fewer than two lines (the original indexed `height[1]`
/// unconditionally and panicked on empty/one-element input).
pub fn max_area_ori(height: Vec<i32>) -> i32 {
    use std::cmp::{max, min};
    if height.len() < 2 {
        return 0;
    }
    let mut largest = min(height[1], height[0]);
    for i in 0..height.len()-1 {
        // A zero-height left wall can never hold water (and would divide by 0).
        if height[i] == 0 {
            continue
        }
        // Any j < largest/height[i] gives (j-i)*height[i] < largest, so skip it.
        for j in max(i+1, (largest / height[i]) as usize)..height.len() {
            let volume = (j-i) as i32 * min(height[i], height[j]);
            largest = max(largest, volume);
        }
    }
    largest
}
/// Container-With-Most-Water, standard O(n) two-pointer solution: always
/// move the shorter side inward, since moving the taller side can only
/// shrink the area.
///
/// Returns 0 for fewer than two lines (the original indexed `height[n-1]`
/// and `height[0]` unconditionally and panicked on empty input).
pub fn max_area_std(height: Vec<i32>) -> i32 {
    use std::cmp::{max, min};
    let n = height.len();
    if n < 2 {
        return 0;
    }
    let mut largest = min(height[n-1], height[0]) * (n-1) as i32;
    let mut left = 0;
    let mut right = n-1;
    while left < right {
        largest = max(largest, min(height[right], height[left]) * (right-left) as i32);
        if height[left] > height[right] {
            right -= 1;
        } else {
            left += 1;
        }
    }
    largest
}
#[test]
fn test_max_area() {
    // Two-pointer solution on the canonical LeetCode case, plus a variant
    // containing a zero-height line.
    assert_eq!(max_area_std(vec![1,8,6,2,5,4,8,3,7]), 49);
    assert_eq!(max_area_std(vec![1,8,6,2,0,4,8,3,7]), 49);
    // The pruned brute-force solution must agree with the two-pointer one.
    assert_eq!(max_area_ori(vec![1,8,6,2,5,4,8,3,7]), 49);
}
|
use std::{borrow::Cow, iter, ops, slice};
use crate::{
bit_set,
bit_set::ops::*,
bit_set::private,
bit_set::{BoxWords, CowWords, Words},
};
/// Trait for an unsigned int; seen as a bit vector with fixed size
/// or an element of bit vector.
///
/// This trait is public but sealed.
pub trait Word:
    'static
    + Copy
    + Eq
    + Ord
    + Default
    + ops::Add<Output = Self>
    + ops::AddAssign
    + ops::Sub<Output = Self>
    + ops::SubAssign
    + ops::Mul<Output = Self>
    + ops::MulAssign
    + ops::Div<Output = Self>
    + ops::DivAssign
    + ops::Rem<Output = Self>
    + ops::RemAssign
    + ops::Shl<Output = Self>
    + ops::ShlAssign
    + ops::Shr<Output = Self>
    + ops::ShrAssign
    + ops::BitAnd<Output = Self>
    + ops::BitAndAssign
    + ops::BitOr<Output = Self>
    + ops::BitOrAssign
    + ops::BitXor<Output = Self>
    + ops::BitXorAssign
    + ops::Not<Output = Self>
    + iter::Sum
    + CastTo<u8>
    + CastTo<u16>
    + CastTo<u32>
    + CastTo<u64>
    + CastTo<u128>
    + CastTo<usize>
    + Capacity
    + Access
    + Count
    + Rank
    + Select0
    + Select1
    + Insert
    + Remove
    + private::Sealed
{
    /// The all-zero word.
    const ZERO: Self;
    /// A word with only bit `i` set, i.e. `1 << i`.
    fn bit(i: Self) -> Self;
    /// A mask with the low `i` bits set, i.e. `(1 << i) - 1`.
    fn mask(i: Self) -> Self;
    /// Search the smallest index in range at which f(i) is true,
    /// assuming that f(i) == true implies f(i+1) == true.
    fn search(end: Self, func: impl Fn(Self) -> bool) -> Self;
}
/// Lossless cast that never fails.
pub trait CastAs<T>: private::Sealed {
    fn cast_as(self) -> T;
}
/// Lossless cast that may fail.
pub trait CastTo<T>: private::Sealed {
    fn cast_to(self) -> Option<T>;
}
/// Short for `cast_to().unwrap()`
///
/// Panics when the value does not fit in `T`.
pub fn cast<U: CastTo<T>, T>(u: U) -> T {
    u.cast_to().unwrap()
}
// /// Short for `cast_to().unwrap_or_else(|| panic!(msg))`
// pub fn cast_expect<U: CastTo<T>, T>(u: U, msg: &'static str) -> T {
//     u.cast_to().unwrap_or_else(|| panic!(msg))
// }
// Identity cast: every word trivially casts to itself.
impl<T: Word> CastAs<T> for T {
    fn cast_as(self) -> T {
        self
    }
}
// Any infallible cast is also a fallible cast that always succeeds.
impl<T: Word, A: CastAs<T>> CastTo<T> for A {
    fn cast_to(self) -> Option<T> {
        Some(self.cast_as())
    }
}
// Implements `Word` for each primitive unsigned integer type.
macro_rules! impl_Word {
    ($($ty:ty),*) => ($(
        impl Word for $ty {
            const ZERO: Self = 0;
            fn bit(i: Self) -> Self {
                1 << i
            }
            fn mask(i: Self) -> Self {
                (1 << i) - 1
            }
            // Binary search for the first index in [0, end) where `func`
            // flips to true; returns `end` when it never does.
            fn search(end: $ty, func: impl Fn($ty) -> bool) -> $ty {
                let mut i = Self::ZERO;
                let mut j = end;
                while i < j {
                    let h = i + (j - i) / 2;
                    if func(h) {
                        j = h; // f(j) == true
                    } else {
                        i = h + 1; // f(i-1) == false
                    }
                }
                i // f(i-1) == false && f(i) (= f(j)) == true
            }
        }
    )*)
}
impl_Word!(u8, u16, u32, u64, u128, usize);
// Generates widening (always-lossless) casts from $small into each $large.
macro_rules! impl_CastAs {
    ( $small:ty, $( $large:ty ),* ) => ($(
        impl CastAs<$large> for $small {
            #[cfg_attr(feature = "cargo-clippy", allow(cast_lossless))]
            #[inline]
            fn cast_as(self) -> $large {
                self as $large
            }
        }
    )*)
}
impl_CastAs!(u8, u16, u32, u64, u128);
impl_CastAs!(u16, u32, u64, u128);
impl_CastAs!(u32, u64, u128);
impl_CastAs!(u64, u128);
// `usize` conversions depend on the platform pointer width, so they are
// gated per target.
#[cfg(target_pointer_width = "32")]
mod cast_as_for_usize {
    use super::*;
    impl_CastAs!(u8, usize);
    impl_CastAs!(u16, usize);
    impl_CastAs!(u32, usize);
    impl_CastAs!(usize, u32, u64, u128);
}
#[cfg(target_pointer_width = "64")]
mod cast_as_for_usize {
    use super::*;
    impl_CastAs!(u8, usize);
    impl_CastAs!(u16, usize);
    impl_CastAs!(u32, usize);
    impl_CastAs!(u64, usize);
    impl_CastAs!(usize, u64, u128);
}
// Generates narrowing casts from $large into each $small, returning `None`
// when the value exceeds the target type's maximum.
macro_rules! impl_CastTo {
    ( $large:ty, $( $small:ty ),* ) => ($(
        impl CastTo<$small> for $large {
            #[cfg_attr(feature = "cargo-clippy", allow(cast_lossless))]
            #[inline]
            fn cast_to(self) -> Option<$small> {
                // MAX is all ones of the (unsigned) target type.
                const MIN: $small = 0;
                const MAX: $small = !MIN;
                if self <= MAX as $large {
                    Some(self as $small)
                } else {
                    None
                }
            }
        }
    )*)
}
impl_CastTo!(u128, u64, u32, u16, u8);
impl_CastTo!(u64, u32, u16, u8);
impl_CastTo!(u32, u16, u8);
impl_CastTo!(u16, u8);
// `usize` conversions depend on the platform pointer width.
#[cfg(target_pointer_width = "32")]
mod cast_to_for_usize {
    use super::*;
    impl_CastTo!(u64, usize);
    impl_CastTo!(u128, usize);
    impl_CastTo!(usize, u8, u16);
}
#[cfg(target_pointer_width = "64")]
mod cast_to_for_usize {
    use super::*;
    impl_CastTo!(u128, usize);
    impl_CastTo!(usize, u8, u16, u32);
}
impl<T> Default for Words<T> {
    // `None` represents an all-zero block without allocating.
    fn default() -> Self {
        Words(None)
    }
}
impl<T> Words<T> {
    // Borrows the underlying buffer, if allocated.
    pub fn as_ref(&self) -> Option<&T> {
        self.0.as_ref()
    }
    // Mutably borrows the underlying buffer, if allocated.
    pub fn as_mut(&mut self) -> Option<&mut T> {
        self.0.as_mut()
    }
}
impl<T> Capacity for Words<T> {
    // Fixed bit capacity of a block, independent of the word type.
    const CAPACITY: u64 = bit_set::SHORT_BIT_MAX;
}
impl<T: Word> BoxWords<T> {
    // Number of words needed to cover the block's bit capacity.
    pub const LEN: usize = (Self::CAPACITY / T::CAPACITY) as usize;
}
impl<T: Word> CowWords<'_, T> {
    // Same word count as `BoxWords`.
    pub const LEN: usize = (Self::CAPACITY / T::CAPACITY) as usize;
}
impl<T: Word> From<CowWords<'_, T>> for BoxWords<T> {
    fn from(Words(block): CowWords<'_, T>) -> Self {
        // `into_owned` clones only when the Cow was borrowed.
        Words(block.map(|cow| cow.into_owned().into_boxed_slice()))
    }
}
impl<T: Word> From<BoxWords<T>> for CowWords<'_, T> {
    fn from(Words(block): BoxWords<T>) -> Self {
        Words(block.map(|arr| Cow::Owned(arr.into_vec())))
    }
}
impl<'a, T: Word> From<&'a BoxWords<T>> for CowWords<'a, T> {
    fn from(block: &'a BoxWords<T>) -> Self {
        // Borrowed view; no allocation.
        Words(block.as_ref().map(|ws| Cow::Borrowed(&ws[..])))
    }
}
impl<'a, T: Word> From<&'a [T]> for CowWords<'a, T> {
    fn from(slice: &'a [T]) -> Self {
        // Panics if `slice` is shorter than `Self::LEN`.
        Words(Some(Cow::Borrowed(&slice[0..Self::LEN])))
    }
}
impl<T: Word> From<Vec<T>> for BoxWords<T> {
    fn from(mut vec: Vec<T>) -> Self {
        // Normalize to exactly LEN words (zero-pad or truncate).
        vec.resize(Self::LEN, T::ZERO);
        Words(Some(vec.into_boxed_slice()))
    }
}
impl<T: Word> From<Vec<T>> for CowWords<'_, T> {
    fn from(mut vec: Vec<T>) -> Self {
        vec.resize(Self::LEN, T::ZERO);
        Words(Some(Cow::Owned(vec)))
    }
}
impl<T: Word> BoxWords<T> {
    /// Return an empty Words.
    pub fn empty() -> Self {
        Words(None)
    }
    /// Constructs a new instance with each element initialized to value.
    pub fn splat(value: T) -> Self {
        Words(Some(vec![value; Self::LEN].into_boxed_slice()))
    }
    // Word count: 0 when unallocated, `Self::LEN` otherwise.
    pub fn len(&self) -> usize {
        self.as_cow().len()
    }
    pub fn is_empty(&self) -> bool {
        self.as_cow().is_empty()
    }
    // Iterates the words by value; yields nothing when unallocated.
    pub fn iter<'r>(&'r self) -> impl Iterator<Item = T> + 'r {
        self.into_iter()
    }
}
impl<T: Word> CowWords<'_, T> {
    /// Return an empty Words.
    pub fn empty() -> Self {
        Words(None)
    }
    /// Constructs a new instance with each element initialized to value.
    pub fn splat(value: T) -> Self {
        Words(Some(Cow::Owned(vec![value; Self::LEN])))
    }
    pub fn len(&self) -> usize {
        match self.as_ref() {
            None => 0,
            Some(ref vec) => vec.len(),
        }
    }
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }
    pub fn iter<'r>(&'r self) -> impl Iterator<Item = T> + 'r {
        self.into_iter()
    }
}
impl<T: Word> BoxWords<T> {
    // Cheap borrowed view used to share the read-only implementations.
    pub(crate) fn as_cow(&self) -> CowWords<'_, T> {
        Words::from(self)
    }
    // Allocates a zeroed buffer on first mutation, then returns it.
    fn init(&mut self) -> &mut [T] {
        if self.0.is_none() {
            *self = Self::splat(T::ZERO);
        }
        self.0.as_mut().unwrap()
    }
}
impl<'r, T: Word> IntoIterator for &'r BoxWords<T> {
    type Item = T;
    type IntoIter = WordsIter<'r, T>;
    fn into_iter(self) -> Self::IntoIter {
        WordsIter(self.as_ref().map(|b| b.into_iter().cloned()))
    }
}
impl<'r, 'a, T: Word> IntoIterator for &'r CowWords<'a, T> {
    type Item = T;
    type IntoIter = WordsIter<'r, T>;
    fn into_iter(self) -> Self::IntoIter {
        WordsIter(self.as_ref().map(|b| b.into_iter().cloned()))
    }
}
// Iterator over the words of a block; an unallocated block yields nothing.
pub struct WordsIter<'a, T: Word>(Option<iter::Cloned<slice::Iter<'a, T>>>);
impl<'a, T: Word> Iterator for WordsIter<'a, T> {
    type Item = T;
    fn next(&mut self) -> Option<Self::Item> {
        self.0.as_mut().and_then(|i| i.next())
    }
}
// Read-only bit-set operations. The `BoxWords` impls delegate to the
// borrowed `CowWords` view; the `CowWords` impls treat an unallocated
// block (`None`) as all zeros.
impl<T: Word> bit_set::ops::Access for BoxWords<T> {
    fn size(&self) -> u64 {
        Self::CAPACITY
    }
    fn access(&self, i: u64) -> bool {
        assert!(i < Self::CAPACITY, bit_set::OUT_OF_BOUNDS);
        self.as_cow().access(i)
    }
}
impl<T: Word> bit_set::ops::Access for CowWords<'_, T> {
    fn size(&self) -> u64 {
        Self::CAPACITY
    }
    fn access(&self, i: u64) -> bool {
        assert!(i < Self::CAPACITY, bit_set::OUT_OF_BOUNDS);
        // Unallocated block: every bit reads as zero.
        self.as_ref().map_or(false, |cow| cow.access(i))
    }
}
impl<T: Word> bit_set::ops::Count for BoxWords<T> {
    fn count1(&self) -> u64 {
        self.as_cow().count1()
    }
}
impl<T: Word> bit_set::ops::Count for CowWords<'_, T> {
    fn count1(&self) -> u64 {
        self.as_ref().map_or(0, |cow| cow.count1())
    }
}
impl<T: Word> bit_set::ops::Rank for BoxWords<T> {
    fn rank1(&self, i: u64) -> u64 {
        self.as_cow().rank1(i)
    }
}
impl<T: Word> bit_set::ops::Rank for CowWords<'_, T> {
    fn rank1(&self, i: u64) -> u64 {
        // `i == CAPACITY` is allowed here: rank over the whole block.
        assert!(i <= Self::CAPACITY, bit_set::OUT_OF_BOUNDS);
        self.as_ref().map_or(0, |cow| cow.rank1(i))
    }
}
impl<T: Word> bit_set::ops::Select1 for BoxWords<T> {
    fn select1(&self, n: u64) -> u64 {
        self.as_cow().select1(n)
    }
}
impl<T: Word> bit_set::ops::Select0 for BoxWords<T> {
    fn select0(&self, n: u64) -> u64 {
        self.as_cow().select0(n)
    }
}
impl<T: Word> bit_set::ops::Select1 for CowWords<'_, T> {
    fn select1(&self, n: u64) -> u64 {
        // `n < count1()` implies at least one set bit, so the buffer exists.
        assert!(n < self.count1());
        self.as_ref().expect("should not happen").select1(n)
    }
}
impl<T: Word> bit_set::ops::Select0 for CowWords<'_, T> {
    fn select0(&self, n: u64) -> u64 {
        assert!(n < self.count0());
        // Unallocated block: every bit is zero, so the n-th zero is at n.
        self.as_ref().map_or(n, |bv| bv.select0(n))
    }
}
impl<T: Word> bit_set::ops::Insert for BoxWords<T> {
    fn insert(&mut self, i: u64) -> bool {
        assert!(i < Self::CAPACITY);
        // `init` allocates a zeroed buffer on demand.
        self.init().insert(i)
    }
}
impl<T: Word> bit_set::ops::Insert for CowWords<'_, T> {
    fn insert(&mut self, i: u64) -> bool {
        assert!(i < Self::CAPACITY);
        if self.0.is_none() {
            *self = Self::splat(T::ZERO);
        }
        let bv = self.as_mut().unwrap();
        // `to_mut` clones a borrowed Cow before the in-place update.
        <[T] as bit_set::ops::Insert>::insert(bv.to_mut(), i)
    }
}
impl<T: Word> bit_set::ops::Remove for BoxWords<T> {
    fn remove(&mut self, i: u64) -> bool {
        assert!(i < Self::CAPACITY);
        if let Some(bv) = self.as_mut() {
            bv.remove(i)
        } else {
            // Removing from an unallocated (all-zero) block is a no-op.
            false
        }
    }
}
impl<T: Word> bit_set::ops::Remove for CowWords<'_, T> {
    fn remove(&mut self, i: u64) -> bool {
        assert!(i < Self::CAPACITY);
        if let Some(bv) = self.as_mut() {
            <[T] as bit_set::ops::Remove>::remove(bv.to_mut(), i)
        } else {
            false
        }
    }
}
impl<'a, T: Word> ops::BitAnd<CowWords<'a, T>> for CowWords<'a, T> {
    type Output = CowWords<'a, T>;
    fn bitand(self, that: CowWords<'a, T>) -> Self::Output {
        Words(match (self.0, that.0) {
            // Intersection with an unallocated (all-zero) block is empty.
            (None, _) | (_, None) => None,
            (Some(ref buf), _) | (_, Some(ref buf)) if buf.is_empty() => None,
            (Some(mut lhs), Some(rhs)) => {
                assert_eq!(lhs.len(), rhs.len());
                // AND in place while tallying set bits of the result.
                let ones = {
                    let zip = lhs.to_mut().iter_mut().zip(rhs.iter());
                    let mut acc = 0;
                    for (x, y) in zip {
                        *x &= *y;
                        acc += x.count1();
                    }
                    acc
                };
                // Normalize an all-zero result back to `None`.
                if ones > 0 {
                    Some(lhs)
                } else {
                    None
                }
            }
        })
    }
}
impl<'a, T: Word> ops::BitOr<CowWords<'a, T>> for CowWords<'a, T> {
    type Output = CowWords<'a, T>;
    fn bitor(self, that: CowWords<'a, T>) -> Self::Output {
        Words(match (self.0, that.0) {
            (None, None) => None,
            // Union with an all-zero block is just the other side.
            (Some(buf), None) | (None, Some(buf)) => Some(buf),
            (Some(mut lhs), Some(rhs)) => {
                assert_eq!(lhs.len(), rhs.len());
                {
                    let zip = lhs.to_mut().iter_mut().zip(rhs.iter());
                    for (x, y) in zip {
                        *x |= *y;
                    }
                }
                Some(lhs)
            }
        })
    }
}
impl<'a, T: Word> ops::BitXor<CowWords<'a, T>> for CowWords<'a, T> {
    type Output = CowWords<'a, T>;
    fn bitxor(self, that: CowWords<'a, T>) -> Self::Output {
        Words(match (self.0, that.0) {
            (None, None) => None,
            (Some(buf), None) | (None, Some(buf)) => Some(buf),
            (Some(mut lhs), Some(rhs)) => {
                assert_eq!(lhs.len(), rhs.len());
                {
                    let lhs_vec = lhs.to_mut();
                    let zip = lhs_vec.iter_mut().zip(rhs.iter());
                    for (x, y) in zip {
                        *x ^= *y;
                    }
                };
                // NOTE(review): unlike `bitand`/`not`, an all-zero XOR result
                // stays `Some` rather than being normalized to `None` —
                // confirm whether callers rely on that normalization.
                Some(lhs)
            }
        })
    }
}
impl<'a, T: Word> ops::Not for CowWords<'a, T> {
    type Output = CowWords<'a, T>;
    fn not(self) -> Self::Output {
        Words(match self.0 {
            Some(mut buf) => {
                let ones = {
                    let vec = buf.to_mut();
                    // Pad to full length first so missing words invert too.
                    vec.resize(BoxWords::<T>::LEN, T::ZERO);
                    let mut acc = 0;
                    for i in 0..vec.len() {
                        vec[i] = !vec[i];
                        acc += vec[i].count1();
                    }
                    acc
                };
                // Normalize an all-zero result back to `None`.
                if ones > 0 {
                    Some(buf)
                } else {
                    None
                }
            }
            // NOT of an all-zero block is a fully-set block.
            None => Some(Cow::Owned(vec![!T::ZERO; BoxWords::<T>::LEN])),
        })
    }
}
|
use ethabi::{Function, Token, Uint};
use crate::utils::contract_builder::ContractBuilder;
/// Pre-built ABI definitions used to encode `send` and `request` calls.
#[derive(Debug, Clone)]
pub struct ExampleContract {
    // ABI of the `send` function (built by ContractBuilder::create_send_fn).
    send_fn: Function,
    // ABI of the `request` function (built by ContractBuilder::create_request_fn).
    request_fn: Function,
}
/// Errors surfaced while encoding contract call data.
#[derive(Debug, Clone)]
pub enum ExampleContractError {
    /// The ABI encoder rejected the supplied data.
    InvalidArgument(String),
    /// A hex / decimal / ABI conversion failed.
    FailedConversion(String),
    /// Any other underlying `ethabi` error.
    UnknownError(String),
}
impl From<ethabi::Error> for ExampleContractError {
fn from(error: ethabi::Error) -> Self {
match error {
ethabi::Error::InvalidData => Self::InvalidArgument("Invalid argument".into()),
_ => Self::UnknownError("Error not known to the universe".to_owned()),
}
}
}
impl From<web3::ethabi::Error> for ExampleContractError {
    /// Collapses any web3 ABI error into a generic conversion failure;
    /// the original error detail is intentionally dropped.
    fn from(_error: web3::ethabi::Error) -> Self {
        Self::FailedConversion("Failed to convert".to_owned())
    }
}
impl From<hex::FromHexError> for ExampleContractError {
    /// Hex-decoding failures (e.g. malformed addresses) become conversion
    /// failures; the original error detail is intentionally dropped.
    fn from(_error: hex::FromHexError) -> Self {
        Self::FailedConversion("Failed to convert from Hex".to_owned())
    }
}
impl ExampleContract {
pub fn new() -> Self {
let send_fn = ContractBuilder::create_send_fn();
let request_fn = ContractBuilder::create_request_fn();
Self {
send_fn,
request_fn,
}
}
pub fn encode_send_with_args(
&self,
to: &str,
amount: &str,
) -> Result<Vec<u8>, ExampleContractError> {
let to = &to[2..];
let bytes = hex::decode(to)?;
let to = Token::FixedBytes(bytes);
if let Ok(amount) = Uint::from_dec_str(amount) {
let amount = Token::Uint(amount);
let res = self.send_fn.encode_input(&[to, amount])?;
Ok(res)
} else {
Err(ExampleContractError::FailedConversion(
"Failed to convert from decimal".to_owned(),
))
}
}
pub fn encode_request_with_args(
&self,
from: &str,
amount: &str,
) -> Result<Vec<u8>, ExampleContractError> {
let from = &from[2..];
let bytes = hex::decode(from)?;
let from = Token::FixedBytes(bytes);
if let Ok(amount) = Uint::from_dec_str(amount) {
let amount = Token::Uint(amount);
let res = self.request_fn.encode_input(&[from, amount])?;
Ok(res)
} else {
Err(ExampleContractError::FailedConversion(
"Failed to convert from decimal".to_owned(),
))
}
}
}
|
pub mod microvm;
pub mod risc_v_emu;
pub mod r650x;
/// Entry point: intentionally empty — this crate currently only exposes the
/// `microvm`, `risc_v_emu` and `r650x` modules.
fn main() {
}
|
use crate::{EntryTrigger, Hidden, Name, PeriodicHiding};
use specs::prelude::*;
/// System that flips entities between hidden and revealed on a fixed cycle.
pub struct PeriodicHidingSystem {}
impl<'a> System<'a> for PeriodicHidingSystem {
    type SystemData = (
        Entities<'a>,
        WriteStorage<'a, PeriodicHiding>,
        WriteStorage<'a, Hidden>,
        WriteStorage<'a, EntryTrigger>,
        ReadStorage<'a, Name>,
    );
    /// Advances each `PeriodicHiding` entity's phase counter; when the
    /// counter wraps to zero the entity toggles: hidden entities are revealed
    /// (and gain an `EntryTrigger`), visible ones are hidden again.
    fn run(&mut self, data: Self::SystemData) {
        // `_names` is requested in SystemData but currently unused; the
        // leading underscore silences the unused-variable warning.
        let (entities, mut periodic_hiding_store, mut hidden_store, mut trigger_store, _names) =
            data;
        for (e, hiding) in (&entities, &mut periodic_hiding_store).join() {
            // NOTE(review): assumes hiding.period > 0 — a zero period would
            // panic on the modulo below; confirm upstream validation.
            hiding.offset = (hiding.offset + 1) % hiding.period;
            if hiding.offset == 0 {
                // `contains` replaces the old `if let Some(hidden) = get(e)`
                // whose binding was never used (compiler warning).
                if hidden_store.contains(e) {
                    // Reveal: drop Hidden and arm an entry trigger so the
                    // entity reacts to anything currently standing on it.
                    hidden_store.remove(e);
                    trigger_store
                        .insert(e, EntryTrigger {})
                        .expect("Unable to insert EntryTrigger in Periodic Hiding System");
                } else {
                    // Hide: remove any trigger and mark the entity hidden.
                    trigger_store.remove(e);
                    hidden_store
                        .insert(e, Hidden {})
                        .expect("Unable to insert Hidden in Periodic Hiding System");
                }
            }
        }
    }
}
|
use libc::c_int;
use libc::c_void;
use crate::generated::{spdk_poller, spdk_poller_register /*spdk_poller_unregister*/};
/// Handle owning the state of a poller registered via `poller_register`.
///
/// Holds the boxed closure whose raw pointer was handed to SPDK, so the
/// closure lives exactly as long as this handle does.
pub struct PollerHandle {
    // Raw poller returned by spdk_poller_register; currently never
    // unregistered (see the commented-out code in Drop).
    #[allow(dead_code)]
    pub(crate) poller: *mut spdk_poller,
    // Owns the callback closure; SPDK holds a raw pointer into this Box.
    #[allow(dead_code)]
    pub(crate) closure: Box<dyn Fn() -> bool>,
}
impl Drop for PollerHandle {
    #[allow(clippy::cast_ptr_alignment)]
    fn drop(&mut self) {
        // NOTE(review): dropping the handle frees `self.closure` — a Box
        // reconstructed in `poller_register` from the very pointer SPDK was
        // given — while the poller is never unregistered. That aliasing is a
        // plausible cause of the double-free mentioned below.
        // TODO(jkozlowski): Fix this up eventually, it somehow causes a double-free.
        //let tmp_poller = self.poller;
        // This is rather dogdy, spdk_poller_unregister will write NULL to self.poller,
        // hopefully that isn't going to crash!
        //unsafe { spdk_poller_unregister(tmp_poller as *mut *mut spdk_poller) }
    }
}
/// Registers a poller with spdk.
/// f: should return true if any work was done
pub fn poller_register<F>(f: F) -> PollerHandle
where
    F: Fn() -> bool + 'static,
{
    // C-ABI trampoline: recovers the closure from SPDK's context pointer and
    // translates its bool result into SPDK's 1 (work done) / 0 convention.
    extern "C" fn poller_wrapper<F>(closure: *mut c_void) -> c_int
    where
        F: Fn() -> bool,
    {
        let opt_closure = closure as *mut F;
        // SAFETY: `closure` is the pointer produced by Box::into_raw below;
        // sound only while the owning PollerHandle is still alive.
        let work_done = unsafe { (*opt_closure)() };
        if work_done {
            1
        } else {
            0
        }
    }
    // Leak the closure so its address is stable for the C callback.
    let f_raw = Box::into_raw(Box::new(f)) as *mut dyn Fn() -> bool;
    let f_pointer = f_raw as *const _ as *mut c_void;
    let poller = unsafe { spdk_poller_register(Some(poller_wrapper::<F>), f_pointer, 0) };
    PollerHandle {
        // TODO: handle failure
        poller,
        // NOTE(review): this re-owns the SAME allocation SPDK now points at;
        // when the handle drops, the closure is freed while the (never
        // unregistered) poller can still invoke it — see the TODO in Drop.
        closure: unsafe { Box::from_raw(f_raw) },
    }
}
|
#![feature(plugin, box_syntax, box_patterns, slice_patterns, advanced_slice_patterns, exit_status)]
#[macro_use] extern crate ast;
#[macro_use] extern crate middle;
extern crate getopts;
extern crate rbtree;
use ast::ast::CompilationUnit;
use ast::context::{Context, CONTEXT};
use ast::error::{FatalError, ERRORS};
use ast::{create_ast, create_multi_ast};
use middle::arena::Arena;
use middle::name_resolve::name_resolve;
use middle::ordering::check_ordering;
use middle::reachability::check_reachability;
use getopts::Options;
use emit::emit;
use std::{io, thread, env};
use std::io::Write;
use std::cell::RefCell;
pub mod context;
pub mod mangle;
pub mod code;
pub mod emit;
pub mod descriptors;
pub mod stack;
pub mod method;
pub mod ref_alloc;
pub mod strings;
/// Compiler driver: parses options, builds an AST per input file, runs name
/// resolution and static analysis, and finally emits code. Each phase checks
/// the thread-local ERRORS counter and bails out early on failure.
///
/// NOTE(review): relies on pre-1.0 nightly APIs (`env::set_exit_status`,
/// feature gates at the top of the file); this targets an old toolchain.
fn driver(ctx: &RefCell<Context>) {
    let mut opts = Options::new();
    opts.optflag("v", "verbose", "verbose - print parsing debug info");
    opts.optflag("", "multi", "accept concatenated compilation units");
    // Skip argv[0]; option-parse failures go to stderr with exit status 1.
    let matches = match opts.parse(&env::args().collect::<Vec<_>>()[1..]) {
        Ok(m) => m,
        Err(f) => {
            writeln!(&mut io::stderr(), "{}", f).unwrap(),
            env::set_exit_status(1);
            return
        }
    };
    if matches.opt_present("verbose") {
        ctx.borrow_mut().verbose = true;
    }
    if matches.free.is_empty() {
        // TODO: Should have a print_usage function.
        println!("No input file specified.");
        return;
    };
    // Parse every input file, stopping at the first one that errors.
    let mut asts: Vec<CompilationUnit> = vec![];
    for ref file in matches.free.iter() {
        if ctx.borrow().verbose {
            println!("Parsing file {}...", file);
        }
        // --multi allows several concatenated compilation units per file.
        if matches.opt_present("multi") {
            asts.extend(create_multi_ast(ctx, &file).into_iter());
        } else if let Some(ast) = create_ast(ctx, &file) {
            asts.push(ast);
        } else {
            return;
        }
        if ERRORS.with(|v| v.get()) > 0 { return; }
    }
    // No ASTs without a reported error would mean a silent failure.
    if asts.len() == 0 {
        assert!(ERRORS.with(|v| v.get()) > 0);
        return;
    }
    let arena = Arena::new();
    let universe = name_resolve(&arena, &*asts);
    if ERRORS.with(|v| v.get()) > 0 {
        // Some errors occurred. It's not a good idea to continue, to avoid crashing the compiler.
        return;
    }
    // Static analysis
    check_ordering(&universe);
    check_reachability(&universe);
    if ERRORS.with(|v| v.get()) > 0 {
        return;
    }
    // All checking done. Start emitting code.
    emit(&universe);
}
/// Runs the driver on a dedicated thread with a 64 MiB stack (parsing and
/// analysis recurse deeply), then maps the outcome to an exit status:
/// 42 for compile/fatal errors, 1 for an internal compiler panic.
fn main() {
    let error = match thread::Builder::new()
        .stack_size(64 * 1024 * 1024)
        .spawn(|| {
            CONTEXT.with(|ctx| driver(ctx));
            ERRORS.with(|v| v.get())
        }).unwrap().join() {
        Err(res) => {
            // A FatalError panic is an expected "abort compilation" signal.
            if res.is::<FatalError>() {
                true
            } else {
                // The compiler had a problem
                env::set_exit_status(1);
                false
            }
        },
        Ok(num) => num > 0
    };
    if error {
        env::set_exit_status(42);
    }
}
|
//! Program state processor
use crate::access_control;
use fund::{
accounts::{
fund::{Fund, FundType},
vault::TokenVault,
},
error::{FundError, FundErrorCode},
};
use serum_common::pack::Pack;
use solana_program::{
account_info::{next_account_info, AccountInfo},
msg,
program_option::COption,
pubkey::Pubkey,
};
use std::convert::Into;
/// Entrypoint for initializing a new `Fund` account.
///
/// Expected account order: fund, vault, mint, rent sysvar, then the optional
/// whitelist, NFT token and NFT mint accounts (used by Raise funds).
/// Validates everything via `access_control`, then writes the initial state.
pub fn handler(
    program_id: &Pubkey,
    accounts: &[AccountInfo],
    owner: Pubkey,
    authority: Pubkey,
    max_balance: u64,
    fund_type: FundType,
) -> Result<(), FundError> {
    msg!("Initialize Fund");
    let acc_infos = &mut accounts.iter();
    let fund_acc_info = next_account_info(acc_infos)?;
    let vault_acc_info = next_account_info(acc_infos)?;
    let mint_acc_info = next_account_info(acc_infos)?;
    let rent_acc_info = next_account_info(acc_infos)?;
    // Optional accounts
    let whitelist_acc_info = acc_infos.next();
    let nft_token_acc_info = acc_infos.next();
    let nft_mint_acc_info = acc_infos.next();
    // 1. Validation. NOTE(review): the nonce is hard-coded to 0 here and in
    // the state transition below — confirm the vault PDA is derived with 0.
    access_control(AccessControlRequest {
        program_id,
        fund_acc_info,
        mint_acc_info,
        vault_acc_info,
        rent_acc_info,
        nft_mint_acc_info,
        nonce: 0,
    })?;
    // 2. Creation
    msg!("create fund");
    Fund::unpack_mut(
        &mut fund_acc_info.try_borrow_mut_data()?,
        &mut |fund_acc: &mut Fund| {
            state_transition(StateTransitionRequest {
                fund_acc,
                owner,
                authority,
                mint: mint_acc_info.key,
                nft_mint_acc_info,
                nft_token_acc_info,
                vault: *vault_acc_info.key,
                whitelist_acc_info,
                fund_type,
                nonce: 0,
                max_balance,
            })
            .map_err(Into::into)
        },
    )?;
    Ok(())
}
/// Validates the accounts supplied for fund initialization.
///
/// Checks: the fund account is program-owned, rent-exempt and not already
/// initialized; the vault token account is owned by the program-derived
/// vault authority; for Raise funds, the NFT mint authority must be the
/// fund's program-derived authority; finally the fund mint must parse.
fn access_control(req: AccessControlRequest) -> Result<(), FundError> {
    msg!("access-control: initialize");
    let AccessControlRequest {
        program_id,
        fund_acc_info,
        mint_acc_info,
        rent_acc_info,
        nft_mint_acc_info,
        vault_acc_info,
        nonce,
    } = req;
    let rent = access_control::rent(rent_acc_info)?;
    // NOTE(review): the fund is unpacked before the owner check below, so an
    // unpack failure surfaces before NotOwnedByProgram.
    let fund = Fund::unpack(&fund_acc_info.try_borrow_data()?)?;
    {
        if fund_acc_info.owner != program_id {
            return Err(FundErrorCode::NotOwnedByProgram.into());
        }
        if !rent.is_exempt(fund_acc_info.lamports(), fund_acc_info.try_data_len()?) {
            return Err(FundErrorCode::NotRentExempt.into());
        }
        if fund.initialized {
            return Err(FundErrorCode::AlreadyInitialized.into());
        }
    }
    {
        // The vault must be a token account owned by the PDA derived from
        // this fund account and the caller-supplied nonce.
        let vault = access_control::token(vault_acc_info)?;
        let vault_authority = Pubkey::create_program_address(
            &TokenVault::signer_seeds(fund_acc_info.key, &nonce),
            program_id,
        )
        .map_err(|_| FundErrorCode::InvalidVaultNonce)?;
        if vault.owner != vault_authority {
            return Err(FundErrorCode::InvalidVault.into());
        }
    }
    // NOTE(review): the fund is not yet initialized at this point, so
    // `fund.fund_type` and `fund.nonce` read pre-initialization account data
    // — confirm this branch can actually trigger during initialization.
    if fund.fund_type.eq(&FundType::Raise { private: false })
        || fund.fund_type.eq(&FundType::Raise { private: true })
    {
        // NOTE(review): unwrap() panics if the optional NFT mint account was
        // not supplied — consider returning an error instead.
        let nft_mint = access_control::mint(nft_mint_acc_info.unwrap())?;
        let fund_authority = Pubkey::create_program_address(
            &TokenVault::signer_seeds(&fund_acc_info.key, &fund.nonce),
            program_id,
        )
        .map_err(|_| FundErrorCode::InvalidVaultNonce)?;
        if nft_mint.mint_authority != COption::Some(fund_authority) {
            return Err(FundErrorCode::InvalidMintAuthority.into());
        }
    }
    // Mint (initialized but not yet on Safe).
    let _ = access_control::mint(mint_acc_info)?;
    msg!("access-control: success");
    Ok(())
}
/// Writes the initial fund state once validation has passed.
///
/// Raise funds (public or private) additionally record their NFT mint/token
/// accounts and reset the round counter; private raises also store the
/// whitelist account.
fn state_transition(req: StateTransitionRequest) -> Result<(), FundError> {
    msg!("state-transition: initialize");
    let StateTransitionRequest {
        fund_acc,
        owner,
        authority,
        vault,
        mint,
        nft_mint_acc_info,
        nft_token_acc_info,
        fund_type,
        nonce,
        max_balance,
        whitelist_acc_info,
    } = req;
    // Compute the variant tests once with `==` instead of repeating the
    // non-idiomatic `.eq(&..)` chains.
    let is_raise = fund_type == (FundType::Raise { private: false })
        || fund_type == (FundType::Raise { private: true });
    let is_private_raise = fund_type == (FundType::Raise { private: true });
    // Core bookkeeping shared by every fund type.
    fund_acc.initialized = true;
    fund_acc.open = true;
    fund_acc.owner = owner;
    fund_acc.authority = authority;
    fund_acc.vault = vault;
    fund_acc.mint = *mint;
    fund_acc.max_balance = max_balance;
    fund_acc.balance = 0;
    fund_acc.fund_type = fund_type;
    fund_acc.nonce = nonce;
    if is_raise {
        // NOTE(review): unwrap() panics if the optional NFT accounts were
        // not supplied; access_control only validates the NFT mint.
        fund_acc.nft_mint = *nft_mint_acc_info.unwrap().key;
        fund_acc.nft_account = *nft_token_acc_info.unwrap().key;
        fund_acc.round = 0u32;
    }
    if is_private_raise {
        fund_acc.whitelist = *whitelist_acc_info.unwrap().key;
    }
    msg!("state-transition: success");
    Ok(())
}
/// Parameter bundle for `access_control`.
struct AccessControlRequest<'a, 'b> {
    program_id: &'a Pubkey,
    fund_acc_info: &'a AccountInfo<'b>,
    mint_acc_info: &'a AccountInfo<'b>,
    rent_acc_info: &'a AccountInfo<'b>,
    // Only required for Raise funds; validated when present.
    nft_mint_acc_info: Option<&'a AccountInfo<'b>>,
    vault_acc_info: &'a AccountInfo<'b>,
    // Nonce used to derive the vault authority PDA.
    nonce: u8,
}
/// Parameter bundle for `state_transition`.
struct StateTransitionRequest<'a, 'b> {
    // The fund account data being initialized in place.
    fund_acc: &'a mut Fund,
    owner: Pubkey,
    mint: &'a Pubkey,
    // Only used for private Raise funds.
    whitelist_acc_info: Option<&'a AccountInfo<'b>>,
    // Only used for Raise funds.
    nft_token_acc_info: Option<&'a AccountInfo<'b>>,
    nft_mint_acc_info: Option<&'a AccountInfo<'b>>,
    vault: Pubkey,
    authority: Pubkey,
    fund_type: FundType,
    nonce: u8,
    max_balance: u64,
}
|
//! #290. 单词规律
//! 给定一种规律 pattern 和一个字符串 str ,判断 str 是否遵循相同的规律。
//! 这里的 遵循 指完全匹配,例如, pattern 里的每个字母和字符串 str 中的每个非空单词之间存在着双向连接的对应规律。
//! #解题思路
//! 分别进行hash,匹配出现的位置是否相同
use std::collections::HashMap;
/// Marker type for the LeetCode-style solution namespace.
pub struct Solution;
impl Solution {
    /// LeetCode #290 "Word Pattern": returns `true` iff the letters of
    /// `pattern` and the space-separated words of `s` are in one-to-one
    /// correspondence.
    ///
    /// Strategy (as in the original): map each letter and each word to the
    /// index of its first occurrence; the pairing is consistent iff both
    /// sides were first seen at the same index at every position.
    pub fn word_pattern(pattern: String, s: String) -> bool {
        // An empty pattern or sentence never matches (original contract).
        if pattern.is_empty() || s.is_empty() {
            return false;
        }
        let letters: Vec<char> = pattern.chars().collect();
        // Split on single spaces to match the original `split(' ')`.
        let words: Vec<&str> = s.split(' ').collect();
        // Length check before any insertion (the original inserted index 0
        // first); `0 as usize` casts are gone as well.
        if letters.len() != words.len() {
            return false;
        }
        let mut first_letter_at: HashMap<char, usize> = HashMap::new();
        let mut first_word_at: HashMap<&str, usize> = HashMap::new();
        for (i, (&letter, &word)) in letters.iter().zip(words.iter()).enumerate() {
            match (first_letter_at.get(&letter), first_word_at.get(word)) {
                // Both seen before: their first-occurrence indices must agree.
                (Some(&j), Some(&k)) => {
                    if j != k {
                        return false;
                    }
                }
                // Both new: register the pair at the current index.
                (None, None) => {
                    first_letter_at.insert(letter, i);
                    first_word_at.insert(word, i);
                }
                // Exactly one side already mapped: the bijection is broken.
                _ => return false,
            }
        }
        true
    }
}
#[cfg(test)]
mod tests {
    /// Length mismatch must fail; a consistent bijection must pass.
    #[test]
    fn it_works() {
        // 3 pattern letters vs 4 words -> false.
        assert_eq!(
            super::Solution::word_pattern("aaa".into(), "aa aa aa aa".into()),
            false
        );
        // 'a'<->"aa", 'b'<->"ab", 'c'<->"ac" is a valid bijection.
        assert_eq!(
            super::Solution::word_pattern("abca".into(), "aa ab ac aa".into()),
            true
        );
    }
}
|
// Copyright (c) 2018 The rust-gpio-cdev Project Developers.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Generates this crate's Error/ErrorKind/ResultExt/Result types plus
// automatic From-conversions for the foreign error types listed below.
error_chain! {
    types {
        Error,
        ErrorKind,
        ResultExt,
        Result;
    }
    foreign_links {
        // Errors bubbled up from nix syscall wrappers.
        Nix(::nix::Error);
        // Standard I/O errors.
        Io(::std::io::Error);
    }
}
|
use crate::StorageExecutor;
use actix_web::{
web::{Data, Json},
Error, HttpResponse,
};
use proger_core::protocol::{request::CreateStepPage, response::PageAccess};
use crate::storage::storage_driver::{StorageCmd, StorageDriver};
use actix::Addr;
/// HTTP handler: asks the storage actor to create a new step page and
/// returns its access credentials.
///
/// Responds 200 with the page's admin secret and link on success, and 400
/// with an empty body when the storage layer rejects the request.
pub async fn create_step_page<T: StorageDriver>(
    payload: Json<CreateStepPage>,
    storage: Data<Addr<StorageExecutor<T>>>,
) -> Result<HttpResponse, Error> {
    // Mailbox errors (actor stopped/overloaded) propagate as a 500 via `?`.
    let result = storage
        .into_inner()
        .send(StorageCmd::CreateStepPage(payload.into_inner()))
        .await?;
    match result {
        Ok(page) => Ok(HttpResponse::Ok().json(PageAccess {
            admin_secret: page.secret,
            link: page.link,
        })),
        // The storage error is intentionally not leaked to the client; the
        // previous `Err(e)` binding was unused and raised a warning.
        Err(_) => Ok(HttpResponse::BadRequest().finish()),
    }
}
|
use std::marker::PhantomData;
use necsim_core_bond::{NonNegativeF64, PositiveF64};
use priority_queue::PriorityQueue;
use necsim_core::{
cogs::{
Backup, CoalescenceSampler, DispersalSampler, EmigrationExit, GloballyCoherentLineageStore,
Habitat, ImmigrationEntry, LineageReference, RngCore, SpeciationProbability, TurnoverRate,
},
landscape::Location,
};
use necsim_impls_no_std::cogs::event_sampler::gillespie::{
GillespieEventSampler, GillespiePartialSimulation,
};
mod event_time;
mod sampler;
use event_time::EventTime;
#[allow(clippy::module_name_repetitions)]
#[allow(clippy::type_complexity)]
/// Active-lineage sampler for the Gillespie algorithm: keeps every location
/// holding active lineages in a priority queue keyed by its next event time.
pub struct GillespieActiveLineageSampler<
    H: Habitat,
    G: RngCore,
    R: LineageReference<H>,
    S: GloballyCoherentLineageStore<H, R>,
    X: EmigrationExit<H, G, R, S>,
    D: DispersalSampler<H, G>,
    C: CoalescenceSampler<H, R, S>,
    T: TurnoverRate<H>,
    N: SpeciationProbability<H>,
    E: GillespieEventSampler<H, G, R, S, X, D, C, T, N>,
    I: ImmigrationEntry,
> {
    // Locations with active lineages, ordered by their next event time.
    active_locations: PriorityQueue<Location, EventTime>,
    // Total count of active lineages across all queued locations.
    number_active_lineages: usize,
    // Time of the most recently processed event.
    last_event_time: NonNegativeF64,
    // Pins the otherwise-unused generic parameters.
    marker: PhantomData<(H, G, R, S, X, D, C, T, N, E, I)>,
}
impl<
        H: Habitat,
        G: RngCore,
        R: LineageReference<H>,
        S: GloballyCoherentLineageStore<H, R>,
        X: EmigrationExit<H, G, R, S>,
        D: DispersalSampler<H, G>,
        C: CoalescenceSampler<H, R, S>,
        T: TurnoverRate<H>,
        N: SpeciationProbability<H>,
        E: GillespieEventSampler<H, G, R, S, X, D, C, T, N>,
        I: ImmigrationEntry,
    > GillespieActiveLineageSampler<H, G, R, S, X, D, C, T, N, E, I>
{
    /// Builds the sampler from a freshly initialised partial simulation:
    /// each location with active lineages receives an initial next-event
    /// time sampled exponentially at that location's total event rate.
    #[must_use]
    pub fn new(
        partial_simulation: &GillespiePartialSimulation<H, G, R, S, D, C, T, N>,
        event_sampler: &E,
        rng: &mut G,
    ) -> Self {
        use necsim_core::cogs::RngSampler;
        let mut active_locations: Vec<(Location, EventTime)> = Vec::new();
        let mut number_active_lineages: usize = 0;
        partial_simulation
            .lineage_store
            .iter_active_locations(&partial_simulation.habitat)
            .for_each(|location| {
                let number_active_lineages_at_location = partial_simulation
                    .lineage_store
                    .get_active_local_lineage_references_at_location_unordered(
                        &location,
                        &partial_simulation.habitat,
                    )
                    .len();
                if number_active_lineages_at_location > 0 {
                    // All lineages were just initially inserted into the lineage store,
                    // so all active lineages are in the lineage store
                    // A zero event rate fails PositiveF64::new, which quietly
                    // excludes the location (and its lineages) from the queue.
                    if let Ok(event_rate_at_location) = PositiveF64::new(
                        event_sampler
                            .get_event_rate_at_location(&location, partial_simulation)
                            .get(),
                    ) {
                        active_locations.push((
                            location,
                            EventTime::from(rng.sample_exponential(event_rate_at_location)),
                        ));
                        number_active_lineages += number_active_lineages_at_location;
                    }
                }
            });
        Self {
            active_locations: PriorityQueue::from(active_locations),
            number_active_lineages,
            // The simulation clock starts at zero.
            last_event_time: NonNegativeF64::zero(),
            marker: PhantomData::<(H, G, R, S, X, D, C, T, N, E, I)>,
        }
    }
}
impl<
        H: Habitat,
        G: RngCore,
        R: LineageReference<H>,
        S: GloballyCoherentLineageStore<H, R>,
        X: EmigrationExit<H, G, R, S>,
        D: DispersalSampler<H, G>,
        C: CoalescenceSampler<H, R, S>,
        T: TurnoverRate<H>,
        N: SpeciationProbability<H>,
        E: GillespieEventSampler<H, G, R, S, X, D, C, T, N>,
        I: ImmigrationEntry,
    > core::fmt::Debug for GillespieActiveLineageSampler<H, G, R, S, X, D, C, T, N, E, I>
{
    /// Manual Debug impl: the queue is rendered as a placeholder string, and
    /// `last_event_time` is not included in the output.
    fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result {
        f.debug_struct("GillespieActiveLineageSampler")
            .field("active_locations", &"PriorityQueue")
            .field("number_active_lineages", &self.number_active_lineages)
            .field("marker", &self.marker)
            .finish()
    }
}
#[contract_trait]
impl<
        H: Habitat,
        G: RngCore,
        R: LineageReference<H>,
        S: GloballyCoherentLineageStore<H, R>,
        X: EmigrationExit<H, G, R, S>,
        D: DispersalSampler<H, G>,
        C: CoalescenceSampler<H, R, S>,
        T: TurnoverRate<H>,
        N: SpeciationProbability<H>,
        E: GillespieEventSampler<H, G, R, S, X, D, C, T, N>,
        I: ImmigrationEntry,
    > Backup for GillespieActiveLineageSampler<H, G, R, S, X, D, C, T, N, E, I>
{
    /// Deep-copies the sampler state. `unsafe` per the `Backup` trait's
    /// contract; here every field is a plain clone/copy with no shared
    /// mutable state.
    unsafe fn backup_unchecked(&self) -> Self {
        Self {
            active_locations: self.active_locations.clone(),
            number_active_lineages: self.number_active_lineages,
            last_event_time: self.last_event_time,
            marker: PhantomData::<(H, G, R, S, X, D, C, T, N, E, I)>,
        }
    }
}
|
import!(vsl_ast);
import!(vsl_decl);
import!(vsl_entity);
import!(vsl_typealias);
import!(vsl_type);
|
//std::slice::from_raw_parts;
/// One validator deposit record; packed so the layout matches the host's
/// raw byte format exactly (no padding).
#[repr(packed)]
pub struct Deposit{
    // Validator public key bytes.
    pub_key: [i8;48],
    withdrawal_credentials: [i8;48],
    // Deposit amount; unaligned inside the packed struct.
    amount: u64,
}
/// Argument record handed in by the host; packed to match its raw layout.
#[repr(packed)]
#[allow(dead_code)]
pub struct Args {
    // State roots before/after applying the block.
    pre_state: [i8;32],
    post_state: [i8;32],
    deposit_count: u32,
    // Out-of-line array of `deposit_count` Deposit records.
    deposits: *mut Deposit,
    block_size: u32,
    // Zero-length tail marking the start of the inline block payload.
    block_data: [i8;0],
}
/// Host entry point: applies one block to the state in place.
/// Returns 1 to reject an empty block, 0 otherwise.
///
/// # Safety
/// `args` must point to a valid, writable `Args` whose `deposits` and
/// `block_data` regions are large enough for the accesses below.
#[no_mangle]
pub unsafe extern "C" fn transition(args: *mut Args) -> i32 {
    if (*args).block_size == 0 {
        return 1;
    }
    // Put deposits
    if (*args).block_size==1 {
        (*args).deposit_count = 2;
        let d0: *mut Deposit = (*args).deposits.offset(0);
        (*d0).pub_key[1] = 1;
        (*d0).withdrawal_credentials[2] = 2;
        (*d0).amount = 3;
        let d1: *mut Deposit = (*args).deposits.offset(1);
        (*d1).pub_key[4] = 4;
        (*d1).withdrawal_credentials[5] = 5;
        // u64::MAX sentinel value.
        (*d1).amount = 0xFFFFFFFFFFFFFFFF;
    }
    // Write block[1] to postState byte indicated by block[0]
    if (*args).block_size==2 {
        // NOTE(review): `&(*args).block_data` references a field of a
        // #[repr(packed)] struct (UB-prone), and the i8 index below is
        // sign-extended — a negative block[0] becomes a huge usize and the
        // unchecked write lands far outside the 32-byte post_state. Confirm
        // the host guarantees trusted, in-range input.
        let block = &(*args).block_data;
        (*args).post_state[ *block.get_unchecked(0) as usize ] = *block.get_unchecked(1);
    }
    return 0;
}
//! Tests auto-converted from "sass-spec/spec/non_conformant/parser/interpolate/44_selector/double_escape"
#[allow(unused)]
use super::rsass;
// From "sass-spec/spec/non_conformant/parser/interpolate/44_selector/double_escape/12_double_escaped_interpolated_value_todo.hrx"
/// A doubly-escaped `\\@` inside an inline interpolation must collapse to a
/// single escaped `\@` in the emitted selector.
#[test]
fn t12_double_escaped_interpolated_value_todo() {
    assert_eq!(
        rsass(
            "$key: \'bar\';\
             \n.test12#{\'\\\\@#{$key}\'} { content: \'1.2\'; }\
             \n"
        )
        .unwrap(),
        ".test12\\@bar {\
         \n content: \"1.2\";\
         \n}\
         \n"
    );
}
// From "sass-spec/spec/non_conformant/parser/interpolate/44_selector/double_escape/22_double_escaped_interpolated_variable.hrx"
/// Same as t12, but the doubly-escaped value is stored in a variable before
/// being interpolated into the selector.
#[test]
fn t22_double_escaped_interpolated_variable() {
    assert_eq!(
        rsass(
            "$key: \'bar\';\
             \n$suffix2: \'\\\\@#{$key}\';\
             \n.test22#{$suffix2} { content: \'2.2\'; }\
             \n"
        )
        .unwrap(),
        ".test22\\@bar {\
         \n content: \"2.2\";\
         \n}\
         \n"
    );
}
// From "sass-spec/spec/non_conformant/parser/interpolate/44_selector/double_escape/32_double_escaped_literal.hrx"
/// A doubly-escaped literal (no variable involved) must also collapse to a
/// single escaped `\@` in the emitted selector.
#[test]
fn t32_double_escaped_literal() {
    assert_eq!(
        rsass(
            ".test32#{\'\\\\@baz\'} { content: \'3.2\'; }\
             \n"
        )
        .unwrap(),
        ".test32\\@baz {\
         \n content: \"3.2\";\
         \n}\
         \n"
    );
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.