text stringlengths 8 4.13M |
|---|
//! Integral code supporting both scalars and vectors.
use super::*;
pub mod scalar;
pub mod vector;
use std::ops::{Add, Shl, Shr, Rem};
use num_traits::One;
use arithimpl::traits::ConvertFrom;
use std::marker::PhantomData;
/// Integral code for scalars and vectors.
///
/// Configures how many components a packed vector holds and how many bits
/// each component occupies (including gap space).
#[derive(Debug, Clone)]
pub struct Code<I> {
    /// Number of components to expect in vectors.
    pub component_count: usize,
    /// Bits to allocate for each component in vectors, including gap space.
    pub component_size: usize,
    /// Marker tying this code to the underlying integer type `I`.
    pub _phantom: PhantomData<I>
}
impl<I> Code<I> {
    /// Default code: 10 components of 64 bits each.
    pub fn default() -> Code<I> {
        Self::new(10, 64)
    }
    /// Creates a code for `component_count` components of `component_size` bits each.
    pub fn new(component_count: usize, component_size: usize) -> Code<I> {
        Code {
            // Field-init shorthand instead of `component_count: component_count`.
            component_count,
            component_size,
            _phantom: PhantomData,
        }
    }
}
// impl<I> Encoder<usize> for Code<I>
// where
// I: From<usize>,
// {
// type Target=scalar::Plaintext<I, usize>;
// fn encode(&self, x: &usize) -> Self::Target {
// scalar::Plaintext {
// data: basic::Plaintext(I::from(*x)),
// _phantom: PhantomData,
// }
// }
// }
//
//
// impl<I> Encoder<u8> for Code<I>
// where
// I: From<u8>,
// {
// type Target=scalar::Plaintext<I, u8>;
// fn encode(&self, x: &u8) -> Self::Target {
// scalar::Plaintext {
// data: basic::Plaintext(I::from(*x)),
// _phantom: PhantomData,
// }
// }
// }
//
//
// impl<I> Encoder<u16> for Code<I>
// where
// I: From<u16>,
// {
// type Target=scalar::Plaintext<I, u16>;
// fn encode(&self, x: &u16) -> Self::Target {
// scalar::Plaintext {
// data: basic::Plaintext(I::from(*x)),
// _phantom: PhantomData,
// }
// }
// }
//
//
// impl<I> Encoder<u32> for Code<I>
// where
// I: From<u32>,
// {
// type Target=scalar::Plaintext<I, u32>;
// fn encode(&self, x: &u32) -> Self::Target {
// scalar::Plaintext {
// data: basic::Plaintext(I::from(*x)),
// _phantom: PhantomData,
// }
// }
// }
/// Encodes a single `u64` into a scalar plaintext by converting it into `I`.
impl<I> Encoder<u64> for Code<I>
where
    I: From<u64>,
{
    type Target = scalar::Plaintext<I, u64>;
    fn encode(&self, x: &u64) -> Self::Target {
        let raw = I::from(*x);
        scalar::Plaintext {
            data: core::Plaintext(raw),
            _phantom: PhantomData,
        }
    }
}
/// Encodes a vector of `u64` values by packing them into one big integer
/// using the code's component layout.
impl<I> Encoder<Vec<u64>> for Code<I>
where
    I: One,
    I: Clone,
    I: From<u64>,
    I: Shl<usize, Output = I>,
    I: Add<I, Output = I>,
    for<'a, 'b> &'a I: Rem<&'b I, Output = I>,
    for<'a> &'a I: Shr<usize, Output = I>,
{
    type Target = vector::Plaintext<I, u64>;
    fn encode(&self, x: &Vec<u64>) -> Self::Target {
        // Pack all components into a single integer first, then record the
        // layout so the plaintext can be unpacked later.
        let packed = pack(x, self.component_count, self.component_size);
        vector::Plaintext {
            data: core::Plaintext(packed),
            component_count: self.component_count,
            component_size: self.component_size,
            _phantom: PhantomData,
        }
    }
}
// impl<I> Decoder<usize> for Code<I>
// where
// usize: ConvertFrom<I>,
// {
// type Source=scalar::Plaintext<I, usize>;
// fn decode(&self, x: &scalar::Plaintext<I, usize>) -> usize {
// usize::_from(&x.data.0)
// }
// }
//
// impl<I> Decoder<u8> for Code<I>
// where
// u8: ConvertFrom<I>,
// {
// type Source=scalar::Plaintext<I, u8>;
// fn decode(&self, x: &scalar::Plaintext<I, u8>) -> u8 {
// u8::_from(&x.data.0)
// }
// }
//
// impl<I> Decoder<u16> for Code<I>
// where
// u16: ConvertFrom<I>,
// {
// type Source=scalar::Plaintext<I, u16>;
// fn decode(&self, x: &scalar::Plaintext<I, u16>) -> u16 {
// u16::_from(&x.data.0)
// }
// }
//
// impl<I> Decoder<u32> for Code<I>
// where
// u32: ConvertFrom<I>,
// {
// type Source=scalar::Plaintext<I, u32>;
// fn decode(&self, x: &scalar::Plaintext<I, u32>) -> u32 {
// u32::_from(&x.data.0)
// }
// }
/// Decodes a scalar plaintext back into a `u64`.
impl<I> Decoder<u64> for Code<I>
where
    u64: ConvertFrom<I>,
{
    type Source = scalar::Plaintext<I, u64>;
    fn decode(&self, x: &scalar::Plaintext<I, u64>) -> u64 {
        let raw = &x.data.0;
        u64::_from(raw)
    }
}
/// Decodes a packed vector plaintext back into its component `u64` values.
impl<I> Decoder<Vec<u64>> for Code<I>
where
    u64: ConvertFrom<I>,
    I: One,
    I: Clone,
    I: From<u64>,
    I: Shl<usize, Output = I>,
    I: Add<I, Output = I>,
    for<'a, 'b> &'a I: Rem<&'b I, Output = I>,
    for<'a> &'a I: Shr<usize, Output = I>,
{
    type Source = vector::Plaintext<I, u64>;
    fn decode(&self, x: &vector::Plaintext<I, u64>) -> Vec<u64> {
        // Unpacking consumes the big integer, so clone it out of the plaintext.
        let packed = x.data.0.clone();
        unpack(packed, self.component_count, self.component_size)
    }
}
|
/// Estimated number of people. (Stub — body not yet implemented.)
pub fn query_num() {}
/// Crowd portrait. (Stub — body not yet implemented.)
fn query_portrait() {}
/// User ids. (Stub — body not yet implemented.)
// NOTE(review): name looks like a typo of `query_user_list`; renaming would
// change the function's interface, so it is only flagged here.
fn query_uesr_list() {}
|
#![allow(non_snake_case, non_camel_case_types, non_upper_case_globals, clashing_extern_declarations, clippy::all)]
#[link(name = "windows")]
extern "system" {}
/// Generated enum-style wrapper over `i32`: status of Wi-Fi adapter access.
#[repr(transparent)]
#[derive(::core::marker::Copy, ::core::clone::Clone)]
pub struct WiFiAccessStatus(pub i32);
impl WiFiAccessStatus {
    pub const Unspecified: Self = Self(0i32);
    pub const Allowed: Self = Self(1i32);
    pub const DeniedByUser: Self = Self(2i32);
    pub const DeniedBySystem: Self = Self(3i32);
}
/// Opaque projected runtime object, held as a raw pointer.
pub type WiFiAdapter = *mut ::core::ffi::c_void;
/// Opaque projected runtime object, held as a raw pointer.
pub type WiFiAvailableNetwork = *mut ::core::ffi::c_void;
/// Generated enum-style wrapper over `i32`: how a Wi-Fi connection is made.
#[repr(transparent)]
#[derive(::core::marker::Copy, ::core::clone::Clone)]
pub struct WiFiConnectionMethod(pub i32);
impl WiFiConnectionMethod {
    pub const Default: Self = Self(0i32);
    pub const WpsPin: Self = Self(1i32);
    pub const WpsPushButton: Self = Self(2i32);
}
/// Opaque projected runtime object, held as a raw pointer.
pub type WiFiConnectionResult = *mut ::core::ffi::c_void;
/// Generated enum-style wrapper over `i32`: outcome of a connection attempt.
#[repr(transparent)]
#[derive(::core::marker::Copy, ::core::clone::Clone)]
pub struct WiFiConnectionStatus(pub i32);
impl WiFiConnectionStatus {
    pub const UnspecifiedFailure: Self = Self(0i32);
    pub const Success: Self = Self(1i32);
    pub const AccessRevoked: Self = Self(2i32);
    pub const InvalidCredential: Self = Self(3i32);
    pub const NetworkNotAvailable: Self = Self(4i32);
    pub const Timeout: Self = Self(5i32);
    pub const UnsupportedAuthenticationProtocol: Self = Self(6i32);
}
/// Generated enum-style wrapper over `i32`: kind of Wi-Fi network.
#[repr(transparent)]
#[derive(::core::marker::Copy, ::core::clone::Clone)]
pub struct WiFiNetworkKind(pub i32);
impl WiFiNetworkKind {
    pub const Any: Self = Self(0i32);
    pub const Infrastructure: Self = Self(1i32);
    pub const Adhoc: Self = Self(2i32);
}
/// Opaque projected runtime object, held as a raw pointer.
pub type WiFiNetworkReport = *mut ::core::ffi::c_void;
/// Generated enum-style wrapper over `i32`: 802.11 PHY layer in use.
#[repr(transparent)]
#[derive(::core::marker::Copy, ::core::clone::Clone)]
pub struct WiFiPhyKind(pub i32);
impl WiFiPhyKind {
    pub const Unknown: Self = Self(0i32);
    pub const Fhss: Self = Self(1i32);
    pub const Dsss: Self = Self(2i32);
    pub const IRBaseband: Self = Self(3i32);
    pub const Ofdm: Self = Self(4i32);
    pub const Hrdsss: Self = Self(5i32);
    pub const Erp: Self = Self(6i32);
    pub const HT: Self = Self(7i32);
    pub const Vht: Self = Self(8i32);
    pub const Dmg: Self = Self(9i32);
    pub const HE: Self = Self(10i32);
}
/// Generated enum-style wrapper over `i32`: automatic vs. manual reconnection.
#[repr(transparent)]
#[derive(::core::marker::Copy, ::core::clone::Clone)]
pub struct WiFiReconnectionKind(pub i32);
impl WiFiReconnectionKind {
    pub const Automatic: Self = Self(0i32);
    pub const Manual: Self = Self(1i32);
}
/// Opaque projected runtime object, held as a raw pointer.
pub type WiFiWpsConfigurationResult = *mut ::core::ffi::c_void;
/// Generated enum-style wrapper over `i32`: result of a WPS configuration.
#[repr(transparent)]
#[derive(::core::marker::Copy, ::core::clone::Clone)]
pub struct WiFiWpsConfigurationStatus(pub i32);
impl WiFiWpsConfigurationStatus {
    pub const UnspecifiedFailure: Self = Self(0i32);
    pub const Success: Self = Self(1i32);
    pub const Timeout: Self = Self(2i32);
}
/// Generated enum-style wrapper over `i32`: supported WPS methods.
#[repr(transparent)]
#[derive(::core::marker::Copy, ::core::clone::Clone)]
pub struct WiFiWpsKind(pub i32);
impl WiFiWpsKind {
    pub const Unknown: Self = Self(0i32);
    pub const Pin: Self = Self(1i32);
    pub const PushButton: Self = Self(2i32);
    pub const Nfc: Self = Self(3i32);
    pub const Ethernet: Self = Self(4i32);
    pub const Usb: Self = Self(5i32);
}
|
//! User and Group ID types.
#![allow(unsafe_code)]
use crate::backend::c;
/// A group identifier as a raw integer.
// Excluded on WASI, which has no Unix-style group IDs.
#[cfg(not(target_os = "wasi"))]
pub type RawGid = c::gid_t;
/// A user identifier as a raw integer.
// Excluded on WASI, which has no Unix-style user IDs.
#[cfg(not(target_os = "wasi"))]
pub type RawUid = c::uid_t;
/// `uid_t`—A Unix user ID.
///
/// Newtype over [`RawUid`]; construct via the unsafe [`Uid::from_raw`].
#[repr(transparent)]
#[derive(Copy, Clone, Eq, PartialEq, Debug, Hash)]
pub struct Uid(RawUid);
/// `gid_t`—A Unix group ID.
///
/// Newtype over [`RawGid`]; construct via the unsafe [`Gid::from_raw`].
#[repr(transparent)]
#[derive(Copy, Clone, Eq, PartialEq, Debug, Hash)]
pub struct Gid(RawGid);
impl Uid {
    /// A `Uid` corresponding to the root user (uid 0).
    pub const ROOT: Self = Self(0);
    /// Wraps a raw `uid_t` value in a `Uid`.
    ///
    /// # Safety
    ///
    /// `raw` must be the value of a valid Unix user ID.
    #[inline]
    pub const unsafe fn from_raw(raw: RawUid) -> Self {
        Self(raw)
    }
    /// Returns the raw `uid_t` value held by this `Uid`.
    #[inline]
    pub const fn as_raw(self) -> RawUid {
        self.0
    }
    /// Returns `true` when this uid is the root user (uid 0).
    #[inline]
    pub const fn is_root(self) -> bool {
        self.as_raw() == Self::ROOT.0
    }
}
impl Gid {
    /// A `Gid` corresponding to the root group (gid 0).
    pub const ROOT: Self = Self(0);
    /// Wraps a raw `gid_t` value in a `Gid`.
    ///
    /// # Safety
    ///
    /// `raw` must be the value of a valid Unix group ID.
    #[inline]
    pub const unsafe fn from_raw(raw: RawGid) -> Self {
        Self(raw)
    }
    /// Returns the raw `gid_t` value held by this `Gid`.
    #[inline]
    pub const fn as_raw(self) -> RawGid {
        self.0
    }
    /// Returns `true` when this gid is the root group (gid 0).
    #[inline]
    pub const fn is_root(self) -> bool {
        self.as_raw() == Self::ROOT.0
    }
}
// Return the raw value of the IDs. In case of `None` it returns `!0` since it
// has the same bit pattern as `-1` indicating no change to the owner/group ID.
pub(crate) fn translate_fchown_args(owner: Option<Uid>, group: Option<Gid>) -> (RawUid, RawGid) {
let ow = match owner {
Some(o) => o.as_raw(),
None => !0,
};
let gr = match group {
Some(g) => g.as_raw(),
None => !0,
};
(ow, gr)
}
/// Sanity-check that the raw ID aliases are 32 bits wide, matching the
/// `!0` sentinel logic in `translate_fchown_args`.
// NOTE(review): `assert_eq_size!` is presumably from the `static_assertions`
// crate, imported at crate level — confirm.
#[test]
fn test_sizes() {
    assert_eq_size!(RawUid, u32);
    assert_eq_size!(RawGid, u32);
}
|
use super::wizard::Wizard;
use super::world::World;
/// Per-player game state: the wizards in play and the world they act in.
#[derive(Clone, Debug, PartialEq)]
pub struct PlayerContext {
    // Wizards belonging to this context.
    pub wizards: Vec<Wizard>,
    // The world state.
    pub world: World,
}
|
use super::{exp, fabs, get_high_word, with_set_low_word};
/* origin: FreeBSD /usr/src/lib/msun/src/s_erf.c */
/*
* ====================================================
* Copyright (C) 1993 by Sun Microsystems, Inc. All rights reserved.
*
* Developed at SunPro, a Sun Microsystems, Inc. business.
* Permission to use, copy, modify, and distribute this
* software is freely granted, provided that this notice
* is preserved.
* ====================================================
*/
/* double erf(double x)
* double erfc(double x)
* x
* 2 |\
* erf(x) = --------- | exp(-t*t)dt
* sqrt(pi) \|
* 0
*
* erfc(x) = 1-erf(x)
* Note that
* erf(-x) = -erf(x)
* erfc(-x) = 2 - erfc(x)
*
* Method:
* 1. For |x| in [0, 0.84375]
* erf(x) = x + x*R(x^2)
* erfc(x) = 1 - erf(x) if x in [-.84375,0.25]
* = 0.5 + ((0.5-x)-x*R) if x in [0.25,0.84375]
* where R = P/Q where P is an odd poly of degree 8 and
* Q is an odd poly of degree 10.
* -57.90
* | R - (erf(x)-x)/x | <= 2
*
*
* Remark. The formula is derived by noting
* erf(x) = (2/sqrt(pi))*(x - x^3/3 + x^5/10 - x^7/42 + ....)
* and that
* 2/sqrt(pi) = 1.128379167095512573896158903121545171688
* is close to one. The interval is chosen because the fix
* point of erf(x) is near 0.6174 (i.e., erf(x)=x when x is
* near 0.6174), and by some experiment, 0.84375 is chosen to
* guarantee the error is less than one ulp for erf.
*
* 2. For |x| in [0.84375,1.25], let s = |x| - 1, and
* c = 0.84506291151 rounded to single (24 bits)
* erf(x) = sign(x) * (c + P1(s)/Q1(s))
* erfc(x) = (1-c) - P1(s)/Q1(s) if x > 0
* 1+(c+P1(s)/Q1(s)) if x < 0
* |P1/Q1 - (erf(|x|)-c)| <= 2**-59.06
* Remark: here we use the taylor series expansion at x=1.
* erf(1+s) = erf(1) + s*Poly(s)
* = 0.845.. + P1(s)/Q1(s)
* That is, we use rational approximation to approximate
* erf(1+s) - (c = (single)0.84506291151)
* Note that |P1/Q1|< 0.078 for x in [0.84375,1.25]
* where
* P1(s) = degree 6 poly in s
* Q1(s) = degree 6 poly in s
*
* 3. For x in [1.25,1/0.35(~2.857143)],
* erfc(x) = (1/x)*exp(-x*x-0.5625+R1/S1)
* erf(x) = 1 - erfc(x)
* where
* R1(z) = degree 7 poly in z, (z=1/x^2)
* S1(z) = degree 8 poly in z
*
* 4. For x in [1/0.35,28]
* erfc(x) = (1/x)*exp(-x*x-0.5625+R2/S2) if x > 0
* = 2.0 - (1/x)*exp(-x*x-0.5625+R2/S2) if -6<x<0
* = 2.0 - tiny (if x <= -6)
* erf(x) = sign(x)*(1.0 - erfc(x)) if x < 6, else
* erf(x) = sign(x)*(1.0 - tiny)
* where
* R2(z) = degree 6 poly in z, (z=1/x^2)
* S2(z) = degree 7 poly in z
*
* Note1:
* To compute exp(-x*x-0.5625+R/S), let s be a single
* precision number and s := x; then
* -x*x = -s*s + (s-x)*(s+x)
 * exp(-x*x-0.5625+R/S) =
* exp(-s*s-0.5625)*exp((s-x)*(s+x)+R/S);
* Note2:
* Here 4 and 5 make use of the asymptotic series
* exp(-x*x)
* erfc(x) ~ ---------- * ( 1 + Poly(1/x^2) )
* x*sqrt(pi)
* We use rational approximation to approximate
* g(s)=f(1/x^2) = log(erfc(x)*x) - x*x + 0.5625
* Here is the error bound for R1/S1 and R2/S2
* |R1/S1 - f(x)| < 2**(-62.57)
* |R2/S2 - f(x)| < 2**(-61.52)
*
* 5. For inf > x >= 28
* erf(x) = sign(x) *(1 - tiny) (raise inexact)
* erfc(x) = tiny*tiny (raise underflow) if x > 0
* = 2 - tiny if x<0
*
* 7. Special case:
* erf(0) = 0, erf(inf) = 1, erf(-inf) = -1,
* erfc(0) = 1, erfc(inf) = 0, erfc(-inf) = 2,
* erfc/erf(NaN) is NaN
*/
const ERX: f64 = 8.45062911510467529297e-01; /* 0x3FEB0AC1, 0x60000000 */
/*
* Coefficients for approximation to erf on [0,0.84375]
*/
const EFX8: f64 = 1.02703333676410069053e+00; /* 0x3FF06EBA, 0x8214DB69 */
const PP0: f64 = 1.28379167095512558561e-01; /* 0x3FC06EBA, 0x8214DB68 */
const PP1: f64 = -3.25042107247001499370e-01; /* 0xBFD4CD7D, 0x691CB913 */
const PP2: f64 = -2.84817495755985104766e-02; /* 0xBF9D2A51, 0xDBD7194F */
const PP3: f64 = -5.77027029648944159157e-03; /* 0xBF77A291, 0x236668E4 */
const PP4: f64 = -2.37630166566501626084e-05; /* 0xBEF8EAD6, 0x120016AC */
const QQ1: f64 = 3.97917223959155352819e-01; /* 0x3FD97779, 0xCDDADC09 */
const QQ2: f64 = 6.50222499887672944485e-02; /* 0x3FB0A54C, 0x5536CEBA */
const QQ3: f64 = 5.08130628187576562776e-03; /* 0x3F74D022, 0xC4D36B0F */
const QQ4: f64 = 1.32494738004321644526e-04; /* 0x3F215DC9, 0x221C1A10 */
const QQ5: f64 = -3.96022827877536812320e-06; /* 0xBED09C43, 0x42A26120 */
/*
* Coefficients for approximation to erf in [0.84375,1.25]
*/
const PA0: f64 = -2.36211856075265944077e-03; /* 0xBF6359B8, 0xBEF77538 */
const PA1: f64 = 4.14856118683748331666e-01; /* 0x3FDA8D00, 0xAD92B34D */
const PA2: f64 = -3.72207876035701323847e-01; /* 0xBFD7D240, 0xFBB8C3F1 */
const PA3: f64 = 3.18346619901161753674e-01; /* 0x3FD45FCA, 0x805120E4 */
const PA4: f64 = -1.10894694282396677476e-01; /* 0xBFBC6398, 0x3D3E28EC */
const PA5: f64 = 3.54783043256182359371e-02; /* 0x3FA22A36, 0x599795EB */
const PA6: f64 = -2.16637559486879084300e-03; /* 0xBF61BF38, 0x0A96073F */
const QA1: f64 = 1.06420880400844228286e-01; /* 0x3FBB3E66, 0x18EEE323 */
const QA2: f64 = 5.40397917702171048937e-01; /* 0x3FE14AF0, 0x92EB6F33 */
const QA3: f64 = 7.18286544141962662868e-02; /* 0x3FB2635C, 0xD99FE9A7 */
const QA4: f64 = 1.26171219808761642112e-01; /* 0x3FC02660, 0xE763351F */
const QA5: f64 = 1.36370839120290507362e-02; /* 0x3F8BEDC2, 0x6B51DD1C */
const QA6: f64 = 1.19844998467991074170e-02; /* 0x3F888B54, 0x5735151D */
/*
* Coefficients for approximation to erfc in [1.25,1/0.35]
*/
const RA0: f64 = -9.86494403484714822705e-03; /* 0xBF843412, 0x600D6435 */
const RA1: f64 = -6.93858572707181764372e-01; /* 0xBFE63416, 0xE4BA7360 */
const RA2: f64 = -1.05586262253232909814e+01; /* 0xC0251E04, 0x41B0E726 */
const RA3: f64 = -6.23753324503260060396e+01; /* 0xC04F300A, 0xE4CBA38D */
const RA4: f64 = -1.62396669462573470355e+02; /* 0xC0644CB1, 0x84282266 */
const RA5: f64 = -1.84605092906711035994e+02; /* 0xC067135C, 0xEBCCABB2 */
const RA6: f64 = -8.12874355063065934246e+01; /* 0xC0545265, 0x57E4D2F2 */
const RA7: f64 = -9.81432934416914548592e+00; /* 0xC023A0EF, 0xC69AC25C */
const SA1: f64 = 1.96512716674392571292e+01; /* 0x4033A6B9, 0xBD707687 */
const SA2: f64 = 1.37657754143519042600e+02; /* 0x4061350C, 0x526AE721 */
const SA3: f64 = 4.34565877475229228821e+02; /* 0x407B290D, 0xD58A1A71 */
const SA4: f64 = 6.45387271733267880336e+02; /* 0x40842B19, 0x21EC2868 */
const SA5: f64 = 4.29008140027567833386e+02; /* 0x407AD021, 0x57700314 */
const SA6: f64 = 1.08635005541779435134e+02; /* 0x405B28A3, 0xEE48AE2C */
const SA7: f64 = 6.57024977031928170135e+00; /* 0x401A47EF, 0x8E484A93 */
const SA8: f64 = -6.04244152148580987438e-02; /* 0xBFAEEFF2, 0xEE749A62 */
/*
* Coefficients for approximation to erfc in [1/.35,28]
*/
const RB0: f64 = -9.86494292470009928597e-03; /* 0xBF843412, 0x39E86F4A */
const RB1: f64 = -7.99283237680523006574e-01; /* 0xBFE993BA, 0x70C285DE */
const RB2: f64 = -1.77579549177547519889e+01; /* 0xC031C209, 0x555F995A */
const RB3: f64 = -1.60636384855821916062e+02; /* 0xC064145D, 0x43C5ED98 */
const RB4: f64 = -6.37566443368389627722e+02; /* 0xC083EC88, 0x1375F228 */
const RB5: f64 = -1.02509513161107724954e+03; /* 0xC0900461, 0x6A2E5992 */
const RB6: f64 = -4.83519191608651397019e+02; /* 0xC07E384E, 0x9BDC383F */
const SB1: f64 = 3.03380607434824582924e+01; /* 0x403E568B, 0x261D5190 */
const SB2: f64 = 3.25792512996573918826e+02; /* 0x40745CAE, 0x221B9F0A */
const SB3: f64 = 1.53672958608443695994e+03; /* 0x409802EB, 0x189D5118 */
const SB4: f64 = 3.19985821950859553908e+03; /* 0x40A8FFB7, 0x688C246A */
const SB5: f64 = 2.55305040643316442583e+03; /* 0x40A3F219, 0xCEDF3BE6 */
const SB6: f64 = 4.74528541206955367215e+02; /* 0x407DA874, 0xE79FE763 */
const SB7: f64 = -2.24409524465858183362e+01; /* 0xC03670E2, 0x42712D62 */
/* Interval 2: rational approximation P1(s)/Q1(s) around the Taylor
   expansion at x = 1, for 0.84375 <= |x| < 1.25 (s = |x| - 1). */
fn erfc1(x: f64) -> f64 {
    let s = fabs(x) - 1.0;
    let p = PA0 + s * (PA1 + s * (PA2 + s * (PA3 + s * (PA4 + s * (PA5 + s * PA6)))));
    let q = 1.0 + s * (QA1 + s * (QA2 + s * (QA3 + s * (QA4 + s * (QA5 + s * QA6)))));
    1.0 - ERX - p / q
}
/* erfc for 1.25 <= |x| < 28 via (1/x)*exp(-x*x-0.5625+R/S); delegates to
   erfc1 when |x| < 1.25. `ix` must be the high word of x with the sign
   masked off. */
fn erfc2(ix: u32, mut x: f64) -> f64 {
    if ix < 0x3ff40000 {
        /* |x| < 1.25 — use the interval-2 approximation instead */
        return erfc1(x);
    }
    x = fabs(x);
    let s = 1.0 / (x * x);
    let (r, big_s) = if ix < 0x4006db6d {
        /* |x| < 1/.35 ~ 2.85714: R1/S1 coefficients */
        (
            RA0 + s * (RA1 + s * (RA2 + s * (RA3 + s * (RA4 + s * (RA5 + s * (RA6 + s * RA7)))))),
            1.0 + s * (SA1 + s * (SA2 + s * (SA3 + s * (SA4 + s * (SA5 + s * (SA6 + s * (SA7 + s * SA8))))))),
        )
    } else {
        /* |x| >= 1/.35: R2/S2 coefficients */
        (
            RB0 + s * (RB1 + s * (RB2 + s * (RB3 + s * (RB4 + s * (RB5 + s * RB6))))),
            1.0 + s * (SB1 + s * (SB2 + s * (SB3 + s * (SB4 + s * (SB5 + s * (SB6 + s * SB7)))))),
        )
    };
    /* z is x with the low word zeroed so z*z is exact; then
       -x*x = -z*z + (z-x)*(z+x), split across two exp calls. */
    let z = with_set_low_word(x, 0);
    exp(-z * z - 0.5625) * exp((z - x) * (z + x) + r / big_s) / x
}
/// Error function (f64)
///
/// Calculates an approximation to the “error function”, which estimates
/// the probability that an observation will fall within x standard
/// deviations of the mean (assuming a normal distribution).
#[cfg_attr(all(test, assert_no_panic), no_panic::no_panic)]
pub fn erf(x: f64) -> f64 {
    let mut ix = get_high_word(x);
    let sign = (ix >> 31) as usize;
    ix &= 0x7fffffff;
    if ix >= 0x7ff00000 {
        /* erf(nan)=nan, erf(+-inf)=+-1 */
        return 1.0 - 2.0 * (sign as f64) + 1.0 / x;
    }
    if ix < 0x3feb0000 {
        /* |x| < 0.84375 */
        if ix < 0x3e300000 {
            /* |x| < 2**-28 — form chosen to avoid underflow */
            return 0.125 * (8.0 * x + EFX8 * x);
        }
        /* erf(x) = x + x*R(x^2) with R = P/Q */
        let z = x * x;
        let r = PP0 + z * (PP1 + z * (PP2 + z * (PP3 + z * PP4)));
        let s = 1.0 + z * (QQ1 + z * (QQ2 + z * (QQ3 + z * (QQ4 + z * QQ5))));
        let y = r / s;
        return x + x * y;
    }
    let y = if ix < 0x40180000 {
        /* 0.84375 <= |x| < 6 */
        1.0 - erfc2(ix, x)
    } else {
        /* |x| >= 6 — erf saturates; subtract a tiny value to raise inexact */
        let x1p_1022 = f64::from_bits(0x0010000000000000);
        1.0 - x1p_1022
    };
    /* erf(-x) = -erf(x) */
    if sign != 0 {
        -y
    } else {
        y
    }
}
/// Complementary error function (f64)
///
/// Calculates the complementary probability.
/// Is `1 - erf(x)`. Is computed directly, so that you can use it to avoid
/// the loss of precision that would result from subtracting
/// large probabilities (on large `x`) from 1.
pub fn erfc(x: f64) -> f64 {
    let mut ix = get_high_word(x);
    let sign = (ix >> 31) as usize;
    ix &= 0x7fffffff;
    if ix >= 0x7ff00000 {
        /* erfc(nan)=nan, erfc(+-inf)=0,2 */
        return 2.0 * (sign as f64) + 1.0 / x;
    }
    if ix < 0x3feb0000 {
        /* |x| < 0.84375 */
        if ix < 0x3c700000 {
            /* |x| < 2**-56 — erfc(x) ~= 1 - x */
            return 1.0 - x;
        }
        let z = x * x;
        let r = PP0 + z * (PP1 + z * (PP2 + z * (PP3 + z * PP4)));
        let s = 1.0 + z * (QQ1 + z * (QQ2 + z * (QQ3 + z * (QQ4 + z * QQ5))));
        let y = r / s;
        if sign != 0 || ix < 0x3fd00000 {
            /* x < 1/4: erfc = 1 - erf(x) directly */
            return 1.0 - (x + x * y);
        }
        /* 0.25 <= x < 0.84375: rearranged to preserve precision */
        return 0.5 - (x - 0.5 + x * y);
    }
    if ix < 0x403c0000 {
        /* 0.84375 <= |x| < 28; erfc(-x) = 2 - erfc(x) */
        return if sign != 0 {
            2.0 - erfc2(ix, x)
        } else {
            erfc2(ix, x)
        };
    }
    /* |x| >= 28: underflow to tiny*tiny, or 2 - tiny when x is negative */
    let x1p_1022 = f64::from_bits(0x0010000000000000);
    if sign != 0 {
        2.0 - x1p_1022
    } else {
        x1p_1022 * x1p_1022
    }
}
|
/// Endpoint for pushing a batch of transactions to the chain
/// (presumably an EOSIO-style `/v1/chain` REST API — confirm against the client).
const PATH: &str = "/v1/chain/push_transactions";
|
use crate::prelude::*;
use rstar::*;
/// Stroke points loaded from an SVG, plus an R-tree spatial index over them.
pub struct HorizontalStrokes {
    // Denormalized polyline points, ready to draw.
    points: Vec<Point2>,
    // R-tree over the same points; `My` is presumably a newtype adapting
    // `Point2` to the rstar traits — confirm.
    boundary_rtree: RTree<My<Point2>>,
}
pub fn new(app: &App, container: &Rect) -> HorizontalStrokes {
let points = load_denormalized_points(app, container);
let my_points: Vec<My<Point2>> = points.iter().map(|point| My(*point)).collect();
HorizontalStrokes {
points,
boundary_rtree: RTree::bulk_load(my_points),
}
}
/// Loads the boundary path from its SVG and maps the normalized coordinates
/// into a sub-rectangle of `container`.
fn load_denormalized_points(app: &App, container: &Rect) -> Vec<Point2> {
    let points = crate::svg::parse_path(app, "horizontal_boundary.svg");
    // Target bounds expressed as fractions of the container width.
    // NOTE(review): the magic fractions look hand-tuned to the artwork — confirm.
    let top_left = pt2(0.0335, 0.4951111).denormalize(container);
    let size = vec2(container.lerp_w(0.889), -container.lerp_w(0.233));
    let bounds = Rect::from_corners(top_left, top_left + size);
    points.denormalize_xy(&bounds)
}
impl HorizontalStrokes {
    /// Draws the stroke polyline into the draw context carried by `params`.
    pub fn render(&self, params: &mut RenderParams) {
        params
            .draw
            .polyline()
            .points(self.points.clone())
            .color(soft_black());
    }
}
|
#![allow(unused_variables, non_upper_case_globals, non_snake_case, unused_unsafe, non_camel_case_types, dead_code, clippy::all)]
// Generated COM-style interface wrapper; the inner IInspectable carries the
// actual object pointer.
#[repr(transparent)]
#[doc(hidden)]
pub struct IWalletItemSystemStore(pub ::windows::core::IInspectable);
// SAFETY: generated metadata — the vtable layout and IID must match the
// interface definition in the Windows metadata.
unsafe impl ::windows::core::Interface for IWalletItemSystemStore {
    type Vtable = IWalletItemSystemStore_abi;
    const IID: ::windows::core::GUID = ::windows::core::GUID::from_u128(0x522e2bff_96a2_4a17_8d19_fe1d9f837561);
}
// Generated C-layout vtable for `IWalletItemSystemStore`. The first six
// slots are the standard IUnknown/IInspectable prelude; slots 6+ are this
// interface's own methods, invoked by index from `WalletItemSystemStore`.
// Feature-gated slots collapse to a `usize` placeholder so slot offsets are
// preserved when a feature is disabled.
#[repr(C)]
#[doc(hidden)]
pub struct IWalletItemSystemStore_abi(
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, iid: &::windows::core::GUID, interface: *mut ::windows::core::RawPtr) -> ::windows::core::HRESULT,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr) -> u32,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr) -> u32,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, count: *mut u32, values: *mut *mut ::windows::core::GUID) -> ::windows::core::HRESULT,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, value: *mut ::windows::core::RawPtr) -> ::windows::core::HRESULT,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, value: *mut i32) -> ::windows::core::HRESULT,
    // slot 6: GetItemsAsync
    #[cfg(all(feature = "Foundation", feature = "Foundation_Collections"))] pub unsafe extern "system" fn(this: ::windows::core::RawPtr, result__: *mut ::windows::core::RawPtr) -> ::windows::core::HRESULT,
    #[cfg(not(all(feature = "Foundation", feature = "Foundation_Collections")))] usize,
    // slot 7: DeleteAsync
    #[cfg(feature = "Foundation")] pub unsafe extern "system" fn(this: ::windows::core::RawPtr, item: ::windows::core::RawPtr, result__: *mut ::windows::core::RawPtr) -> ::windows::core::HRESULT,
    #[cfg(not(feature = "Foundation"))] usize,
    // slot 8: ImportItemAsync
    #[cfg(all(feature = "Foundation", feature = "Storage_Streams"))] pub unsafe extern "system" fn(this: ::windows::core::RawPtr, stream: ::windows::core::RawPtr, result__: *mut ::windows::core::RawPtr) -> ::windows::core::HRESULT,
    #[cfg(not(all(feature = "Foundation", feature = "Storage_Streams")))] usize,
    // slot 9: GetAppStatusForItem
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, item: ::windows::core::RawPtr, result__: *mut WalletItemAppAssociation) -> ::windows::core::HRESULT,
    // slot 10: LaunchAppForItemAsync
    #[cfg(feature = "Foundation")] pub unsafe extern "system" fn(this: ::windows::core::RawPtr, item: ::windows::core::RawPtr, result__: *mut ::windows::core::RawPtr) -> ::windows::core::HRESULT,
    #[cfg(not(feature = "Foundation"))] usize,
);
// Generated COM-style interface wrapper for the "2" interface revision,
// which adds the ItemsChanged event methods.
#[repr(transparent)]
#[doc(hidden)]
pub struct IWalletItemSystemStore2(pub ::windows::core::IInspectable);
// SAFETY: generated metadata — vtable layout and IID must match the metadata.
unsafe impl ::windows::core::Interface for IWalletItemSystemStore2 {
    type Vtable = IWalletItemSystemStore2_abi;
    const IID: ::windows::core::GUID = ::windows::core::GUID::from_u128(0xf98d3a4e_be00_4fdd_9734_6c113c1ac1cb);
}
// Generated vtable for `IWalletItemSystemStore2`: IUnknown/IInspectable
// prelude (slots 0-5), then add/remove handlers for the ItemsChanged event
// (slots 6-7). Gated slots degrade to `usize` placeholders.
#[repr(C)]
#[doc(hidden)]
pub struct IWalletItemSystemStore2_abi(
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, iid: &::windows::core::GUID, interface: *mut ::windows::core::RawPtr) -> ::windows::core::HRESULT,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr) -> u32,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr) -> u32,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, count: *mut u32, values: *mut *mut ::windows::core::GUID) -> ::windows::core::HRESULT,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, value: *mut ::windows::core::RawPtr) -> ::windows::core::HRESULT,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, value: *mut i32) -> ::windows::core::HRESULT,
    // slot 6: ItemsChanged (add handler, returns registration token)
    #[cfg(feature = "Foundation")] pub unsafe extern "system" fn(this: ::windows::core::RawPtr, handler: ::windows::core::RawPtr, result__: *mut super::super::super::Foundation::EventRegistrationToken) -> ::windows::core::HRESULT,
    #[cfg(not(feature = "Foundation"))] usize,
    // slot 7: RemoveItemsChanged (by registration token)
    #[cfg(feature = "Foundation")] pub unsafe extern "system" fn(this: ::windows::core::RawPtr, cookie: super::super::super::Foundation::EventRegistrationToken) -> ::windows::core::HRESULT,
    #[cfg(not(feature = "Foundation"))] usize,
);
// Generated COM-style wrapper for the static factory interface of the
// wallet manager.
#[repr(transparent)]
#[doc(hidden)]
pub struct IWalletManagerSystemStatics(pub ::windows::core::IInspectable);
// SAFETY: generated metadata — vtable layout and IID must match the metadata.
unsafe impl ::windows::core::Interface for IWalletManagerSystemStatics {
    type Vtable = IWalletManagerSystemStatics_abi;
    const IID: ::windows::core::GUID = ::windows::core::GUID::from_u128(0xbee8eb89_2634_4b9a_8b23_ee8903c91fe0);
}
// Generated vtable for `IWalletManagerSystemStatics`: IUnknown/IInspectable
// prelude (slots 0-5) plus one Foundation-gated method slot.
#[repr(C)]
#[doc(hidden)]
pub struct IWalletManagerSystemStatics_abi(
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, iid: &::windows::core::GUID, interface: *mut ::windows::core::RawPtr) -> ::windows::core::HRESULT,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr) -> u32,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr) -> u32,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, count: *mut u32, values: *mut *mut ::windows::core::GUID) -> ::windows::core::HRESULT,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, value: *mut ::windows::core::RawPtr) -> ::windows::core::HRESULT,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, value: *mut i32) -> ::windows::core::HRESULT,
    // slot 6: Foundation-gated async factory method
    #[cfg(feature = "Foundation")] pub unsafe extern "system" fn(this: ::windows::core::RawPtr, result__: *mut ::windows::core::RawPtr) -> ::windows::core::HRESULT,
    #[cfg(not(feature = "Foundation"))] usize,
);
/// Generated WinRT enum: the app-association state of a wallet item.
#[repr(transparent)]
#[derive(::core::cmp::PartialEq, ::core::cmp::Eq, ::core::marker::Copy, ::core::clone::Clone, ::core::default::Default, ::core::fmt::Debug)]
pub struct WalletItemAppAssociation(pub i32);
impl WalletItemAppAssociation {
    pub const None: Self = Self(0i32);
    pub const AppInstalled: Self = Self(1i32);
    pub const AppNotInstalled: Self = Self(2i32);
}
impl ::core::convert::From<i32> for WalletItemAppAssociation {
    fn from(value: i32) -> Self {
        Self(value)
    }
}
// SAFETY: repr(transparent) over i32, so the value is its own ABI form.
unsafe impl ::windows::core::Abi for WalletItemAppAssociation {
    type Abi = Self;
}
// SAFETY: generated metadata — the signature string must match the metadata.
unsafe impl ::windows::core::RuntimeType for WalletItemAppAssociation {
    const SIGNATURE: ::windows::core::ConstBuffer = ::windows::core::ConstBuffer::from_slice(b"enum(Windows.ApplicationModel.Wallet.System.WalletItemAppAssociation;i4)");
}
impl ::windows::core::DefaultType for WalletItemAppAssociation {
    type DefaultType = Self;
}
/// Projected runtime class `Windows.ApplicationModel.Wallet.System.WalletItemSystemStore`.
// Method wrappers dispatch through the vtable by slot index (see
// `IWalletItemSystemStore_abi`); the two event methods first cast to the
// `IWalletItemSystemStore2` interface.
#[repr(transparent)]
#[derive(:: core :: cmp :: PartialEq, :: core :: cmp :: Eq, :: core :: clone :: Clone, :: core :: fmt :: Debug)]
pub struct WalletItemSystemStore(pub ::windows::core::IInspectable);
impl WalletItemSystemStore {
    /// Retrieves all wallet items in the store (vtable slot 6).
    #[cfg(all(feature = "Foundation", feature = "Foundation_Collections"))]
    pub fn GetItemsAsync(&self) -> ::windows::core::Result<super::super::super::Foundation::IAsyncOperation<super::super::super::Foundation::Collections::IVectorView<super::WalletItem>>> {
        let this = self;
        unsafe {
            let mut result__: ::windows::core::RawPtr = ::core::mem::zeroed();
            (::windows::core::Interface::vtable(this).6)(::core::mem::transmute_copy(this), &mut result__).from_abi::<super::super::super::Foundation::IAsyncOperation<super::super::super::Foundation::Collections::IVectorView<super::WalletItem>>>(result__)
        }
    }
    /// Deletes the given wallet item (vtable slot 7).
    #[cfg(feature = "Foundation")]
    pub fn DeleteAsync<'a, Param0: ::windows::core::IntoParam<'a, super::WalletItem>>(&self, item: Param0) -> ::windows::core::Result<super::super::super::Foundation::IAsyncAction> {
        let this = self;
        unsafe {
            let mut result__: ::windows::core::RawPtr = ::core::mem::zeroed();
            (::windows::core::Interface::vtable(this).7)(::core::mem::transmute_copy(this), item.into_param().abi(), &mut result__).from_abi::<super::super::super::Foundation::IAsyncAction>(result__)
        }
    }
    /// Imports a wallet item from a stream (vtable slot 8).
    #[cfg(all(feature = "Foundation", feature = "Storage_Streams"))]
    pub fn ImportItemAsync<'a, Param0: ::windows::core::IntoParam<'a, super::super::super::Storage::Streams::IRandomAccessStreamReference>>(&self, stream: Param0) -> ::windows::core::Result<super::super::super::Foundation::IAsyncOperation<super::WalletItem>> {
        let this = self;
        unsafe {
            let mut result__: ::windows::core::RawPtr = ::core::mem::zeroed();
            (::windows::core::Interface::vtable(this).8)(::core::mem::transmute_copy(this), stream.into_param().abi(), &mut result__).from_abi::<super::super::super::Foundation::IAsyncOperation<super::WalletItem>>(result__)
        }
    }
    /// Returns the app-association status of an item (vtable slot 9).
    pub fn GetAppStatusForItem<'a, Param0: ::windows::core::IntoParam<'a, super::WalletItem>>(&self, item: Param0) -> ::windows::core::Result<WalletItemAppAssociation> {
        let this = self;
        unsafe {
            let mut result__: WalletItemAppAssociation = ::core::mem::zeroed();
            (::windows::core::Interface::vtable(this).9)(::core::mem::transmute_copy(this), item.into_param().abi(), &mut result__).from_abi::<WalletItemAppAssociation>(result__)
        }
    }
    /// Launches the app associated with the item (vtable slot 10).
    #[cfg(feature = "Foundation")]
    pub fn LaunchAppForItemAsync<'a, Param0: ::windows::core::IntoParam<'a, super::WalletItem>>(&self, item: Param0) -> ::windows::core::Result<super::super::super::Foundation::IAsyncOperation<bool>> {
        let this = self;
        unsafe {
            let mut result__: ::windows::core::RawPtr = ::core::mem::zeroed();
            (::windows::core::Interface::vtable(this).10)(::core::mem::transmute_copy(this), item.into_param().abi(), &mut result__).from_abi::<super::super::super::Foundation::IAsyncOperation<bool>>(result__)
        }
    }
    /// Registers an ItemsChanged handler (IWalletItemSystemStore2, slot 6).
    #[cfg(feature = "Foundation")]
    pub fn ItemsChanged<'a, Param0: ::windows::core::IntoParam<'a, super::super::super::Foundation::TypedEventHandler<WalletItemSystemStore, ::windows::core::IInspectable>>>(&self, handler: Param0) -> ::windows::core::Result<super::super::super::Foundation::EventRegistrationToken> {
        let this = &::windows::core::Interface::cast::<IWalletItemSystemStore2>(self)?;
        unsafe {
            let mut result__: super::super::super::Foundation::EventRegistrationToken = ::core::mem::zeroed();
            (::windows::core::Interface::vtable(this).6)(::core::mem::transmute_copy(this), handler.into_param().abi(), &mut result__).from_abi::<super::super::super::Foundation::EventRegistrationToken>(result__)
        }
    }
    /// Unregisters an ItemsChanged handler (IWalletItemSystemStore2, slot 7).
    #[cfg(feature = "Foundation")]
    pub fn RemoveItemsChanged<'a, Param0: ::windows::core::IntoParam<'a, super::super::super::Foundation::EventRegistrationToken>>(&self, cookie: Param0) -> ::windows::core::Result<()> {
        let this = &::windows::core::Interface::cast::<IWalletItemSystemStore2>(self)?;
        unsafe { (::windows::core::Interface::vtable(this).7)(::core::mem::transmute_copy(this), cookie.into_param().abi()).ok() }
    }
}
// Generated runtime metadata: the WinRT type signature used for generic
// parameterization (class name + default-interface GUID).
unsafe impl ::windows::core::RuntimeType for WalletItemSystemStore {
    const SIGNATURE: ::windows::core::ConstBuffer = ::windows::core::ConstBuffer::from_slice(b"rc(Windows.ApplicationModel.Wallet.System.WalletItemSystemStore;{522e2bff-96a2-4a17-8d19-fe1d9f837561})");
}
// Generated COM interface identity: vtable layout and IID of the default interface.
unsafe impl ::windows::core::Interface for WalletItemSystemStore {
    type Vtable = IWalletItemSystemStore_abi;
    const IID: ::windows::core::GUID = ::windows::core::GUID::from_u128(0x522e2bff_96a2_4a17_8d19_fe1d9f837561);
}
// Fully-qualified WinRT class name used for activation.
impl ::windows::core::RuntimeName for WalletItemSystemStore {
    const NAME: &'static str = "Windows.ApplicationModel.Wallet.System.WalletItemSystemStore";
}
// Generated conversions to IUnknown. `value.0` is the wrapped IInspectable and
// `value.0 .0` its inner IUnknown; owned conversions move, borrowed ones clone
// (an AddRef), and IntoParam borrows where possible to avoid refcount churn.
impl ::core::convert::From<WalletItemSystemStore> for ::windows::core::IUnknown {
    fn from(value: WalletItemSystemStore) -> Self {
        value.0 .0
    }
}
impl ::core::convert::From<&WalletItemSystemStore> for ::windows::core::IUnknown {
    fn from(value: &WalletItemSystemStore) -> Self {
        value.0 .0.clone()
    }
}
impl<'a> ::windows::core::IntoParam<'a, ::windows::core::IUnknown> for WalletItemSystemStore {
    fn into_param(self) -> ::windows::core::Param<'a, ::windows::core::IUnknown> {
        ::windows::core::Param::Owned(self.0 .0)
    }
}
impl<'a> ::windows::core::IntoParam<'a, ::windows::core::IUnknown> for &'a WalletItemSystemStore {
    fn into_param(self) -> ::windows::core::Param<'a, ::windows::core::IUnknown> {
        ::windows::core::Param::Borrowed(&self.0 .0)
    }
}
// Generated conversions to IInspectable (the class's base interface), mirroring
// the IUnknown conversions above but one wrapper level shallower.
impl ::core::convert::From<WalletItemSystemStore> for ::windows::core::IInspectable {
    fn from(value: WalletItemSystemStore) -> Self {
        value.0
    }
}
impl ::core::convert::From<&WalletItemSystemStore> for ::windows::core::IInspectable {
    fn from(value: &WalletItemSystemStore) -> Self {
        value.0.clone()
    }
}
impl<'a> ::windows::core::IntoParam<'a, ::windows::core::IInspectable> for WalletItemSystemStore {
    fn into_param(self) -> ::windows::core::Param<'a, ::windows::core::IInspectable> {
        ::windows::core::Param::Owned(self.0)
    }
}
impl<'a> ::windows::core::IntoParam<'a, ::windows::core::IInspectable> for &'a WalletItemSystemStore {
    fn into_param(self) -> ::windows::core::Param<'a, ::windows::core::IInspectable> {
        ::windows::core::Param::Borrowed(&self.0)
    }
}
// SAFETY (generated): WinRT agile objects are free-threaded, so the wrapper may
// be sent and shared across threads.
unsafe impl ::core::marker::Send for WalletItemSystemStore {}
unsafe impl ::core::marker::Sync for WalletItemSystemStore {}
// Static WinRT class: all functionality is exposed through its activation factory.
pub struct WalletManagerSystem {}
impl WalletManagerSystem {
    #[cfg(feature = "Foundation")]
    // Asynchronously obtains the system wallet item store (factory vtable slot 6).
    pub fn RequestStoreAsync() -> ::windows::core::Result<super::super::super::Foundation::IAsyncOperation<WalletItemSystemStore>> {
        Self::IWalletManagerSystemStatics(|this| unsafe {
            let mut result__: ::windows::core::RawPtr = ::core::mem::zeroed();
            (::windows::core::Interface::vtable(this).6)(::core::mem::transmute_copy(this), &mut result__).from_abi::<super::super::super::Foundation::IAsyncOperation<WalletItemSystemStore>>(result__)
        })
    }
    // Runs `callback` against the cached statics-interface factory.
    pub fn IWalletManagerSystemStatics<R, F: FnOnce(&IWalletManagerSystemStatics) -> ::windows::core::Result<R>>(callback: F) -> ::windows::core::Result<R> {
        // `static mut` FactoryCache is the windows-rs 0.x codegen pattern;
        // the cache is internally synchronized.
        static mut SHARED: ::windows::core::FactoryCache<WalletManagerSystem, IWalletManagerSystemStatics> = ::windows::core::FactoryCache::new();
        unsafe { SHARED.call(callback) }
    }
}
impl ::windows::core::RuntimeName for WalletManagerSystem {
    const NAME: &'static str = "Windows.ApplicationModel.Wallet.System.WalletManagerSystem";
}
|
#![allow(clippy::type_complexity)]
use crate::{
component::{Collider2d, Collider2dBody, Collider2dInner, RigidBody2d, RigidBody2dInner},
resource::Physics2dWorld,
};
use core::{
app::AppLifeCycle,
ecs::{
storage::ComponentEvent, Entities, Entity, Join, ReadExpect, ReadStorage, ReaderId, System,
World, Write, WriteStorage,
},
};
use nphysics2d::object::{BodyPartHandle, DefaultBodyHandle, DefaultColliderHandle};
use std::collections::HashMap;
#[derive(Debug, Default)]
// ECS system that mirrors RigidBody2d/Collider2d components into the nphysics2d
// world: creates physics objects for new components, destroys them on removal,
// and steps the simulation each frame.
pub struct Physics2dSystem {
    // Entity -> nphysics body handle, needed to destroy bodies after their
    // component is gone (removal events only carry the entity index).
    cached_bodies: HashMap<Entity, DefaultBodyHandle>,
    // Entity -> nphysics collider handle, same purpose as `cached_bodies`.
    cached_colliders: HashMap<Entity, DefaultColliderHandle>,
    // Readers for component add/remove event channels; populated in `setup`.
    bodies_reader_id: Option<ReaderId<ComponentEvent>>,
    colliders_reader_id: Option<ReaderId<ComponentEvent>>,
}
impl<'s> System<'s> for Physics2dSystem {
    type SystemData = (
        Entities<'s>,
        ReadExpect<'s, AppLifeCycle>,
        // Optional so the system is a silent no-op until a Physics2dWorld
        // resource has been installed.
        Option<Write<'s, Physics2dWorld>>,
        WriteStorage<'s, RigidBody2d>,
        WriteStorage<'s, Collider2d>,
        ReadStorage<'s, Collider2dBody>,
    );
    // Registers readers on the component event channels so `run` can observe
    // Removed events for bodies and colliders.
    fn setup(&mut self, world: &mut World) {
        use core::ecs::SystemData;
        Self::SystemData::setup(world);
        self.bodies_reader_id = Some(WriteStorage::<RigidBody2d>::fetch(&world).register_reader());
        self.colliders_reader_id =
            Some(WriteStorage::<Collider2d>::fetch(&world).register_reader());
    }
    // Frame pipeline: (1) destroy physics objects whose components were removed,
    // (2) create physics objects for freshly added components, (3) step the world.
    fn run(
        &mut self,
        (entities, lifecycle, world, mut bodies, mut colliders, colliders_body): Self::SystemData,
    ) {
        if world.is_none() {
            return;
        }
        let world: &mut Physics2dWorld = &mut world.unwrap();
        // Destroy nphysics bodies for removed RigidBody2d components. The event
        // carries only the entity index, so map it back through the cache.
        let events = bodies
            .channel()
            .read(self.bodies_reader_id.as_mut().unwrap());
        for event in events {
            if let ComponentEvent::Removed(index) = event {
                let found = self.cached_bodies.iter().find_map(|(entity, handle)| {
                    if entity.id() == *index {
                        Some((*entity, *handle))
                    } else {
                        None
                    }
                });
                if let Some((entity, handle)) = found {
                    self.cached_bodies.remove(&entity);
                    world.destroy_body(handle);
                }
            }
        }
        // Identical removal handling for Collider2d components.
        let events = colliders
            .channel()
            .read(self.colliders_reader_id.as_mut().unwrap());
        for event in events {
            if let ComponentEvent::Removed(index) = event {
                let found = self.cached_colliders.iter().find_map(|(entity, handle)| {
                    if entity.id() == *index {
                        Some((*entity, *handle))
                    } else {
                        None
                    }
                });
                if let Some((entity, handle)) = found {
                    self.cached_colliders.remove(&entity);
                    world.destroy_collider(handle);
                }
            }
        }
        // Build bodies for components still holding a description (not yet created),
        // then swap the component into its Handle state and cache the mapping.
        for (entity, body) in (&entities, &mut bodies).join() {
            if !body.is_created() {
                let b = body.take_description().unwrap().build();
                let h = world.insert_body(b);
                body.0 = RigidBody2dInner::Handle(h);
                self.cached_bodies.insert(entity, h);
            }
        }
        // Build colliders once their backing body exists. Collider2dBody chooses
        // whether the collider attaches to its own entity or to another entity's
        // body; creation is deferred (skipped this frame) until that body has a handle.
        for (entity, collider, collider_body) in (&entities, &mut colliders, &colliders_body).join()
        {
            if !collider.is_created() {
                let e = match collider_body {
                    Collider2dBody::Me => entity,
                    Collider2dBody::Entity(e) => *e,
                };
                if let Some(body) = bodies.get(e) {
                    if let Some(h) = body.handle() {
                        let c = collider
                            .take_description()
                            .unwrap()
                            .build(BodyPartHandle(h, 0));
                        let h = world.insert_collider(c, entity);
                        collider.0 = Collider2dInner::Handle(h);
                        self.cached_colliders.insert(entity, h);
                    }
                }
            }
        }
        // Advance the simulation by this frame's delta time.
        world.process(lifecycle.delta_time_seconds());
    }
}
|
use std::fs::File;
use std::io::{self, BufRead, Read};
// Unified buffered input source: either locked stdin or a file, behind one type.
// The lifetime 'a ties a stdin-backed instance to its `io::Stdin` handle.
pub struct Input<'a> {
    source: Box<dyn BufRead + 'a>,
}
impl<'a> Input<'a> {
    /// Wraps a locked handle to standard input.
    pub fn from_stdin(stdin: &'a io::Stdin) -> Input<'a> {
        Input {
            source: Box::new(stdin.lock()),
        }
    }
    /// Opens `path` for buffered reading.
    ///
    /// Generalized from `&str` to accept anything path-like (`&str`, `String`,
    /// `&Path`, `PathBuf`, ...); existing `&str` callers are unaffected.
    ///
    /// # Errors
    /// Returns the underlying `io::Error` if the file cannot be opened.
    pub fn from_file<P: AsRef<std::path::Path>>(path: P) -> io::Result<Input<'a>> {
        File::open(path).map(|file| Input {
            source: Box::new(io::BufReader::new(file)),
        })
    }
}
// Plain delegation: `Input` is `Read`/`BufRead` by forwarding every call to the
// boxed inner source.
impl<'a> Read for Input<'a> {
    fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
        self.source.read(buf)
    }
}
impl<'a> BufRead for Input<'a> {
    fn fill_buf(&mut self) -> io::Result<&[u8]> {
        self.source.fill_buf()
    }
    fn consume(&mut self, amt: usize) {
        self.source.consume(amt);
    }
}
|
use alloc::alloc::{AllocError, Allocator};
use core::any::TypeId;
use core::fmt;
use core::hash::{Hash, Hasher};
use core::ptr;
use seq_macro::seq;
use firefly_alloc::gc::GcBox;
use crate::function::ErlangResult;
use super::{Atom, OpaqueTerm};
/// This struct unifies function captures and closures under a single type.
///
/// Closure contains all the metadata about the callee required to answer questions like
/// what is the arity, what module was it defined in, etc.
///
/// Closures (as opposed to function captures) have an implicit extra argument that comes first
/// in the argument list of the callee, which is a fat pointer to the Closure struct. This enables
/// the callee to access the closed-over values from its environment.
///
/// Function captures do not have the extra self argument, and always have an implicitly empty environment.
// `repr(C)`: generated native code accesses this struct directly, so the field
// order and layout must be stable.
#[repr(C)]
pub struct Closure {
    // Module in which the callee is defined.
    pub module: Atom,
    // Function name of the callee.
    pub name: Atom,
    // Declared arity of the callee (see the type-level docs for how the implicit
    // self argument of non-thin closures relates to this).
    pub arity: u8,
    // Raw callee pointer; cast to the matching FunN/ClosureN signature at call time.
    fun: *const (),
    // Captured environment. The unsized tail makes `Closure` a DST, so it is
    // always handled behind a fat pointer (e.g. GcBox<Closure>).
    env: [OpaqueTerm],
}
// Diagnostic formatting: dumps all metadata plus the raw env slice.
impl fmt::Debug for Closure {
    #[inline]
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.debug_struct("Closure")
            .field("module", &self.module)
            .field("function", &self.name)
            .field("arity", &self.arity)
            .field("fun", &self.fun)
            .field("env", &&self.env)
            .finish()
    }
}
// User-facing formatting in the Erlang `#Fun<module:name/arity>` style.
impl fmt::Display for Closure {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "#Fun<{}:{}/{}>", self.module, self.name, self.arity)
    }
}
impl Closure {
pub const TYPE_ID: TypeId = TypeId::of::<Closure>();
/// Allocates a new GcBox'd closure with the given name, callee, and environment, using the provided allocator
///
/// # Safety
///
/// This is a risky low-level operation, and is only safe if the following guarantees are upheld by the caller:
///
/// * The callee pointer must point to an actual function
/// * The callee must be guaranteed to outlive the closure itself
/// * The callee must expect to receive `arity` arguments in addition to the closure self argument
pub fn new_in<A: Allocator>(
module: Atom,
name: Atom,
arity: u8,
fun: *const (),
env: &[OpaqueTerm],
alloc: A,
) -> Result<GcBox<Self>, AllocError> {
let mut this = GcBox::<Self>::with_capacity_in(env.len(), alloc)?;
this.module = module;
this.name = name;
this.arity = arity;
this.fun = fun;
this.env.copy_from_slice(env);
Ok(this)
}
pub unsafe fn with_capacity_in<A: Allocator>(
capacity: usize,
alloc: A,
) -> Result<GcBox<Self>, AllocError> {
GcBox::<Self>::with_capacity_in(capacity, alloc)
}
/// Returns true if this closure is a function capture, i.e. it has no free variables.
#[inline]
pub fn is_thin(&self) -> bool {
self.env.len() == 0
}
/// Returns the size of the environment (in units of `OpaqueTerm`) bound to this closure
#[inline]
pub fn env_size(&self) -> usize {
self.env.len()
}
pub fn env(&self) -> &[OpaqueTerm] {
&self.env
}
pub fn callee(&self) -> *const () {
self.fun
}
/// Copies the env from `other` into this closure's environment
///
/// This function will panic if the env arities are different
pub fn copy_from(&mut self, other: &Self) {
assert_eq!(self.env.len(), other.env.len());
self.env.copy_from_slice(&other.env);
}
/// Applies the given slice of arguments to this closure.
///
/// This function will panic if the number of arguments given does not match
/// the arity of the closure.
///
/// NOTE: Currently, a max arity of 10 is supported for dynamic apply via this function.
/// If the number of arguments exceeds this number, this function will panic.
#[inline]
pub fn apply(&self, args: &[OpaqueTerm]) -> ErlangResult {
seq!(N in 0..10 {
match args.len() {
#(
N => apply~N(self, args),
)*
n => panic!("apply failed: too many arguments, got {}, expected no more than 10", n),
}
})
}
}
// The seq! invocation below stamps out, for each arity A in 0..10:
//   * `ClosureA` / `FunA` function-pointer types and an `ArgsA` argument tuple,
//   * FnOnce/FnMut/Fn impls for `&Closure` that transmute the stored callee
//     pointer to the matching signature (thin capture vs. closure-with-self),
//   * an `applyA` helper used by `Closure::apply` for dynamic dispatch.
// Relies on the unstable `fn_traits` / `unboxed_closures` features.
seq!(A in 0..10 {
#(
seq!(N in 0..A {
/// This type represents a function which implements a closure of arity A
///
/// See the `Closure` docs for more information on how closures are implemented.
pub type Closure~A = extern "C" fn (&Closure,
#(
OpaqueTerm,
)*
) -> ErlangResult;
/// This type represents a function capture of arity A
///
/// This differs from `ClosureA` in that a function capture has no implicit self argument.
pub type Fun~A = extern "C" fn (#(OpaqueTerm,)*) -> ErlangResult;
/// This type represents a tuple of A arguments
pub type Args~A = (#(OpaqueTerm,)*);
impl FnOnce<Args~A> for &Closure {
type Output = ErlangResult;
#[inline]
extern "rust-call" fn call_once(self, _args: Args~A) -> Self::Output {
if self.is_thin() {
assert_eq!(self.arity, A, "mismatched arity");
let fun = unsafe { core::mem::transmute::<_, Fun~A>(self.fun) };
fun(#(_args.N,)*)
} else {
assert_eq!(self.arity, A + 1, "mismatched arity");
let fun = unsafe { core::mem::transmute::<_, Closure~A>(self.fun) };
fun(self, #(_args.N,)*)
}
}
}
impl FnMut<Args~A> for &Closure {
#[inline]
extern "rust-call" fn call_mut(&mut self, _args: Args~A) -> Self::Output {
if self.is_thin() {
assert_eq!(self.arity, A, "mismatched arity");
let fun = unsafe { core::mem::transmute::<_, Fun~A>(self.fun) };
fun(#(_args.N,)*)
} else {
assert_eq!(self.arity, A + 1, "mismatched arity");
let fun = unsafe { core::mem::transmute::<_, Closure~A>(self.fun) };
fun(self, #(_args.N,)*)
}
}
}
impl Fn<Args~A> for &Closure {
#[inline]
extern "rust-call" fn call(&self, _args: Args~A) -> Self::Output {
if self.is_thin() {
assert_eq!(self.arity, A, "mismatched arity");
let fun = unsafe { core::mem::transmute::<_, Fun~A>(self.fun) };
fun(#(_args.N,)*)
} else {
assert_eq!(self.arity, A + 1, "mismatched arity");
let fun = unsafe { core::mem::transmute::<_, Closure~A>(self.fun) };
fun(self, #(_args.N,)*)
}
}
}
/// Applies the given slice of arguments to a function of arity A
///
/// NOTE: This function asserts that the length of `args` matches the arity of `fun`,
/// if they do not match the function panics.
#[inline]
pub fn apply~A<F>(fun: F, _args: &[OpaqueTerm]) -> ErlangResult
where
F: Fn(#(OpaqueTerm,)*) -> ErlangResult,
{
assert_eq!(_args.len(), A, "mismatched arity");
fun(#(_args[N],)*)
}
});
)*
});
impl Eq for Closure {}
impl PartialEq for Closure {
    // Equality is identity-based: module, name, arity and the callee pointer.
    // The captured environment is deliberately not compared.
    // NOTE(review): confirm env-insensitive equality matches the intended
    // runtime semantics for closures with different captured values.
    fn eq(&self, other: &Self) -> bool {
        self.module == other.module
            && self.name == other.name
            && self.arity == other.arity
            && core::ptr::eq(self.fun, other.fun)
    }
}
impl PartialOrd for Closure {
    /// Total order exists, so simply defer to `Ord`.
    fn partial_cmp(&self, other: &Self) -> Option<core::cmp::Ordering> {
        Some(self.cmp(other))
    }
}
impl Ord for Closure {
    /// Orders closures by module, then name, then arity. The callee pointer and
    /// environment do not participate.
    /// NOTE(review): `PartialEq` also compares the callee pointer, so two
    /// closures can compare `Ordering::Equal` here while being `!=` — confirm
    /// this asymmetry is intended.
    fn cmp(&self, other: &Self) -> core::cmp::Ordering {
        self.module
            .cmp(&other.module)
            .then_with(|| self.name.cmp(&other.name))
            .then_with(|| self.arity.cmp(&other.arity))
    }
}
impl Hash for Closure {
    // Hashes exactly the fields PartialEq compares (module, name, arity, callee
    // pointer), preserving the Hash/Eq contract: equal values hash identically.
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.module.hash(state);
        self.name.hash(state);
        self.arity.hash(state);
        ptr::hash(self.fun, state);
    }
}
|
// Copyright 2019-2020 PolkaX. Licensed under MIT or Apache-2.0.
use std::collections::HashMap;
use std::sync::Arc;
use std::sync::RwLock;
use block_format::Block;
use cid::Codec;
use crate::error::{FormatError, Result};
use crate::format::Node;
lazy_static::lazy_static! {
    // Global codec -> decoder registry. Arc lets `decode` clone out a handle and
    // drop the lock before invoking the decoder, so decodes can run in parallel.
    static ref BLOCK_DECODERS: RwLock<HashMap<Codec, Arc<DecodeBlockFunc>>> = RwLock::new(HashMap::new());
}
// Signature every registered block decoder must satisfy.
type DecodeBlockFunc = dyn Fn(&dyn Block) -> Result<Box<dyn Node>> + Send + Sync;
/// Register decoder for all blocks with the passed codec.
///
/// This will silently replace any existing registered block decoders.
pub fn register<F>(codec: Codec, decoder: F)
where
    F: Fn(&dyn Block) -> Result<Box<dyn Node>> + Send + Sync + 'static,
{
    // The write guard is a temporary, released at the end of the statement.
    BLOCK_DECODERS
        .write()
        .expect("get instance write lock failed")
        .insert(codec, Arc::new(decoder));
}
/// Decode block into node with the decode function corresponding to the codec of the block's CID.
pub fn decode(block: &impl Block) -> Result<Box<dyn Node>> {
    let codec = block.cid().codec();
    // Clone the Arc'd decoder out while the read guard (a temporary in this
    // statement) is held; the lock is released before decoding so decode
    // functions can run in parallel.
    let decoder_func = BLOCK_DECODERS
        .read()
        .expect("get instance read lock failed")
        .get(&codec)
        .ok_or(FormatError::DecoderNotRegister(codec))?
        .clone();
    decoder_func(block)
}
|
use aoc2019::aoc_input::get_input;
use num_integer::Integer;
use std::cmp::min;
use std::collections::HashMap;
use std::num::ParseIntError;
use std::str::FromStr;
#[derive(Debug, Clone, PartialEq, Eq)]
// Errors produced while parsing a reaction list.
enum ParseError {
    // A fragment did not consist of exactly "<quantity> <chemical>"
    // (or a line did not split into exactly inputs and an output).
    BadComponentCount,
    // A quantity was not a valid integer.
    ParseIntError(ParseIntError),
}
impl From<ParseIntError> for ParseError {
    // Enables `?` on `str::parse` inside the FromStr impls.
    fn from(err: ParseIntError) -> Self {
        ParseError::ParseIntError(err)
    }
}
#[derive(Debug, Clone)]
// One term of a reaction: a chemical and how many units of it are consumed/produced.
struct ReactionElement {
    chemical: String,
    quantity: usize,
}
impl FromStr for ReactionElement {
    type Err = ParseError;
    /// Parses a `"<quantity> <chemical>"` fragment, e.g. `"7 ORE"`.
    /// Exactly two space-separated pieces are required.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let mut pieces = s.trim().split(' ');
        match (pieces.next(), pieces.next(), pieces.next()) {
            (Some(quantity), Some(chemical), None) => Ok(ReactionElement {
                chemical: chemical.to_string(),
                quantity: quantity.parse()?,
            }),
            _ => Err(ParseError::BadComponentCount),
        }
    }
}
#[derive(Debug, Clone)]
// A full reaction: the inputs consumed to produce `output`.
struct Reaction {
    output: ReactionElement,
    inputs: Vec<ReactionElement>,
}
impl FromStr for Reaction {
    type Err = ParseError;
    /// Parses a line of the form `"<in>, <in>, ... => <out>"`, e.g.
    /// `"7 A, 1 B => 1 C"`. Exactly one `" => "` separator is required.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let parts: Vec<_> = s.trim().split(" => ").collect();
        match &parts[..] {
            [inputs, output] => {
                let output: ReactionElement = output.parse()?;
                // Collecting into Result short-circuits on the first bad input.
                let inputs: Result<Vec<ReactionElement>, _> =
                    inputs.split(", ").map(|s| s.parse()).collect();
                Ok(Reaction {
                    // Field-init shorthand (was `output: output`).
                    output,
                    inputs: inputs?,
                })
            }
            _ => Err(ParseError::BadComponentCount),
        }
    }
}
#[derive(Debug, Clone)]
// All known reactions, keyed by their output chemical (one producer per chemical).
struct ReactionsMap(HashMap<String, Reaction>);
impl FromStr for ReactionsMap {
    type Err = ParseError;
    /// Parses one reaction per line and indexes them by output chemical.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let reactions: Result<Vec<Reaction>, _> = s.trim().lines().map(|s| s.parse()).collect();
        Ok(ReactionsMap(
            // `into_iter()` consumes the temporary Vec directly; the previous
            // `drain(..)` drained a Vec that was immediately dropped anyway.
            reactions?
                .into_iter()
                .map(|r| (r.output.chemical.clone(), r))
                .collect(),
        ))
    }
}
#[derive(Debug, Clone)]
// Chemical stockpile: chemical name -> units on hand.
struct Inventory(HashMap<String, usize>);
impl Inventory {
    /// Creates an inventory holding `quantity` units of a single chemical.
    fn with_chemical(chemical: &str, quantity: usize) -> Self {
        let mut map = HashMap::new();
        map.insert(chemical.to_owned(), quantity);
        Inventory(map)
    }
    /// Units of `chemical` currently on hand; 0 when absent.
    fn current_amount(&self, chemical: &str) -> usize {
        // `copied()` over `cloned()` for the Copy type usize (clippy idiom).
        self.0.get(chemical).copied().unwrap_or(0)
    }
}
// Chemical names with special roles: ORE is the raw resource, FUEL the target product.
static ORE: &str = "ORE";
static FUEL: &str = "FUEL";
#[derive(Debug)]
// Runs reactions against a finite inventory of chemicals.
struct NanoFactory {
    reactions: ReactionsMap,
    inventory: Inventory,
}
impl NanoFactory {
    fn new(reactions: ReactionsMap, inventory: Inventory) -> Self {
        NanoFactory {
            reactions,
            inventory,
        }
    }
    // Attempts to produce `quantity` units of `chemical`, recursively satisfying
    // input requirements. Works on a clone of the inventory so a failed attempt
    // (running out of ORE) leaves `self.inventory` untouched; returns None on
    // failure, Some(()) on success (with the inventory committed).
    fn try_produce(&mut self, chemical: &str, quantity: usize) -> Option<()> {
        if quantity == 0 {
            return Some(());
        }
        // Worklist of (chemical, units still needed).
        let mut requirements = vec![(chemical.to_string(), quantity)];
        let mut inventory = self.inventory.clone();
        while !requirements.is_empty() {
            let (chemical, mut required_quantity) = requirements.pop().unwrap();
            assert_ne!(required_quantity, 0);
            // Satisfy as much as possible from existing stock first.
            if let Some(available) = inventory.0.get_mut(&chemical) {
                let take = min(*available, required_quantity);
                *available -= take;
                required_quantity -= take;
            }
            if required_quantity == 0 {
                continue;
            }
            if chemical == ORE {
                // Out of ore
                return None;
            }
            // Run the (unique) producing reaction enough times to cover the
            // shortfall; div_ceil rounds up to whole reaction runs.
            let reaction = self.reactions.0.get(&chemical).unwrap();
            let reaction_count = required_quantity.div_ceil(&reaction.output.quantity);
            requirements.extend(
                reaction
                    .inputs
                    .iter()
                    .map(|entry| (entry.chemical.clone(), entry.quantity * reaction_count)),
            );
            // Overproduction from rounding up is banked for later requirements.
            let leftovers = reaction_count * reaction.output.quantity - required_quantity;
            *inventory.0.entry(chemical).or_insert(0) += leftovers;
        }
        // All requirements met: commit the speculative inventory.
        self.inventory = inventory;
        Some(())
    }
}
/// Computes how much ORE is needed to produce exactly one unit of FUEL:
/// starts a factory with a virtually unlimited ORE supply and measures how
/// much of it one FUEL consumed.
fn compute_fuel_ore_cost(reactions: ReactionsMap) -> usize {
    // `usize::MAX` associated const replaces the deprecated `std::usize::MAX` path.
    let initial_ore = usize::MAX;
    let inventory = Inventory::with_chemical(ORE, initial_ore);
    let mut factory = NanoFactory::new(reactions, inventory);
    factory
        .try_produce(FUEL, 1)
        .expect("one FUEL must be producible from unlimited ORE");
    initial_ore - factory.inventory.current_amount(ORE)
}
/// Greedily produces as much FUEL as possible from `ore_quantity` ORE.
///
/// Tries progressively smaller batch sizes (halving from 0x8000 down to 1);
/// for each size it keeps producing until the factory runs out of ORE, so the
/// final total is exact.
fn maximum_fuel_for_ore_quantity(reactions: ReactionsMap, ore_quantity: usize) -> usize {
    let mut factory = NanoFactory::new(reactions, Inventory::with_chemical(ORE, ore_quantity));
    let mut total = 0;
    let mut batch = 0x8000;
    while batch != 0 {
        while factory.try_produce(FUEL, batch).is_some() {
            total += batch;
        }
        batch /= 2;
    }
    total
}
// Entry point: reads the day-14 puzzle input, then answers both parts.
fn main() {
    let input = get_input(14);
    let reactions: ReactionsMap = input.parse().expect("Malformed ReactionsMap");
    // Part 1: ORE cost of a single FUEL.
    println!(
        "Fuel ore cost: {}",
        compute_fuel_ore_cost(reactions.clone())
    );
    // Part 2: maximum FUEL producible from one trillion ORE.
    let collected_ore = 1000000000000;
    let max_fuel = maximum_fuel_for_ore_quantity(reactions, collected_ore);
    println!("Maximum fuel for {} ore: {}", collected_ore, max_fuel);
}
|
use spin::rw_lock::RwLock;
use crate::executor::EXECUTOR;
use crate::prelude::*;
use crate::sched::Affinity;
/// A per-task scheduling-related info.
pub struct SchedInfo {
    // Worker-thread index this task last ran on.
    // NOTE(review): presumably a locality hint for the scheduler — confirm
    // against the executor's placement logic.
    last_thread_id: AtomicU32,
    // Which threads the task may run on; starts out full (no restriction).
    affinity: RwLock<Affinity>,
}
impl SchedInfo {
    /// Creates scheduling info with a full affinity mask and a last-thread hint
    /// assigned round-robin across the executor's worker threads.
    pub fn new() -> Self {
        // Process-wide counter used to spread newly created tasks over workers.
        static LAST_THREAD_ID: AtomicU32 = AtomicU32::new(0);
        let initial_id = LAST_THREAD_ID.fetch_add(1, Ordering::Relaxed) % EXECUTOR.parallelism();
        Self {
            last_thread_id: AtomicU32::new(initial_id),
            affinity: RwLock::new(Affinity::new_full()),
        }
    }
    /// The worker-thread id this task last ran on.
    pub fn last_thread_id(&self) -> u32 {
        self.last_thread_id.load(Ordering::Relaxed)
    }
    /// Records the worker-thread id this task just ran on.
    pub fn set_last_thread_id(&self, id: u32) {
        self.last_thread_id.store(id, Ordering::Relaxed);
    }
    /// The task's affinity mask.
    pub fn affinity(&self) -> &RwLock<Affinity> {
        &self.affinity
    }
}
|
//! Basic formula data types used by the Varisat SAT solver.
// Builds a Lit from an integer in the crate's DIMACS-style convention
// (per the `from_dimacs` constructor name).
/// Shortcut for tests
#[cfg(any(test, feature = "internal-testing"))]
#[doc(hidden)]
#[macro_export]
macro_rules! lit {
    ($x:expr) => {
        $crate::lit::Lit::from_dimacs($x)
    };
}
// Same as `lit!` but for variables.
/// Shortcut for tests
#[cfg(any(test, feature = "internal-testing"))]
#[doc(hidden)]
#[macro_export]
macro_rules! var {
    ($x:expr) => {
        $crate::lit::Var::from_dimacs($x)
    };
}
// Builds a fixed-size array of Lits; the second arm strips a trailing comma by
// re-invoking the first. Note: `$ x` is token-equivalent to `$x` (whitespace
// between `$` and the identifier is insignificant in macro_rules).
/// Shortcut for tests
#[cfg(any(test, feature = "internal-testing"))]
#[doc(hidden)]
#[macro_export]
macro_rules! lits {
    ( $( $x:expr ),* ) => { [ $( $crate::lit!( $x ) ),* ] };
    ( $( $x:expr ),* , ) => { $crate::lits! [ $( $ x),* ] };
}
// Array-of-Vars counterpart to `lits!`.
/// Shortcut for tests
#[cfg(any(test, feature = "internal-testing"))]
#[doc(hidden)]
#[macro_export]
macro_rules! vars {
    ( $( $x:expr ),* ) => { [ $( $crate::var!( $x ) ),* ] };
    ( $( $x:expr ),* , ) => { $crate::vars! [ $( $ x),* ] };
}
// Builds an array of clauses (slices of Lits) from `a, b; c, d;` syntax;
// every clause, including the last, must end with `;`.
/// Shortcut for tests
#[cfg(any(test, feature = "internal-testing"))]
#[doc(hidden)]
#[macro_export]
macro_rules! cnf {
    ( $( $( $x:expr ),* );* ; ) => { [ $( &[ $( $crate::lit!( $x ) ),* ] as &[$crate::Lit] ),* ] };
}
// Like `cnf!` but materializes a CnfFormula from the clause list.
/// Shortcut for tests
#[cfg(any(test, feature = "internal-testing"))]
#[doc(hidden)]
#[macro_export]
macro_rules! cnf_formula {
    ( $( $t:tt )* ) => { $crate::cnf::CnfFormula::from($crate::cnf![ $($t)* ].iter().cloned()) };
}
pub mod cnf;
pub mod lit;
#[cfg(any(test, feature = "internal-testing"))]
pub mod test;
pub use cnf::{CnfFormula, ExtendFormula};
pub use lit::{Lit, Var};
|
// svd2rust-generated accessors for the AHBSMENR register (AHB peripheral clock
// enable in sleep mode).
#[doc = "Reader of register AHBSMENR"]
pub type R = crate::R<u32, super::AHBSMENR>;
#[doc = "Writer for register AHBSMENR"]
pub type W = crate::W<u32, super::AHBSMENR>;
#[doc = "Register AHBSMENR `reset()`'s with value 0x0111_1301"]
impl crate::ResetValue for super::AHBSMENR {
    type Type = u32;
    // Hardware reset value; the bits modeled below (24, 12, 9, 8, 0) are all set,
    // i.e. every sleep-mode clock exposed here is enabled after reset.
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0x0111_1301
    }
}
// CRYPSMEN (bit 24): enum, reader and write proxy generated by svd2rust.
#[doc = "Crypto clock enable during sleep mode bit\n\nValue on reset: 1"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum CRYPSMEN_A {
    #[doc = "0: Crypto clock disabled in Sleep mode"]
    DISABLED = 0,
    #[doc = "1: Crypto clock enabled in Sleep mode"]
    ENABLED = 1,
}
impl From<CRYPSMEN_A> for bool {
    #[inline(always)]
    fn from(variant: CRYPSMEN_A) -> Self {
        variant as u8 != 0
    }
}
#[doc = "Reader of field `CRYPSMEN`"]
pub type CRYPSMEN_R = crate::R<bool, CRYPSMEN_A>;
impl CRYPSMEN_R {
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> CRYPSMEN_A {
        match self.bits {
            false => CRYPSMEN_A::DISABLED,
            true => CRYPSMEN_A::ENABLED,
        }
    }
    #[doc = "Checks if the value of the field is `DISABLED`"]
    #[inline(always)]
    pub fn is_disabled(&self) -> bool {
        *self == CRYPSMEN_A::DISABLED
    }
    #[doc = "Checks if the value of the field is `ENABLED`"]
    #[inline(always)]
    pub fn is_enabled(&self) -> bool {
        *self == CRYPSMEN_A::ENABLED
    }
}
#[doc = "Write proxy for field `CRYPSMEN`"]
pub struct CRYPSMEN_W<'a> {
    w: &'a mut W,
}
impl<'a> CRYPSMEN_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: CRYPSMEN_A) -> &'a mut W {
        {
            self.bit(variant.into())
        }
    }
    #[doc = "Crypto clock disabled in Sleep mode"]
    #[inline(always)]
    pub fn disabled(self) -> &'a mut W {
        self.variant(CRYPSMEN_A::DISABLED)
    }
    #[doc = "Crypto clock enabled in Sleep mode"]
    #[inline(always)]
    pub fn enabled(self) -> &'a mut W {
        self.variant(CRYPSMEN_A::ENABLED)
    }
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear bit 24, then set it from `value`.
        self.w.bits = (self.w.bits & !(0x01 << 24)) | (((value as u32) & 0x01) << 24);
        self.w
    }
}
// CRCSMEN (bit 12): enum, reader and write proxy generated by svd2rust.
// NOTE(review): the variant docs say "Test integration module clock" although
// the field is the CRC clock enable — this text comes from the vendor SVD;
// verify against the reference manual before correcting.
#[doc = "CRC clock enable during sleep mode bit\n\nValue on reset: 1"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum CRCSMEN_A {
    #[doc = "0: Test integration module clock disabled in Sleep mode"]
    DISABLED = 0,
    #[doc = "1: Test integration module clock enabled in Sleep mode (if enabled by CRCEN)"]
    ENABLED = 1,
}
impl From<CRCSMEN_A> for bool {
    #[inline(always)]
    fn from(variant: CRCSMEN_A) -> Self {
        variant as u8 != 0
    }
}
#[doc = "Reader of field `CRCSMEN`"]
pub type CRCSMEN_R = crate::R<bool, CRCSMEN_A>;
impl CRCSMEN_R {
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> CRCSMEN_A {
        match self.bits {
            false => CRCSMEN_A::DISABLED,
            true => CRCSMEN_A::ENABLED,
        }
    }
    #[doc = "Checks if the value of the field is `DISABLED`"]
    #[inline(always)]
    pub fn is_disabled(&self) -> bool {
        *self == CRCSMEN_A::DISABLED
    }
    #[doc = "Checks if the value of the field is `ENABLED`"]
    #[inline(always)]
    pub fn is_enabled(&self) -> bool {
        *self == CRCSMEN_A::ENABLED
    }
}
#[doc = "Write proxy for field `CRCSMEN`"]
pub struct CRCSMEN_W<'a> {
    w: &'a mut W,
}
impl<'a> CRCSMEN_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: CRCSMEN_A) -> &'a mut W {
        {
            self.bit(variant.into())
        }
    }
    #[doc = "Test integration module clock disabled in Sleep mode"]
    #[inline(always)]
    pub fn disabled(self) -> &'a mut W {
        self.variant(CRCSMEN_A::DISABLED)
    }
    #[doc = "Test integration module clock enabled in Sleep mode (if enabled by CRCEN)"]
    #[inline(always)]
    pub fn enabled(self) -> &'a mut W {
        self.variant(CRCSMEN_A::ENABLED)
    }
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear bit 12, then set it from `value`.
        self.w.bits = (self.w.bits & !(0x01 << 12)) | (((value as u32) & 0x01) << 12);
        self.w
    }
}
// SRAMSMEN (bit 9): enum, reader and write proxy generated by svd2rust.
// Fix: the generated variant/method docs said "NVM interface clock" — a
// copy-paste from the MIFSMEN field — while this field controls the SRAM
// interface clock (per the field name and the register description above).
// Only `#[doc]` strings changed; all code is identical.
#[doc = "SRAM interface clock enable during sleep mode bit\n\nValue on reset: 1"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum SRAMSMEN_A {
    #[doc = "0: SRAM interface clock disabled in Sleep mode"]
    DISABLED = 0,
    #[doc = "1: SRAM interface clock enabled in Sleep mode"]
    ENABLED = 1,
}
impl From<SRAMSMEN_A> for bool {
    #[inline(always)]
    fn from(variant: SRAMSMEN_A) -> Self {
        variant as u8 != 0
    }
}
#[doc = "Reader of field `SRAMSMEN`"]
pub type SRAMSMEN_R = crate::R<bool, SRAMSMEN_A>;
impl SRAMSMEN_R {
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> SRAMSMEN_A {
        match self.bits {
            false => SRAMSMEN_A::DISABLED,
            true => SRAMSMEN_A::ENABLED,
        }
    }
    #[doc = "Checks if the value of the field is `DISABLED`"]
    #[inline(always)]
    pub fn is_disabled(&self) -> bool {
        *self == SRAMSMEN_A::DISABLED
    }
    #[doc = "Checks if the value of the field is `ENABLED`"]
    #[inline(always)]
    pub fn is_enabled(&self) -> bool {
        *self == SRAMSMEN_A::ENABLED
    }
}
#[doc = "Write proxy for field `SRAMSMEN`"]
pub struct SRAMSMEN_W<'a> {
    w: &'a mut W,
}
impl<'a> SRAMSMEN_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: SRAMSMEN_A) -> &'a mut W {
        {
            self.bit(variant.into())
        }
    }
    #[doc = "SRAM interface clock disabled in Sleep mode"]
    #[inline(always)]
    pub fn disabled(self) -> &'a mut W {
        self.variant(SRAMSMEN_A::DISABLED)
    }
    #[doc = "SRAM interface clock enabled in Sleep mode"]
    #[inline(always)]
    pub fn enabled(self) -> &'a mut W {
        self.variant(SRAMSMEN_A::ENABLED)
    }
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear bit 9, then set it from `value`.
        self.w.bits = (self.w.bits & !(0x01 << 9)) | (((value as u32) & 0x01) << 9);
        self.w
    }
}
// MIFSMEN (bit 8): NVM/memory-interface clock enable; generated by svd2rust.
#[doc = "NVM interface clock enable during sleep mode bit\n\nValue on reset: 1"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum MIFSMEN_A {
    #[doc = "0: NVM interface clock disabled in Sleep mode"]
    DISABLED = 0,
    #[doc = "1: NVM interface clock enabled in Sleep mode"]
    ENABLED = 1,
}
impl From<MIFSMEN_A> for bool {
    #[inline(always)]
    fn from(variant: MIFSMEN_A) -> Self {
        variant as u8 != 0
    }
}
#[doc = "Reader of field `MIFSMEN`"]
pub type MIFSMEN_R = crate::R<bool, MIFSMEN_A>;
impl MIFSMEN_R {
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> MIFSMEN_A {
        match self.bits {
            false => MIFSMEN_A::DISABLED,
            true => MIFSMEN_A::ENABLED,
        }
    }
    #[doc = "Checks if the value of the field is `DISABLED`"]
    #[inline(always)]
    pub fn is_disabled(&self) -> bool {
        *self == MIFSMEN_A::DISABLED
    }
    #[doc = "Checks if the value of the field is `ENABLED`"]
    #[inline(always)]
    pub fn is_enabled(&self) -> bool {
        *self == MIFSMEN_A::ENABLED
    }
}
#[doc = "Write proxy for field `MIFSMEN`"]
pub struct MIFSMEN_W<'a> {
    w: &'a mut W,
}
impl<'a> MIFSMEN_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: MIFSMEN_A) -> &'a mut W {
        {
            self.bit(variant.into())
        }
    }
    #[doc = "NVM interface clock disabled in Sleep mode"]
    #[inline(always)]
    pub fn disabled(self) -> &'a mut W {
        self.variant(MIFSMEN_A::DISABLED)
    }
    #[doc = "NVM interface clock enabled in Sleep mode"]
    #[inline(always)]
    pub fn enabled(self) -> &'a mut W {
        self.variant(MIFSMEN_A::ENABLED)
    }
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear bit 8, then set it from `value`.
        self.w.bits = (self.w.bits & !(0x01 << 8)) | (((value as u32) & 0x01) << 8);
        self.w
    }
}
// DMASMEN (bit 0): DMA clock enable; generated by svd2rust.
#[doc = "DMA clock enable during sleep mode bit\n\nValue on reset: 1"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum DMASMEN_A {
    #[doc = "0: DMA clock disabled in Sleep mode"]
    DISABLED = 0,
    #[doc = "1: DMA clock enabled in Sleep mode"]
    ENABLED = 1,
}
impl From<DMASMEN_A> for bool {
    #[inline(always)]
    fn from(variant: DMASMEN_A) -> Self {
        variant as u8 != 0
    }
}
#[doc = "Reader of field `DMASMEN`"]
pub type DMASMEN_R = crate::R<bool, DMASMEN_A>;
impl DMASMEN_R {
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> DMASMEN_A {
        match self.bits {
            false => DMASMEN_A::DISABLED,
            true => DMASMEN_A::ENABLED,
        }
    }
    #[doc = "Checks if the value of the field is `DISABLED`"]
    #[inline(always)]
    pub fn is_disabled(&self) -> bool {
        *self == DMASMEN_A::DISABLED
    }
    #[doc = "Checks if the value of the field is `ENABLED`"]
    #[inline(always)]
    pub fn is_enabled(&self) -> bool {
        *self == DMASMEN_A::ENABLED
    }
}
#[doc = "Write proxy for field `DMASMEN`"]
pub struct DMASMEN_W<'a> {
    w: &'a mut W,
}
impl<'a> DMASMEN_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: DMASMEN_A) -> &'a mut W {
        {
            self.bit(variant.into())
        }
    }
    #[doc = "DMA clock disabled in Sleep mode"]
    #[inline(always)]
    pub fn disabled(self) -> &'a mut W {
        self.variant(DMASMEN_A::DISABLED)
    }
    #[doc = "DMA clock enabled in Sleep mode"]
    #[inline(always)]
    pub fn enabled(self) -> &'a mut W {
        self.variant(DMASMEN_A::ENABLED)
    }
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Bit 0: no shift required.
        self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01);
        self.w
    }
}
impl R {
    #[doc = "Bit 24 - Crypto clock enable during sleep mode bit"]
    #[inline(always)]
    pub fn crypsmen(&self) -> CRYPSMEN_R {
        // Test the bit in place instead of shifting it down first.
        CRYPSMEN_R::new((self.bits & (1 << 24)) != 0)
    }
    #[doc = "Bit 12 - CRC clock enable during sleep mode bit"]
    #[inline(always)]
    pub fn crcsmen(&self) -> CRCSMEN_R {
        CRCSMEN_R::new((self.bits & (1 << 12)) != 0)
    }
    #[doc = "Bit 9 - SRAM interface clock enable during sleep mode bit"]
    #[inline(always)]
    pub fn sramsmen(&self) -> SRAMSMEN_R {
        SRAMSMEN_R::new((self.bits & (1 << 9)) != 0)
    }
    #[doc = "Bit 8 - NVM interface clock enable during sleep mode bit"]
    #[inline(always)]
    pub fn mifsmen(&self) -> MIFSMEN_R {
        MIFSMEN_R::new((self.bits & (1 << 8)) != 0)
    }
    #[doc = "Bit 0 - DMA clock enable during sleep mode bit"]
    #[inline(always)]
    pub fn dmasmen(&self) -> DMASMEN_R {
        DMASMEN_R::new((self.bits & 1) != 0)
    }
}
// Write-side accessors: each returns a proxy that mutates the relevant
// bit of the register value held in `W`.
impl W {
    #[doc = "Bit 24 - Crypto clock enable during sleep mode bit"]
    #[inline(always)]
    pub fn crypsmen(&mut self) -> CRYPSMEN_W {
        CRYPSMEN_W { w: self }
    }
    #[doc = "Bit 12 - CRC clock enable during sleep mode bit"]
    #[inline(always)]
    pub fn crcsmen(&mut self) -> CRCSMEN_W {
        CRCSMEN_W { w: self }
    }
    #[doc = "Bit 9 - SRAM interface clock enable during sleep mode bit"]
    #[inline(always)]
    pub fn sramsmen(&mut self) -> SRAMSMEN_W {
        SRAMSMEN_W { w: self }
    }
    #[doc = "Bit 8 - NVM interface clock enable during sleep mode bit"]
    #[inline(always)]
    pub fn mifsmen(&mut self) -> MIFSMEN_W {
        MIFSMEN_W { w: self }
    }
    #[doc = "Bit 0 - DMA clock enable during sleep mode bit"]
    #[inline(always)]
    pub fn dmasmen(&mut self) -> DMASMEN_W {
        DMASMEN_W { w: self }
    }
}
|
use std::collections::HashMap;
use std::fs::File;
use std::io::Read;
/// A chemical and an associated quantity, e.g. the `7 A` term of a reaction.
struct Element {
    name: String,
    quantity: i64,
}
impl Element {
    /// Creates an element from its name and quantity.
    fn new(name: String, quantity: i64) -> Element {
        Element { name, quantity }
    }
    /// Parses a `"<quantity> <name>"` pair such as `"10 ORE"`.
    ///
    /// Panics if the string does not contain exactly a quantity and a name,
    /// or if the quantity is not a valid integer.
    fn from_string(string: &str) -> Element {
        // `split_whitespace` tolerates leading/trailing and repeated spaces;
        // `split(' ')` would produce empty tokens for those and panic below.
        let quantity_and_elem: Vec<&str> = string.split_whitespace().collect();
        if quantity_and_elem.len() != 2 {
            panic!("String doesn't contain quantity and element");
        }
        let quantity = quantity_and_elem[0]
            .parse()
            .expect("quantity is not a valid integer");
        Element::new(quantity_and_elem[1].to_string(), quantity)
    }
}
/// Solves Advent of Code 2019 day 14 part 1: reads the reaction list from
/// `input_day14.txt` and prints the minimum amount of ORE needed for 1 FUEL.
pub fn run_puzzle() {
    let mut file = File::open("input_day14.txt").expect("Failed to open input_day14.txt");
    let mut reacts_string = String::new();
    file.read_to_string(&mut reacts_string).unwrap();
    // Each line is "<inputs> => <output>", e.g. "7 A, 1 B => 1 C".
    // Stored as (output, inputs).
    let mut reactions: Vec<(Element, Vec<Element>)> = Vec::new();
    for line in reacts_string.lines() {
        let elems_and_res: Vec<String> = line.split("=>").map(|l| l.trim().to_string()).collect();
        if elems_and_res.len() != 2 {
            panic!("len() != 2");
        }
        let elements: Vec<Element> = elems_and_res[0].split(',').map(|e| Element::from_string(e.trim())).collect();
        let result = Element::from_string(&elems_and_res[1]);
        reactions.push((result, elements));
    }
    // Demand-driven expansion: maps element name -> net quantity still
    // required. Negative values represent surplus from over-producing runs.
    let mut elements_needed: HashMap<String, i64> = HashMap::new();
    elements_needed.insert("FUEL".to_string(), 1);
    let mut finished = false;
    while !finished {
        // Mutate a copy so the iteration over `elements_needed` stays valid.
        let mut new_elements = elements_needed.clone();
        for (e, quantity) in &elements_needed {
            // ORE is the raw input; non-positive demand needs no production.
            if *e == "ORE" || *quantity <= 0 {
                continue;
            }
            for r in &reactions {
                if r.0.name == *e {
                    // Number of reaction runs, rounded up to cover the demand.
                    let num_reactions;
                    if *quantity % r.0.quantity != 0 {
                        num_reactions = (*quantity / r.0.quantity) + 1;
                    } else {
                        num_reactions = *quantity / r.0.quantity;
                    }
                    // Add the demand each run places on its ingredients...
                    for n in &r.1 {
                        let elem_entry = new_elements.entry(n.name.to_string()).or_insert(0);
                        *elem_entry += num_reactions * n.quantity;
                    }
                    // ...and credit what the runs produce (may overshoot,
                    // leaving a negative, i.e. surplus, entry).
                    let res_entry = new_elements.entry(e.to_string()).or_insert(0);
                    *res_entry -= num_reactions * r.0.quantity;
                }
            }
        }
        elements_needed = new_elements;
        // Done once nothing but ORE still has positive demand.
        finished = true;
        for (e, quantity) in &elements_needed {
            if e == "ORE" {
                continue;
            }
            if *quantity > 0 {
                finished = false;
                break;
            }
        }
    }
    println!("Needs {} ORE", elements_needed[&"ORE".to_string()]);
}
|
//! Data point building and writing
use snafu::{ensure, Snafu};
use std::{collections::BTreeMap, io};
/// Errors that occur while building `DataPoint`s
#[derive(Debug, Snafu)]
pub enum DataPointError {
    /// Returned when calling `build` on a `DataPointBuilder` that has no
    /// fields.
    // `#[derive(Snafu)]` also generates the `AtLeastOneFieldRequiredSnafu`
    // context selector that `DataPointBuilder::build` uses with `ensure!`.
    #[snafu(display(
        "All `DataPoints` must have at least one field. Builder contains: {:?}",
        data_point_builder
    ))]
    AtLeastOneFieldRequired {
        /// The current state of the `DataPointBuilder`
        data_point_builder: DataPointBuilder,
    },
}
/// Incrementally constructs a `DataPoint`.
///
/// Create this via `DataPoint::builder`.
#[derive(Debug)]
pub struct DataPointBuilder {
    // Line-protocol measurement name.
    measurement: String,
    // Keeping the tags sorted improves performance on the server side
    tags: BTreeMap<String, String>,
    // Field set; `build` requires at least one entry.
    fields: BTreeMap<String, FieldValue>,
    // Nanoseconds since the UNIX epoch, if set.
    timestamp: Option<i64>,
}
impl DataPointBuilder {
fn new(measurement: impl Into<String>) -> Self {
Self {
measurement: measurement.into(),
tags: Default::default(),
fields: Default::default(),
timestamp: Default::default(),
}
}
/// Sets a tag, replacing any existing tag of the same name.
pub fn tag(mut self, name: impl Into<String>, value: impl Into<String>) -> Self {
self.tags.insert(name.into(), value.into());
self
}
/// Sets a field, replacing any existing field of the same name.
pub fn field(mut self, name: impl Into<String>, value: impl Into<FieldValue>) -> Self {
self.fields.insert(name.into(), value.into());
self
}
/// Sets the timestamp, replacing any existing timestamp.
///
/// The value is treated as the number of nanoseconds since the
/// UNIX epoch.
pub fn timestamp(mut self, value: i64) -> Self {
self.timestamp = Some(value);
self
}
/// Constructs the data point
pub fn build(self) -> Result<DataPoint, DataPointError> {
ensure!(
!self.fields.is_empty(),
AtLeastOneFieldRequiredSnafu {
data_point_builder: self
}
);
let Self {
measurement,
tags,
fields,
timestamp,
} = self;
Ok(DataPoint {
measurement,
tags,
fields,
timestamp,
})
}
}
/// A single point of information to send to InfluxDB.
// TODO: If we want to support non-UTF-8 data, all `String`s stored in `DataPoint` would need
// to be `Vec<u8>` instead, the API for creating a `DataPoint` would need some more consideration,
// and there would need to be more `Write*` trait implementations. Because the `Write*` traits work
// on a writer of bytes, that part of the design supports non-UTF-8 data now.
#[derive(Debug)]
pub struct DataPoint {
    measurement: String,
    // Sorted tags (see `DataPointBuilder`).
    tags: BTreeMap<String, String>,
    fields: BTreeMap<String, FieldValue>,
    // Nanoseconds since the UNIX epoch, if set.
    timestamp: Option<i64>,
}
impl DataPoint {
    /// Create a builder to incrementally construct a `DataPoint`.
    pub fn builder(measurement: impl Into<String>) -> DataPointBuilder {
        DataPointBuilder::new(measurement)
    }
}
impl WriteDataPoint for DataPoint {
    /// Serializes this point as one line of line protocol:
    /// `measurement[,tag=value...] field=value[,field=value...] [timestamp]\n`.
    fn write_data_point_to<W>(&self, mut w: W) -> io::Result<()>
    where
        W: io::Write,
    {
        self.measurement.write_measurement_to(&mut w)?;
        for (key, value) in &self.tags {
            w.write_all(b",")?;
            key.write_tag_key_to(&mut w)?;
            w.write_all(b"=")?;
            value.write_tag_value_to(&mut w)?;
        }
        // A space separates the field set from what precedes it; subsequent
        // fields are comma-separated.
        let mut separator: &[u8] = b" ";
        for (key, value) in &self.fields {
            w.write_all(separator)?;
            key.write_field_key_to(&mut w)?;
            w.write_all(b"=")?;
            value.write_field_value_to(&mut w)?;
            separator = b",";
        }
        if let Some(timestamp) = self.timestamp {
            w.write_all(b" ")?;
            timestamp.write_timestamp_to(&mut w)?;
        }
        w.write_all(b"\n")
    }
}
/// Possible value types
#[derive(Debug, Clone, PartialEq)]
pub enum FieldValue {
    /// A true or false value
    Bool(bool),
    /// A 64-bit floating point number
    F64(f64),
    /// A 64-bit signed integer number
    I64(i64),
    /// A 64-bit unsigned integer number
    U64(u64),
    /// A string value
    String(String),
}
// Conversions from the primitive types accepted by `DataPointBuilder::field`.
impl From<bool> for FieldValue {
    fn from(value: bool) -> Self {
        FieldValue::Bool(value)
    }
}
impl From<f64> for FieldValue {
    fn from(value: f64) -> Self {
        FieldValue::F64(value)
    }
}
impl From<i64> for FieldValue {
    fn from(value: i64) -> Self {
        FieldValue::I64(value)
    }
}
impl From<u64> for FieldValue {
    fn from(value: u64) -> Self {
        FieldValue::U64(value)
    }
}
impl From<&str> for FieldValue {
    fn from(value: &str) -> Self {
        FieldValue::String(value.to_owned())
    }
}
impl From<String> for FieldValue {
    fn from(value: String) -> Self {
        FieldValue::String(value)
    }
}
/// Transform a type into valid line protocol lines
///
/// This trait is to enable the conversion of `DataPoint`s to line protocol; it
/// is unlikely that you would need to implement this trait. In the future, a
/// `derive` crate may exist that would facilitate the generation of
/// implementations of this trait on custom types to help uphold the
/// responsibilities for escaping and producing complete lines.
pub trait WriteDataPoint {
    /// Write this data point as line protocol. The implementor is responsible
    /// for properly escaping the data and ensuring that complete lines
    /// are generated.
    ///
    /// (The `DataPoint` implementation in this module terminates each point
    /// with a trailing `\n`.)
    fn write_data_point_to<W>(&self, w: W) -> io::Result<()>
    where
        W: io::Write;
}
// The following are traits rather than free functions so that we can limit
// their implementations to only the data types supported for each of
// measurement, tag key, tag value, field key, field value, and timestamp. They
// are a private implementation detail and any custom implementations
// of these traits would be generated by a future derive trait.
// Each `str` implementation below differs only in which delimiter set it
// escapes (see the `*_DELIMITERS` constants further down).
trait WriteMeasurement {
    fn write_measurement_to<W>(&self, w: W) -> io::Result<()>
    where
        W: io::Write;
}
impl WriteMeasurement for str {
    // Escapes `,` and space.
    fn write_measurement_to<W>(&self, w: W) -> io::Result<()>
    where
        W: io::Write,
    {
        escape_and_write_value(self, MEASUREMENT_DELIMITERS, w)
    }
}
trait WriteTagKey {
    fn write_tag_key_to<W>(&self, w: W) -> io::Result<()>
    where
        W: io::Write;
}
impl WriteTagKey for str {
    // Escapes `,`, `=`, and space.
    fn write_tag_key_to<W>(&self, w: W) -> io::Result<()>
    where
        W: io::Write,
    {
        escape_and_write_value(self, TAG_KEY_DELIMITERS, w)
    }
}
trait WriteTagValue {
    fn write_tag_value_to<W>(&self, w: W) -> io::Result<()>
    where
        W: io::Write;
}
impl WriteTagValue for str {
    // Escapes `,`, `=`, and space (same set as tag keys).
    fn write_tag_value_to<W>(&self, w: W) -> io::Result<()>
    where
        W: io::Write,
    {
        escape_and_write_value(self, TAG_VALUE_DELIMITERS, w)
    }
}
trait WriteFieldKey {
    fn write_field_key_to<W>(&self, w: W) -> io::Result<()>
    where
        W: io::Write;
}
impl WriteFieldKey for str {
    // Escapes `,`, `=`, and space (same set as tag keys).
    fn write_field_key_to<W>(&self, w: W) -> io::Result<()>
    where
        W: io::Write,
    {
        escape_and_write_value(self, FIELD_KEY_DELIMITERS, w)
    }
}
trait WriteFieldValue {
fn write_field_value_to<W>(&self, w: W) -> io::Result<()>
where
W: io::Write;
}
impl WriteFieldValue for FieldValue {
fn write_field_value_to<W>(&self, mut w: W) -> io::Result<()>
where
W: io::Write,
{
use FieldValue::*;
match self {
Bool(v) => write!(w, "{}", if *v { "t" } else { "f" }),
F64(v) => write!(w, "{v}"),
I64(v) => write!(w, "{v}i"),
U64(v) => write!(w, "{v}u"),
String(v) => {
w.write_all(br#"""#)?;
escape_and_write_value(v, FIELD_VALUE_STRING_DELIMITERS, &mut w)?;
w.write_all(br#"""#)
}
}
}
}
trait WriteTimestamp {
    fn write_timestamp_to<W>(&self, w: W) -> io::Result<()>
    where
        W: io::Write;
}
impl WriteTimestamp for i64 {
    // Timestamps are written as a plain decimal integer; no escaping needed.
    fn write_timestamp_to<W>(&self, mut w: W) -> io::Result<()>
    where
        W: io::Write,
    {
        write!(w, "{self}")
    }
}
// Delimiter sets per line-protocol position; each listed character must be
// backslash-escaped when it appears in a value at that position.
const MEASUREMENT_DELIMITERS: &[char] = &[',', ' '];
const TAG_KEY_DELIMITERS: &[char] = &[',', '=', ' '];
const TAG_VALUE_DELIMITERS: &[char] = TAG_KEY_DELIMITERS;
const FIELD_KEY_DELIMITERS: &[char] = TAG_KEY_DELIMITERS;
const FIELD_VALUE_STRING_DELIMITERS: &[char] = &['"'];
/// Writes `value` to `w`, preceding every character contained in
/// `escaping_specification` with a backslash.
fn escape_and_write_value<W>(
    value: &str,
    escaping_specification: &[char],
    mut w: W,
) -> io::Result<()>
where
    W: io::Write,
{
    let mut rest = value;
    while let Some(idx) = rest.find(escaping_specification) {
        // `find` returns an index at a char boundary, so slicing is safe.
        let delim = rest[idx..].chars().next().expect("match is at a char boundary");
        w.write_all(rest[..idx].as_bytes())?;
        write!(w, "\\{}", delim)?;
        rest = &rest[idx + delim.len_utf8()..];
    }
    w.write_all(rest.as_bytes())
}
#[cfg(test)]
mod tests {
    use super::*;
    use std::str;
    // Compares byte strings, printing both as UTF-8 on failure so mismatches
    // in escaped output are readable.
    fn assert_utf8_strings_eq(left: &[u8], right: &[u8]) {
        assert_eq!(
            left,
            right,
            "\n\nleft string value: `{}`,\nright string value: `{}`",
            str::from_utf8(left).unwrap(),
            str::from_utf8(right).unwrap(),
        );
    }
    #[test]
    fn point_builder_allows_setting_tags_and_fields() {
        let point = DataPoint::builder("swap")
            .tag("host", "server01")
            .tag("name", "disk0")
            .field("in", 3_i64)
            .field("out", 4_i64)
            .timestamp(1)
            .build()
            .unwrap();
        assert_utf8_strings_eq(
            &point.data_point_to_vec().unwrap(),
            b"swap,host=server01,name=disk0 in=3i,out=4i 1\n".as_ref(),
        );
    }
    #[test]
    fn no_tags_or_timestamp() {
        let point = DataPoint::builder("m0")
            .field("f0", 1.0)
            .field("f1", 2_i64)
            .build()
            .unwrap();
        assert_utf8_strings_eq(
            &point.data_point_to_vec().unwrap(),
            b"m0 f0=1,f1=2i\n".as_ref(),
        );
    }
    #[test]
    fn no_timestamp() {
        let point = DataPoint::builder("m0")
            .tag("t0", "v0")
            .tag("t1", "v1")
            .field("f1", 2_i64)
            .build()
            .unwrap();
        assert_utf8_strings_eq(
            &point.data_point_to_vec().unwrap(),
            b"m0,t0=v0,t1=v1 f1=2i\n".as_ref(),
        );
    }
    #[test]
    fn no_field() {
        let point_result = DataPoint::builder("m0").build();
        assert!(point_result.is_err());
    }
    // Contains every delimiter any escaping context cares about, so each
    // test below shows which subset its context escapes.
    const ALL_THE_DELIMITERS: &str = r#"alpha,beta=delta gamma"epsilon"#;
    #[test]
    fn special_characters_are_escaped_in_measurements() {
        assert_utf8_strings_eq(
            &ALL_THE_DELIMITERS.measurement_to_vec().unwrap(),
            br#"alpha\,beta=delta\ gamma"epsilon"#.as_ref(),
        );
    }
    #[test]
    fn special_characters_are_escaped_in_tag_keys() {
        assert_utf8_strings_eq(
            &ALL_THE_DELIMITERS.tag_key_to_vec().unwrap(),
            br#"alpha\,beta\=delta\ gamma"epsilon"#.as_ref(),
        );
    }
    #[test]
    fn special_characters_are_escaped_in_tag_values() {
        assert_utf8_strings_eq(
            &ALL_THE_DELIMITERS.tag_value_to_vec().unwrap(),
            br#"alpha\,beta\=delta\ gamma"epsilon"#.as_ref(),
        );
    }
    #[test]
    fn special_characters_are_escaped_in_field_keys() {
        assert_utf8_strings_eq(
            &ALL_THE_DELIMITERS.field_key_to_vec().unwrap(),
            br#"alpha\,beta\=delta\ gamma"epsilon"#.as_ref(),
        );
    }
    #[test]
    fn special_characters_are_escaped_in_field_values_of_strings() {
        assert_utf8_strings_eq(
            &FieldValue::from(ALL_THE_DELIMITERS)
                .field_value_to_vec()
                .unwrap(),
            br#""alpha,beta=delta gamma\"epsilon""#.as_ref(),
        );
    }
    #[test]
    fn field_value_of_bool() {
        let e = FieldValue::from(true);
        assert_utf8_strings_eq(&e.field_value_to_vec().unwrap(), b"t");
        let e = FieldValue::from(false);
        assert_utf8_strings_eq(&e.field_value_to_vec().unwrap(), b"f");
    }
    #[test]
    fn field_value_of_float() {
        let e = FieldValue::from(42_f64);
        assert_utf8_strings_eq(&e.field_value_to_vec().unwrap(), b"42");
    }
    #[test]
    fn field_value_of_signed_integer() {
        let e = FieldValue::from(42_i64);
        assert_utf8_strings_eq(&e.field_value_to_vec().unwrap(), b"42i");
    }
    #[test]
    fn field_value_of_unsigned_integer() {
        let e = FieldValue::from(42_u64);
        assert_utf8_strings_eq(&e.field_value_to_vec().unwrap(), b"42u");
    }
    #[test]
    fn field_value_of_string() {
        let e = FieldValue::from("hello");
        assert_utf8_strings_eq(&e.field_value_to_vec().unwrap(), br#""hello""#);
    }
    // Clears up the boilerplate of writing to a vector from the tests
    macro_rules! test_extension_traits {
        ($($ext_name:ident :: $ext_fn_name:ident -> $base_name:ident :: $base_fn_name:ident,)*) => {
            $(
                // Each generated extension trait adds a `*_to_vec` helper
                // that writes into a fresh `Vec<u8>` and returns it.
                trait $ext_name: $base_name {
                    fn $ext_fn_name(&self) -> io::Result<Vec<u8>> {
                        let mut v = Vec::new();
                        self.$base_fn_name(&mut v)?;
                        Ok(v)
                    }
                }
                impl<T: $base_name + ?Sized> $ext_name for T {}
            )*
        }
    }
    test_extension_traits! {
        WriteDataPointExt::data_point_to_vec -> WriteDataPoint::write_data_point_to,
        WriteMeasurementExt::measurement_to_vec -> WriteMeasurement::write_measurement_to,
        WriteTagKeyExt::tag_key_to_vec -> WriteTagKey::write_tag_key_to,
        WriteTagValueExt::tag_value_to_vec -> WriteTagValue::write_tag_value_to,
        WriteFieldKeyExt::field_key_to_vec -> WriteFieldKey::write_field_key_to,
        WriteFieldValueExt::field_value_to_vec -> WriteFieldValue::write_field_value_to,
    }
}
|
// TODO: synchronize, not synchronize.
//
// synchronize animation have a loop duration that is multiple of tempo (eg 0.5 tempo or 4 tempo)
// then its image is computed from percentage of tempo and number of frame
//
// not sync have a framerate
use std::collections::HashMap;
include!(concat!(env!("OUT_DIR"), "/animations.rs"));
// Seconds each animation frame is shown (non-synchronized animations).
const FRAME_DURATION: f64 = 0.2;
lazy_static! {
    // Maps (entity, state) to the tile indices of its animation.
    // `ESSAI`/`TRUC` come from the generated `animations.rs` include above.
    // NOTE(review): `transition!` stores the frames reversed, apparently
    // because `Animated` consumes transitions by popping from the end —
    // confirm against `Animated::set_transition`/`update`.
    static ref ANIMATION_MAP: HashMap<(Entity, State), Vec<u32>> = {
        use self::Entity::*;
        use self::State::*;
        let mut map = HashMap::new();
        macro_rules! animation {($e:ident, $s:ident => $i:ident) => { map.insert(($e, $s), $i.iter().cloned().collect()); }}
        macro_rules! transition {($e:ident, $s:ident => $i:ident) => { map.insert(($e, $s), $i.iter().cloned().rev().collect()); }}
        animation!(Character, Walking => ESSAI);
        transition!(Character, Running => TRUC);
        map
    };
}
// Which game entity's animations to look up; half of the `ANIMATION_MAP` key.
#[derive(Clone, Copy, Hash, PartialEq, Eq)]
pub enum Entity {
    Character,
    Monster,
}
// Animation state; the other half of the `ANIMATION_MAP` key.
#[derive(Clone, Copy, Hash, PartialEq, Eq)]
pub enum State {
    Walking,
    Running,
}
pub struct Animated {
    // Entity whose animations are looked up in `ANIMATION_MAP`.
    entity: Entity,
    // Index of the current frame within `tiles`.
    tile_index: usize,
    // Frames of the looping animation for the current state.
    tiles: Vec<u32>,
    // One-shot frames consumed (from the end, via `pop`) before the loop resumes.
    transition_tiles: Vec<u32>,
    // Accumulated time not yet converted into frame advances.
    timer: f64,
}
impl Animated {
    /// Creates an animated sprite for `entity`, starting in `state`.
    ///
    /// Panics if `(entity, state)` has no entry in `ANIMATION_MAP`.
    pub fn new(entity: Entity, state: State) -> Self {
        let mut animated = Animated {
            entity,
            tile_index: 0,
            tiles: vec![],
            transition_tiles: vec![],
            timer: 0.0,
        };
        animated.set_state(state);
        animated
    }
    /// Advances the animation clock by `dt` seconds, consuming one frame per
    /// elapsed `FRAME_DURATION`. Pending transition frames are consumed
    /// before the looping animation advances.
    pub fn update(&mut self, dt: f64) {
        self.timer += dt;
        while self.timer > FRAME_DURATION {
            self.timer -= FRAME_DURATION;
            if self.transition_tiles.pop().is_some() {
                continue;
            }
            // Guard: an empty tile list would make the modulo below divide
            // by zero; hold the current position instead of panicking.
            if self.tiles.is_empty() {
                continue;
            }
            self.tile_index = (self.tile_index + 1) % self.tiles.len();
        }
    }
    /// Returns the tile to draw: the next pending transition frame if any,
    /// otherwise the current frame of the looping animation.
    ///
    /// Panics if there is no transition frame and `tiles` is empty.
    pub fn tile(&self) -> u32 {
        // `u32` is `Copy`, so no clone is needed.
        self.transition_tiles
            .last()
            .copied()
            .unwrap_or_else(|| self.tiles[self.tile_index])
    }
    /// Replaces the looping animation with the one registered for `state`
    /// and restarts it from its first frame.
    ///
    /// Panics if `(self.entity, state)` has no entry in `ANIMATION_MAP`.
    pub fn set_state(&mut self, state: State) {
        let desc = ANIMATION_MAP.get(&(self.entity, state)).unwrap();
        self.tiles.clear();
        self.tiles.extend_from_slice(desc);
        self.tile_index = 0;
    }
    /// Queues the animation registered for `state` as a one-shot transition.
    /// Frames are consumed from the end via `pop`; the map stores transition
    /// entries reversed so they play in their original order.
    ///
    /// Panics if `(self.entity, state)` has no entry in `ANIMATION_MAP`.
    pub fn set_transition(&mut self, state: State) {
        let desc = ANIMATION_MAP.get(&(self.entity, state)).unwrap();
        self.transition_tiles.clear();
        self.transition_tiles.extend_from_slice(desc);
    }
}
|
//! Decision heuristics.
use partial_ref::{partial, PartialRef};
use varisat_formula::Var;
use crate::{
context::{parts::*, Context},
prop::{enqueue_assignment, Reason},
};
pub mod vsids;
/// Make a decision and enqueue it.
///
/// Returns `false` if no decision was made because all variables are assigned.
pub fn make_decision(
    mut ctx: partial!(
        Context,
        mut AssignmentP,
        mut ImplGraphP,
        mut TrailP,
        mut VsidsP
    ),
) -> bool {
    // Split off the VSIDS part so we can search it while still reading the
    // assignment through the remaining context.
    let (vsids, mut ctx) = ctx.split_part_mut(VsidsP);
    // Pick the first candidate variable that is still unassigned.
    if let Some(decision_var) = vsids.find(|&var| ctx.part(AssignmentP).var_value(var).is_none()) {
        // Reuse the variable's last assigned polarity (phase saving,
        // presumably — `last_var_value` suggests this; confirm in AssignmentP).
        let decision = decision_var.lit(ctx.part(AssignmentP).last_var_value(decision_var));
        ctx.part_mut(TrailP).new_decision_level();
        // NOTE(review): decisions have no antecedent clause; `Reason::Unit`
        // appears to serve as the placeholder reason — confirm against
        // `prop::Reason`'s documentation.
        enqueue_assignment(ctx.borrow(), decision, Reason::Unit);
        true
    } else {
        false
    }
}
/// Make a variable available for decisions.
pub fn make_available(mut ctx: partial!(Context, mut VsidsP), var: Var) {
    ctx.part_mut(VsidsP).make_available(var);
}
/// Initialize decision heuristics for a new variable.
///
/// Resets the variable's VSIDS state and, when `available` is true, makes it
/// eligible to be picked by `make_decision`.
pub fn initialize_var(mut ctx: partial!(Context, mut VsidsP), var: Var, available: bool) {
    ctx.part_mut(VsidsP).reset(var);
    if available {
        make_available(ctx.borrow(), var);
    }
}
/// Remove a variable from the decision heuristics.
pub fn remove_var(mut ctx: partial!(Context, mut VsidsP), var: Var) {
    ctx.part_mut(VsidsP).make_unavailable(var);
}
|
// Copyright 2019 David Roundy <roundyd@physics.oregonstate.edu>
//
// Licensed under the GPL version 2.0 or later.
//! This crate defines a macro for a database.
extern crate proc_macro;
use syn::spanned::Spanned;
#[cfg(test)]
mod tests {
    // Sanity check that the test harness runs for this crate.
    #[test]
    fn it_works() {
        assert_eq!(2 + 2, 4);
    }
}
// A single item accepted inside a `schema!` invocation: a struct or an enum.
enum Item {
    Struct(syn::ItemStruct),
    Enum(syn::ItemEnum),
}
impl syn::parse::Parse for Item {
    // Parses one struct or enum item, rejects generics, and re-attaches the
    // outer attributes (and the peeked visibility) onto the parsed item.
    fn parse(input: syn::parse::ParseStream) -> syn::Result<Self> {
        let mut attrs = input.call(syn::Attribute::parse_outer)?;
        // Fork so we can look past the visibility without consuming it from
        // the real stream.
        let ahead = input.fork();
        let vis: syn::Visibility = ahead.parse()?;
        let lookahead = ahead.lookahead1();
        let mut item = if lookahead.peek(syn::Token![struct]) {
            input.parse().map(Item::Struct)
        } else if lookahead.peek(syn::Token![enum]) {
            input.parse().map(Item::Enum)
        } else {
            Err(lookahead.error())
        }?;
        {
            let (item_vis, item_attrs, generics) = match &mut item {
                Item::Struct(item) => (&mut item.vis, &mut item.attrs, &item.generics),
                Item::Enum(item) => (&mut item.vis, &mut item.attrs, &item.generics),
            };
            if generics.params.len() > 0 {
                return Err(syn::Error::new_spanned(
                    generics,
                    "schema! does not support generic types.",
                ));
            }
            // Attributes collected before the keyword come first, then any
            // the item parser gathered itself.
            attrs.extend(item_attrs.drain(..));
            *item_attrs = attrs;
            *item_vis = vis;
        }
        Ok(item)
    }
}
// Full contents of a `schema!` invocation: `type Name;` followed by any
// number of struct/enum items.
#[derive(Debug)]
struct SchemaInput {
    name: syn::Ident,
    structs: Vec<syn::ItemStruct>,
    enums: Vec<syn::ItemEnum>,
}
impl syn::parse::Parse for SchemaInput {
    // Parses `type <Name>;` followed by struct/enum items until the macro
    // input is exhausted, sorting the items by kind.
    fn parse(input: syn::parse::ParseStream) -> syn::Result<Self> {
        input.parse::<syn::Token![type]>()?;
        let name: syn::Ident = input.parse()?;
        input.parse::<syn::Token![;]>()?;
        let mut structs = Vec::new();
        let mut enums = Vec::new();
        while !input.is_empty() {
            match input.parse()? {
                Item::Struct(i) => structs.push(i),
                Item::Enum(i) => enums.push(i),
            }
        }
        Ok(SchemaInput {
            name,
            structs,
            enums,
        })
    }
}
// Result of `SchemaInput::process`: items partitioned into plain-old-data
// ("pod") items and structs that contain `Key`/`KeySet` fields.
#[derive(Debug)]
struct SchemaOutput {
    // name: syn::Ident,
    pod_structs: Vec<syn::ItemStruct>,
    pod_enums: Vec<syn::ItemEnum>,
    key_structs: Vec<syn::ItemStruct>,
    // Parallel to `key_structs`: field name -> kind of key that field holds.
    key_struct_maps: Vec<std::collections::HashMap<syn::Ident, KeyType>>,
    // key_enums: Vec<syn::ItemEnum>,
}
// fn lifetime_a() -> syn::Generics {
// let mut params = syn::punctuated::Punctuated::new();
// params.push(syn::GenericParam::Lifetime(syn::LifetimeDef {
// attrs: Vec::new(),
// lifetime: syn::Lifetime {
// apostrophe: proc_macro2::Span::call_site(),
// ident: quote::format_ident!("a"),
// },
// colon_token: None,
// bounds: syn::punctuated::Punctuated::new(),
// }));
// syn::Generics {
// lt_token: Some(syn::Token)),
// params,
// gt_token: Some(syn::Token)),
// where_clause: None,
// }
// }
// The supported shapes of a key-bearing field type.
#[derive(Debug, Eq, PartialEq)]
enum KeyType {
    Key(syn::Ident),
    OptionKey(syn::Ident),
    KeySet(syn::Ident),
}
impl KeyType {
    /// Returns the identifier of the table this key refers to.
    fn key_to(&self) -> syn::Ident {
        match self {
            KeyType::Key(i) | KeyType::OptionKey(i) | KeyType::KeySet(i) => i.clone(),
        }
    }
}
fn first_of_type(t: &syn::Type) -> Option<(syn::Ident, syn::Type)> {
let p = if let syn::Type::Path(p) = t {
p
} else {
return None;
};
let path_count = p.path.segments.len();
if path_count != 1 {
return None;
}
let ident = p.path.segments.last().unwrap().clone().ident;
let path_only = p.path.segments.last().unwrap();
let args = if let syn::PathArguments::AngleBracketed(args) = &path_only.arguments {
args
} else {
return None;
};
if args.args.len() != 1 {
return None;
}
use syn::GenericArgument;
let t = if let GenericArgument::Type(t) = args.args.first()? {
t
} else {
return None;
};
Some((ident, t.clone()))
}
// Returns the identifier when `t` is a bare single-segment path with no
// generic arguments (e.g. `Foo`), otherwise `None`.
fn type_is_just_ident(t: &syn::Type) -> Option<syn::Ident> {
    let p = match t {
        syn::Type::Path(p) => p,
        _ => return None,
    };
    if p.path.segments.len() != 1 {
        return None;
    }
    let segment = p.path.segments.last()?;
    // Reject generics: `Foo<T>` is not "just an ident".
    if segment.arguments != syn::PathArguments::None {
        return None;
    }
    Some(segment.ident.clone())
}
// Classifies a field type as one of the supported key shapes —
// `Key<T>`, `Option<Key<T>>`, or `KeySet<T>` — returning `Ok(None)` for
// ordinary non-key types and `Err` for malformed key types.
fn parse_keytype(t: &syn::Type) -> Result<Option<KeyType>, syn::Error> {
    // First handle the nested shapes via `first_of_type`.
    if let Some((key, t)) = first_of_type(&t) {
        if key.to_string() == "Option" {
            if let Some((key, t)) = first_of_type(&t) {
                if key.to_string() == "Key" {
                    if let Some(i) = type_is_just_ident(&t) {
                        return Ok(Some(KeyType::OptionKey(i)));
                    } else {
                        return Err(syn::Error::new_spanned(
                            t,
                            "Key type should be a simple table name",
                        ));
                    }
                }
            }
        } else if key.to_string() == "KeySet" {
            if let Some(i) = type_is_just_ident(&t) {
                return Ok(Some(KeyType::KeySet(i)));
            } else {
                return Err(syn::Error::new_spanned(
                    t,
                    "Key type should be a simple table name",
                ));
            }
        }
    }
    // Fall through: handle bare `Key<T>` (and diagnose other shapes) by
    // inspecting the path manually.
    if let syn::Type::Path(p) = t {
        let path_count = p.path.segments.len();
        // println!("path is {:#?}", p);
        // println!("path_count is {:#?}", path_count);
        if path_count == 1 {
            let ident = p.path.segments.last().unwrap().clone().ident;
            let path_only = p.path.segments.last().unwrap();
            let name = ident.to_string();
            // println!("path_only is {:#?}", name);
            if name == "Option" {
                // NOTE(review): `Option<...>` wrapping a `Key<_>` was handled
                // above, so this arm only fires for odd `Option` shapes; it
                // is not implemented yet and will panic.
                let args = path_only.clone().arguments;
                println!("args are {:#?}", args);
                unimplemented!()
            } else {
                if name == "Key" {
                    if let syn::PathArguments::AngleBracketed(args) = &path_only.arguments {
                        if args.args.len() != 1 {
                            return Err(syn::Error::new_spanned(
                                t,
                                "Key should have just one type argument",
                            ));
                        }
                        use syn::{GenericArgument, Type};
                        if let GenericArgument::Type(Type::Path(ap)) = args.args.first().unwrap() {
                            if ap.path.segments.len() != 1 {
                                return Err(syn::Error::new_spanned(
                                    t,
                                    "Key should have a simple type argument",
                                ));
                            }
                            let tp = ap.path.segments.first().unwrap();
                            if !tp.arguments.is_empty() {
                                Err(syn::Error::new_spanned(
                                    tp.arguments.clone(),
                                    "Key type should be a simple table name",
                                ))
                            } else {
                                let i = tp.ident.clone();
                                // args.args = [// syn::parse_quote!{K},
                                //              args.args.first().unwrap().clone()]
                                //     .into_iter().cloned().collect();
                                // println!("new args: {:?}", args.args);
                                Ok(Some(KeyType::Key(i)))
                            }
                        } else {
                            Err(syn::Error::new_spanned(
                                t,
                                "Key should have a simple type argument",
                            ))
                        }
                    } else {
                        Err(syn::Error::new_spanned(t, "Key should be Key<ATableType>"))
                    }
                } else {
                    Ok(None)
                }
            }
        } else {
            Ok(None)
        }
    } else {
        Ok(None)
    }
}
// Collects the key-typed fields of a named-field struct into a map of
// field name -> key kind. Non-key fields are skipped.
fn parse_fields(
    f: &syn::FieldsNamed,
) -> Result<std::collections::HashMap<syn::Ident, KeyType>, syn::Error> {
    let mut keymap = std::collections::HashMap::new();
    for field in &f.named {
        match parse_keytype(&field.ty)? {
            // Named fields always carry an ident, so the unwrap is safe.
            Some(key_type) => {
                keymap.insert(field.ident.clone().unwrap(), key_type);
            }
            None => {}
        }
    }
    Ok(keymap)
}
impl SchemaInput {
    /// Partitions the parsed items into "pod" (plain old data) items and
    /// key-bearing structs, recording each key struct's field -> key mapping.
    ///
    /// Returns an error when a field's `Key`/`Option<Key>`/`KeySet` type has
    /// an unsupported shape (propagated from `parse_fields`).
    fn process(&self) -> Result<SchemaOutput, syn::Error> {
        // Note: an earlier revision also collected the set of all table
        // identifiers here, but it was never read; the dead local has been
        // removed.
        let mut pod_structs = Vec::new();
        let mut key_structs = Vec::new();
        let mut key_struct_maps = Vec::new();
        for x in self.structs.iter().cloned() {
            match &x.fields {
                syn::Fields::Named(n) => {
                    let keymap = parse_fields(n)?;
                    if keymap.is_empty() {
                        pod_structs.push(x);
                    } else {
                        key_struct_maps.push(keymap);
                        key_structs.push(x);
                    }
                }
                // Tuple and unit structs have no named fields, so they can
                // never carry keys and are always plain old data.
                syn::Fields::Unnamed(_) | syn::Fields::Unit => {
                    pod_structs.push(x);
                }
            }
        }
        // Enums are forced `pub` so the generated code can expose them.
        let pod_enums: Vec<_> = self
            .enums
            .iter()
            .map(|x| {
                let mut x = x.clone();
                x.vis = syn::Visibility::Public(syn::VisPublic {
                    pub_token: syn::Token!(pub)(x.span()),
                });
                x
            })
            .collect();
        Ok(SchemaOutput {
            pod_structs,
            key_structs,
            key_struct_maps,
            pod_enums,
        })
    }
}
#[proc_macro]
pub fn schema(raw_input: proc_macro::TokenStream) -> proc_macro::TokenStream {
// use heck::ShoutySnakeCase;
use heck::SnakeCase;
let input: SchemaInput = syn::parse_macro_input!(raw_input as SchemaInput);
// println!("input is {:#?}", input);
let output = match input.process() {
Err(e) => {
return e.to_compile_error().into();
}
Ok(v) => v,
};
// Here "pod" means "plain old data", and refers to tables that
// have no keys in them. When such tables exist, there is just
// one possible reason: We want to create a back hash so we can
// quickly search for all things that reference a given value,
// which means that we need to effectively intern those values.
// This also may save size, if the same large value is used many
// times in the database (essentially interning).
let pod_structs = &output.pod_structs;
let key_structs = &output.key_structs;
let key_names: Vec<_> = key_structs
.iter()
.map(|x| quote::format_ident!("{}", x.ident.to_string().to_snake_case()))
.collect();
let mut reverse_references = std::collections::HashMap::new();
for (map, t) in output.key_struct_maps.iter().zip(key_structs.iter()) {
// println!("hello we have {:?}", t);
for (k, v) in map.iter() {
let kt = v.key_to();
if !reverse_references.contains_key(&kt) {
reverse_references.insert(kt.clone(), Vec::new());
}
reverse_references
.get_mut(&kt)
.unwrap()
.push((t.ident.clone(), k.clone()));
}
}
// println!("\n\nreverse references are {:?}", reverse_references);
let mut pod_query_backrefs: Vec<Vec<(syn::Ident, syn::Ident)>> = Vec::new();
let pod_query_structs: Vec<syn::ItemStruct> = pod_structs
.iter()
.cloned()
.map(|mut x| {
let i = x.ident.clone();
let mut backrefs = Vec::new();
let mut backrefs_code = Vec::new();
if let Some(v) = reverse_references.get(&x.ident) {
for r in v.iter() {
let field = quote::format_ident!("{}_of", r.1.to_string().to_snake_case());
let t = &r.0;
backrefs.push((t.clone(), field.clone()));
let code = quote::quote! {
pub #field: KeySet<#t>,
};
// println!("\ncode is {:?}", code.to_string());
backrefs_code.push(code);
}
}
pod_query_backrefs.push(backrefs);
x.ident = quote::format_ident!("{}Query", x.ident);
x.fields = syn::Fields::Named(syn::parse_quote! {{
__data: #i,
#(#backrefs_code)*
}});
x
})
.collect();
let pod_query_types: Vec<syn::PathSegment> = pod_query_structs
.iter()
.map(|x| {
let i = x.ident.clone();
syn::parse_quote! {#i}
})
.collect();
let pod_query_new: Vec<_> = pod_query_structs
.iter()
.zip(pod_query_backrefs.iter())
.map(|(x, br)| {
let i = &x.ident;
let backcode = br.iter().map(|(t, f)| {
quote::quote! {
#f: KeySet::<#t>::new(),
}
});
quote::quote! {
#i {
__data: value,
#(#backcode)*
}
}
})
.collect();
let pod_names: Vec<_> = pod_structs
.iter()
.map(|x| quote::format_ident!("{}", x.ident.to_string().to_snake_case()))
.collect();
let pod_inserts: Vec<_> = pod_structs
.iter()
.map(|x| quote::format_ident!("insert_{}", x.ident.to_string().to_snake_case()))
.collect();
let pod_lookups: Vec<_> = pod_structs
.iter()
// only allow lookups on non-generic fields
.filter(|x| x.generics.params.len() == 0)
.map(|x| quote::format_ident!("lookup_{}", x.ident.to_string().to_snake_case()))
.collect();
let pod_lookup_hashes: Vec<_> = pod_structs
.iter()
// only allow lookups on non-generic fields
.filter(|x| x.generics.params.len() == 0)
.map(|x| quote::format_ident!("hash_{}", x.ident.to_string().to_snake_case()))
.collect();
let pod_types: Vec<syn::PathSegment> = pod_structs
.iter()
.map(|x| {
let i = x.ident.clone();
syn::parse_quote! {#i}
})
.collect();
let mut key_query_backrefs: Vec<Vec<(syn::Ident, syn::Ident)>> = Vec::new();
let key_query_structs: Vec<_> = key_structs
.iter()
.cloned()
.map(|mut x| {
let i = x.ident.clone();
let mut backrefs = Vec::new();
let mut backrefs_code = Vec::new();
if let Some(v) = reverse_references.get(&x.ident) {
for r in v.iter() {
let field = quote::format_ident!("{}_of", r.1.to_string().to_snake_case());
let t = &r.0;
backrefs.push((t.clone(), field.clone()));
let code = quote::quote! {
pub #field: KeySet<#t>,
};
// println!("\ncode is {:?}", code.to_string());
backrefs_code.push(code);
}
}
key_query_backrefs.push(backrefs);
x.ident = quote::format_ident!("{}Query", x.ident);
x.fields = syn::Fields::Named(syn::parse_quote! {{
__data: #i,
#(#backrefs_code)*
}});
x
})
.collect();
let key_query_types: Vec<syn::PathSegment> = key_query_structs
.iter()
.map(|x| {
let i = x.ident.clone();
let g = x.generics.clone();
syn::parse_quote! {#i#g}
})
.collect();
let key_inserts: Vec<_> = key_structs
.iter()
.map(|x| quote::format_ident!("insert_{}", x.ident.to_string().to_snake_case()))
.collect();
let key_insert_backrefs: Vec<_> = output
.key_struct_maps
.iter()
.enumerate()
.map(|(i, map)| {
let myname = &key_names[i];
let mut code = Vec::new();
// The following keys_and_types is simply used to ensure we generate
// a reproducible code. This shouldn't be needed for correctness,
// but when I had a bug it was a huge pain to have it randomly
// disappearing.
let mut keys_and_types = map.iter().collect::<Vec<_>>();
keys_and_types.sort_by_key(|a| a.0);
for (k, v) in keys_and_types.into_iter() {
// or just map.iter()
match v {
KeyType::Key(t) => {
let field = quote::format_ident!("{}", t.to_string().to_snake_case());
let rev = quote::format_ident!("{}_of", k.to_string().to_snake_case());
code.push(quote::quote! {
self.#field[self.#myname[idx].#k.0].#rev.insert(k);
});
}
KeyType::OptionKey(t) => {
let field = quote::format_ident!("{}", t.to_string().to_snake_case());
let rev = quote::format_ident!("{}_of", k.to_string().to_snake_case());
code.push(quote::quote! {
if let Some(idxk) = self.#myname[idx].#k {
self.#field[idxk.0].#rev.insert(k);
}
});
}
KeyType::KeySet(t) => {
let field = quote::format_ident!("{}", t.to_string().to_snake_case());
let rev = quote::format_ident!("{}_of", k.to_string().to_snake_case());
code.push(quote::quote! {
for idxk in self.#myname[idx].#k.iter() {
self.#field[idxk.0].#rev.insert(k);
}
});
}
}
}
quote::quote! {
#(#code)*
}
})
.collect();
let key_sets: Vec<_> = key_structs
.iter()
.map(|x| quote::format_ident!("set_{}", x.ident.to_string().to_snake_case()))
.collect();
let key_types: Vec<syn::PathSegment> = key_structs
.iter()
.map(|x| {
let i = x.ident.clone();
let g = x.generics.clone();
syn::parse_quote! {#i#g}
})
.collect();
// let save_enums = output.save_enums.iter();
let table_enums = output.pod_enums.iter();
// save_names.extend(
// output.save_enums.iter().map(|x| x.ident.clone()));
let name = &input.name;
// let savename = quote::format_ident!("{}Save", name);
let output = quote::quote! {
trait Query: std::ops::Deref {
fn new(val: Self::Target) -> Self;
}
trait HasQuery {
type Query: Query<Target=Self>;
}
#(
#[repr(C)]
#[derive(Eq,PartialEq,Hash,Clone)]
#pod_structs
#[repr(C)]
#[derive(Eq,PartialEq,Hash,Clone)]
/// This is plain old data.
#pod_query_structs
impl std::ops::Deref for #pod_query_types {
type Target = #pod_types;
fn deref(&self) -> &Self::Target {
&self.__data
}
}
impl Query for #pod_query_types {
fn new(value: Self::Target) -> Self {
// First pad the value with zeroes, then transmute
// to the query type. This relies on zero bytes
// being valid values for all extra fields in the
// query struct.
#pod_query_new
// let x = (value,
// [0u8; std::mem::size_of::<Self>() - std::mem::size_of::<Self::Target>()]);
// unsafe { std::mem::transmute(x) }
}
}
impl HasQuery for #pod_types {
type Query = #pod_query_types;
}
)*
#(
#[repr(C)]
#[derive(Clone)]
#key_structs
#[repr(C)]
#[derive(Clone)]
/// This table has keys to other tables
#key_query_structs
impl std::ops::Deref for #key_query_types {
type Target = #key_types;
fn deref(&self) -> &Self::Target {
unsafe { &*(self as *const Self as *const Self::Target) }
}
}
impl Query for #key_query_types {
fn new(value: Self::Target) -> Self {
// First pad the value with zeroes, then transmute
// to the query type. This relies on zero bytes
// being valid values for all extra fields in the
// query struct.
// unimplemented!()
let x = (value,
[0u8; std::mem::size_of::<Self>() - std::mem::size_of::<Self::Target>()]);
unsafe { std::mem::transmute(x) }
}
}
impl HasQuery for #key_types {
type Query = #key_query_types;
}
)*
#(
#[derive(Eq,PartialEq,Hash,Clone)]
#table_enums
)*
pub struct #name {
#(
pub #pod_names: Vec<#pod_query_types>,
)*
#(
pub #key_names: Vec<#key_query_types>,
)*
#(
pub #pod_lookup_hashes: std::collections::HashMap<#pod_types, usize>,
)*
}
impl #name {
/// Create an empty #name database.
pub fn new() -> Self {
#name {
#( #pod_names: Vec::new(), )*
#( #key_names: Vec::new(), )*
#(
#pod_lookup_hashes: std::collections::HashMap::new(),
)*
}
}
}
type Set64<K> = tinyset::Set64<K>;
type KeySet<T> = Set64<Key<T>>;
#[derive(Eq,PartialEq,Hash)]
pub struct Key<T>(usize, std::marker::PhantomData<T>);
impl<T> Copy for Key<T> {}
impl<T> Clone for Key<T> {
fn clone(&self) -> Self {
Key(self.0, self.1)
}
}
impl<T> tinyset::Fits64 for Key<T> {
unsafe fn from_u64(x: u64) -> Self {
Key(x as usize, std::marker::PhantomData)
}
fn to_u64(self) -> u64 {
self.0.to_u64()
}
}
impl #name {
#(
pub fn #pod_inserts(&mut self, datum: #pod_types) -> Key<#pod_types> {
let idx = self.#pod_names.len();
self.#pod_names.push(#pod_query_types::new(datum.clone()));
self.#pod_lookup_hashes.insert(datum, idx);
Key(idx, std::marker::PhantomData)
}
)*
#(
pub fn #key_inserts(&mut self, datum: #key_types) -> Key<#key_types> {
let idx = self.#key_names.len();
self.#key_names.push(#key_query_types::new(datum.clone()));
let k = Key(idx, std::marker::PhantomData);
#key_insert_backrefs
k
}
pub fn #key_sets(&mut self, k: Key<#key_types>, datum: #key_types) {
let old = std::mem::replace(&mut self.#key_names[k.0], #key_query_types::new(datum));
// FIXME need to modify any back references.
}
)*
#(
pub fn #pod_lookups(&self, datum: &#pod_types) -> Option<Key<#pod_types>> {
self.#pod_lookup_hashes.get(datum)
.map(|&i| Key(i, std::marker::PhantomData))
// self.0.#table_names.iter().enumerate()
// .filter(|&(_,x)| x == datum)
// .map(|(i,x)| Key(i, std::marker::PhantomData))
// .next()
}
)*
}
#(
impl Key<#pod_types> {
pub fn d<'a,'b>(&'a self, database: &'b #name) -> &'b #pod_query_types {
&database.#pod_names[self.0]
}
}
)*
#(
impl Key<#key_types> {
pub fn d<'a,'b>(&'a self, database: &'b #name) -> &'b #key_query_types {
&database.#key_names[self.0]
}
}
)*
#(
impl std::ops::Index<Key<#key_types>> for #name {
type Output = #key_query_types;
fn index(&self, index: Key<#key_types>) -> &Self::Output {
&self.#key_names[index.0]
}
}
)*
#(
impl std::ops::Index<Key<#pod_types>> for #name {
type Output = #pod_query_types;
fn index(&self, index: Key<#pod_types>) -> &Self::Output {
&self.#pod_names[index.0]
}
}
)*
};
// println!("\n\n\noutput is\n\n{}", output.to_string());
output.into()
}
|
// Submodules live under `with_empty_list_arguments/`; `#[path]` maps them
// explicitly since the directory name differs from this module's name.
#[path = "with_empty_list_arguments/with_empty_list_options.rs"]
mod with_empty_list_options;
#[path = "with_empty_list_arguments/with_link_and_monitor_in_options_list.rs"]
mod with_link_and_monitor_in_options_list;
#[path = "with_empty_list_arguments/with_link_in_options_list.rs"]
mod with_link_in_options_list;
#[path = "with_empty_list_arguments/with_monitor_in_options_list.rs"]
mod with_monitor_in_options_list;
// Project macro: generates a test asserting the program's stdout matches the
// expected text — here, that improper list options raise `badarg`.
test_stdout!(
    without_proper_list_options_errors_badarg,
    "{caught, error, badarg}\n"
);
|
#[doc = r"Register block"]
#[repr(C)]
// svd2rust-generated memory map for the DMAMUX peripheral. Field order and
// the `_reservedN` padding arrays must exactly mirror the hardware offsets
// given in the per-field docs; do not reorder or resize fields.
pub struct RegisterBlock {
    #[doc = "0x00 - DMAMux - DMA request line multiplexer channel x control register"]
    pub c0cr: C0CR,
    #[doc = "0x04 - DMAMux - DMA request line multiplexer channel x control register"]
    pub c1cr: C1CR,
    #[doc = "0x08 - DMAMux - DMA request line multiplexer channel x control register"]
    pub c2cr: C2CR,
    #[doc = "0x0c - DMAMux - DMA request line multiplexer channel x control register"]
    pub c3cr: C3CR,
    #[doc = "0x10 - DMAMux - DMA request line multiplexer channel x control register"]
    pub c4cr: C4CR,
    #[doc = "0x14 - DMAMux - DMA request line multiplexer channel x control register"]
    pub c5cr: C5CR,
    #[doc = "0x18 - DMAMux - DMA request line multiplexer channel x control register"]
    pub c6cr: C6CR,
    // Pads the gap between 0x1c and 0x100 in the register map.
    _reserved7: [u8; 228usize],
    #[doc = "0x100 - DMAMux - DMA request generator channel x control register"]
    pub rg0cr: RG0CR,
    #[doc = "0x104 - DMAMux - DMA request generator channel x control register"]
    pub rg1cr: RG1CR,
    #[doc = "0x108 - DMAMux - DMA request generator channel x control register"]
    pub rg2cr: RG2CR,
    #[doc = "0x10c - DMAMux - DMA request generator channel x control register"]
    pub rg3cr: RG3CR,
    // Pads the gap between 0x110 and 0x140 in the register map.
    _reserved11: [u8; 48usize],
    #[doc = "0x140 - DMAMux - DMA request generator status register"]
    pub rgsr: RGSR,
    #[doc = "0x144 - DMAMux - DMA request generator clear flag register"]
    pub rgcfr: RGCFR,
}
// svd2rust-generated register types for the multiplexer channel control
// registers C0CR..C6CR: each gets a `Reg` alias, a hidden marker struct,
// Readable/Writable marker impls, and a field-accessor module.
#[doc = "DMAMux - DMA request line multiplexer channel x control register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [c0cr](c0cr) module"]
pub type C0CR = crate::Reg<u32, _C0CR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _C0CR;
#[doc = "`read()` method returns [c0cr::R](c0cr::R) reader structure"]
impl crate::Readable for C0CR {}
#[doc = "`write(|w| ..)` method takes [c0cr::W](c0cr::W) writer structure"]
impl crate::Writable for C0CR {}
#[doc = "DMAMux - DMA request line multiplexer channel x control register"]
pub mod c0cr;
#[doc = "DMAMux - DMA request line multiplexer channel x control register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [c1cr](c1cr) module"]
pub type C1CR = crate::Reg<u32, _C1CR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _C1CR;
#[doc = "`read()` method returns [c1cr::R](c1cr::R) reader structure"]
impl crate::Readable for C1CR {}
#[doc = "`write(|w| ..)` method takes [c1cr::W](c1cr::W) writer structure"]
impl crate::Writable for C1CR {}
#[doc = "DMAMux - DMA request line multiplexer channel x control register"]
pub mod c1cr;
#[doc = "DMAMux - DMA request line multiplexer channel x control register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [c2cr](c2cr) module"]
pub type C2CR = crate::Reg<u32, _C2CR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _C2CR;
#[doc = "`read()` method returns [c2cr::R](c2cr::R) reader structure"]
impl crate::Readable for C2CR {}
#[doc = "`write(|w| ..)` method takes [c2cr::W](c2cr::W) writer structure"]
impl crate::Writable for C2CR {}
#[doc = "DMAMux - DMA request line multiplexer channel x control register"]
pub mod c2cr;
#[doc = "DMAMux - DMA request line multiplexer channel x control register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [c3cr](c3cr) module"]
pub type C3CR = crate::Reg<u32, _C3CR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _C3CR;
#[doc = "`read()` method returns [c3cr::R](c3cr::R) reader structure"]
impl crate::Readable for C3CR {}
#[doc = "`write(|w| ..)` method takes [c3cr::W](c3cr::W) writer structure"]
impl crate::Writable for C3CR {}
#[doc = "DMAMux - DMA request line multiplexer channel x control register"]
pub mod c3cr;
#[doc = "DMAMux - DMA request line multiplexer channel x control register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [c4cr](c4cr) module"]
pub type C4CR = crate::Reg<u32, _C4CR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _C4CR;
#[doc = "`read()` method returns [c4cr::R](c4cr::R) reader structure"]
impl crate::Readable for C4CR {}
#[doc = "`write(|w| ..)` method takes [c4cr::W](c4cr::W) writer structure"]
impl crate::Writable for C4CR {}
#[doc = "DMAMux - DMA request line multiplexer channel x control register"]
pub mod c4cr;
#[doc = "DMAMux - DMA request line multiplexer channel x control register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [c5cr](c5cr) module"]
pub type C5CR = crate::Reg<u32, _C5CR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _C5CR;
#[doc = "`read()` method returns [c5cr::R](c5cr::R) reader structure"]
impl crate::Readable for C5CR {}
#[doc = "`write(|w| ..)` method takes [c5cr::W](c5cr::W) writer structure"]
impl crate::Writable for C5CR {}
#[doc = "DMAMux - DMA request line multiplexer channel x control register"]
pub mod c5cr;
#[doc = "DMAMux - DMA request line multiplexer channel x control register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [c6cr](c6cr) module"]
pub type C6CR = crate::Reg<u32, _C6CR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _C6CR;
#[doc = "`read()` method returns [c6cr::R](c6cr::R) reader structure"]
impl crate::Readable for C6CR {}
#[doc = "`write(|w| ..)` method takes [c6cr::W](c6cr::W) writer structure"]
impl crate::Writable for C6CR {}
#[doc = "DMAMux - DMA request line multiplexer channel x control register"]
pub mod c6cr;
// svd2rust-generated types for the request-generator registers RG0CR..RG3CR
// (read/write), RGSR (read-only), and RGCFR (write-only).
#[doc = "DMAMux - DMA request generator channel x control register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [rg0cr](rg0cr) module"]
pub type RG0CR = crate::Reg<u32, _RG0CR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _RG0CR;
#[doc = "`read()` method returns [rg0cr::R](rg0cr::R) reader structure"]
impl crate::Readable for RG0CR {}
#[doc = "`write(|w| ..)` method takes [rg0cr::W](rg0cr::W) writer structure"]
impl crate::Writable for RG0CR {}
#[doc = "DMAMux - DMA request generator channel x control register"]
pub mod rg0cr;
#[doc = "DMAMux - DMA request generator channel x control register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [rg1cr](rg1cr) module"]
pub type RG1CR = crate::Reg<u32, _RG1CR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _RG1CR;
#[doc = "`read()` method returns [rg1cr::R](rg1cr::R) reader structure"]
impl crate::Readable for RG1CR {}
#[doc = "`write(|w| ..)` method takes [rg1cr::W](rg1cr::W) writer structure"]
impl crate::Writable for RG1CR {}
#[doc = "DMAMux - DMA request generator channel x control register"]
pub mod rg1cr;
#[doc = "DMAMux - DMA request generator channel x control register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [rg2cr](rg2cr) module"]
pub type RG2CR = crate::Reg<u32, _RG2CR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _RG2CR;
#[doc = "`read()` method returns [rg2cr::R](rg2cr::R) reader structure"]
impl crate::Readable for RG2CR {}
#[doc = "`write(|w| ..)` method takes [rg2cr::W](rg2cr::W) writer structure"]
impl crate::Writable for RG2CR {}
#[doc = "DMAMux - DMA request generator channel x control register"]
pub mod rg2cr;
#[doc = "DMAMux - DMA request generator channel x control register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [rg3cr](rg3cr) module"]
pub type RG3CR = crate::Reg<u32, _RG3CR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _RG3CR;
#[doc = "`read()` method returns [rg3cr::R](rg3cr::R) reader structure"]
impl crate::Readable for RG3CR {}
#[doc = "`write(|w| ..)` method takes [rg3cr::W](rg3cr::W) writer structure"]
impl crate::Writable for RG3CR {}
#[doc = "DMAMux - DMA request generator channel x control register"]
pub mod rg3cr;
// Status register: read-only, so only `Readable` is implemented.
#[doc = "DMAMux - DMA request generator status register\n\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [rgsr](rgsr) module"]
pub type RGSR = crate::Reg<u32, _RGSR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _RGSR;
#[doc = "`read()` method returns [rgsr::R](rgsr::R) reader structure"]
impl crate::Readable for RGSR {}
#[doc = "DMAMux - DMA request generator status register"]
pub mod rgsr;
// Clear-flag register: write-only, so only `Writable` is implemented.
#[doc = "DMAMux - DMA request generator clear flag register\n\nThis register you can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [rgcfr](rgcfr) module"]
pub type RGCFR = crate::Reg<u32, _RGCFR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _RGCFR;
#[doc = "`write(|w| ..)` method takes [rgcfr::W](rgcfr::W) writer structure"]
impl crate::Writable for RGCFR {}
#[doc = "DMAMux - DMA request generator clear flag register"]
pub mod rgcfr;
|
// Feature-gated debug view: with the `debug-view` feature the real
// implementation compiles; otherwise `dummy` supplies a stand-in with the
// same public surface, so callers never need their own cfg checks.
#[cfg(feature = "debug-view")]
mod debug_view;
#[cfg(not(feature = "debug-view"))]
mod dummy;
#[cfg(feature = "debug-view")]
pub use debug_view::*;
#[cfg(not(feature = "debug-view"))]
pub use dummy::*;
|
use ::ast;
use std::collections::HashMap;
use ::scope::{Scope, Variable, Function};
/// Result alias used throughout the checker: `Ok(())` on success, `Err`
/// carrying a human-readable message on failure.
type PResult = Result<(),String>;
/// Convenience constructor for a failing `PResult` from a message slice.
fn error(txt: &str) -> PResult {
    Err(String::from(txt))
}
|
#![allow(dead_code)]
use std::{cell::RefCell, collections::HashSet, ops::Range, ptr::NonNull};
use rand;
use veclist::VecList;
use allocator::{ArenaConfig, DynamicConfig};
use block::Block;
use device::Device;
use error::{AllocationError, MappingError, MemoryError, OutOfMemoryError};
use heaps::{Config, Heaps, MemoryBlock};
use memory::Properties;
use usage::*;
/// Book-keeping shared by the mock device: the set of memory ids that have
/// been freed and the next id to hand out.
struct Inner {
    freed: HashSet<u64>,
    next: u64,
}
/// A fake device that allocates sequential ids instead of real memory and
/// records frees, letting tests catch double-frees deterministically.
struct MockDevice(RefCell<Inner>);
impl MockDevice {
    /// Start with no freed ids and id counter at zero.
    fn new() -> Self {
        let state = Inner {
            freed: HashSet::new(),
            next: 0,
        };
        MockDevice(RefCell::new(state))
    }
}
impl Device for MockDevice {
    // "Memory" is just an opaque id in the mock.
    type Memory = u64;
    // Hands out the next sequential id; index and size are ignored because no
    // real memory backs the mock.
    unsafe fn allocate(&self, _index: u32, _size: u64) -> Result<u64, AllocationError> {
        let mut inner = self.0.borrow_mut();
        let id = inner.next;
        inner.next = id + 1;
        Ok(id)
    }
    // `HashSet::insert` returns false when the id was already present, i.e.
    // this memory was freed before — which is a test failure.
    unsafe fn free(&self, memory: u64) {
        assert!(self.0.borrow_mut().freed.insert(memory), "Double-free");
    }
    // Returns a dangling (non-null, aligned) pointer; tests must not actually
    // dereference mapped memory.
    unsafe fn map(&self, _memory: &u64, _range: Range<u64>) -> Result<NonNull<u8>, MappingError> {
        Ok(NonNull::dangling())
    }
    unsafe fn unmap(&self, _memory: &u64) {}
    // Cache-maintenance hooks are not exercised by the current tests.
    unsafe fn invalidate<'a>(
        &self,
        _regions: impl IntoIterator<Item = (&'a u64, Range<u64>)>,
    ) -> Result<(), OutOfMemoryError> {
        unimplemented!()
    }
    unsafe fn flush<'a>(
        &self,
        _regions: impl IntoIterator<Item = (&'a u64, Range<u64>)>,
    ) -> Result<(), OutOfMemoryError> {
        unimplemented!()
    }
}
/// Builds a `Heaps` with four memory types over three mock heaps, roughly
/// mirroring a discrete GPU: device-local, device-local + host-visible,
/// host-visible, and host-cached.
fn init() -> Heaps<u64> {
    let arena = ArenaConfig {
        arena_size: 32 * 1024,
    };
    let dynamic = DynamicConfig {
        blocks_per_chunk: 64,
        block_size_granularity: 256,
        max_block_size: 32 * 1024,
    };
    let small_dynamic = DynamicConfig {
        blocks_per_chunk: 64,
        block_size_granularity: 32,
        max_block_size: 1024,
    };
    // One (properties, heap index, allocator config) entry per memory type.
    let types = vec![
        (
            Properties::DEVICE_LOCAL,
            0,
            Config {
                arena: None,
                dynamic: Some(dynamic),
            },
        ),
        (
            Properties::HOST_VISIBLE | Properties::HOST_COHERENT | Properties::DEVICE_LOCAL,
            1,
            Config {
                arena: None,
                dynamic: Some(small_dynamic),
            },
        ),
        (
            Properties::HOST_VISIBLE | Properties::HOST_COHERENT,
            2,
            Config {
                arena: Some(arena),
                dynamic: Some(dynamic),
            },
        ),
        (
            Properties::HOST_VISIBLE | Properties::HOST_COHERENT | Properties::HOST_CACHED,
            2,
            Config {
                arena: Some(arena),
                dynamic: Some(dynamic),
            },
        ),
    ];
    // Heap sizes: 16 MiB, 1 MiB, 32 MiB.
    let heaps = vec![16 * 1024 * 1024, 1 * 1024 * 1024, 32 * 1024 * 1024];
    // The safety contract is `Heaps::new`'s; these fixed mock values satisfy
    // it by construction.
    unsafe { Heaps::new(types, heaps) }
}
/// Picks one of the four usage profiles uniformly at random.
fn random_usage() -> UsageValue {
    let roll = rand::random::<u8>() % 4;
    match roll {
        0 => UsageValue::Data,
        1 => UsageValue::Download,
        2 => UsageValue::Upload,
        // `roll` is in 0..4, so the only remaining value is 3.
        _ => UsageValue::Dynamic,
    }
}
#[derive(Debug)]
// A randomized allocation request: memory-type bit mask, usage profile, and
// power-of-two size/alignment (both at most 512 — see `Allocation::random`).
struct Allocation {
    mask: u32,
    usage: UsageValue,
    size: u64,
    align: u64,
}
impl Allocation {
    // Builds a random request. Mask math: `% 3` keeps a value in 0..3 over
    // bits 0-1, then one of bits 0-1 is force-set so the mask is never empty;
    // non-Data usages shift left one bit, steering them toward higher memory
    // type indices. NOTE(review): the exact distribution appears deliberate
    // but is undocumented — confirm before changing.
    fn random() -> Self {
        let usage = random_usage();
        let mask = (rand::random::<u32>() % 3) | (1 << rand::random::<u32>() % 2);
        let mask = match usage {
            UsageValue::Data => mask,
            _ => mask << 1,
        };
        Allocation {
            mask,
            usage,
            size: 1 << (rand::random::<u32>() % 10),
            align: 1 << (rand::random::<u32>() % 10),
        }
    }
    // Allocates from `heaps` and verifies the returned block satisfies the
    // request: large enough, correctly aligned, usage-compatible properties,
    // and a memory type permitted by the mask.
    fn allocate(
        &self,
        heaps: &mut Heaps<u64>,
        device: &MockDevice,
    ) -> Result<MemoryBlock<u64>, MemoryError> {
        let block = heaps.allocate(device, self.mask, self.usage, self.size, self.align)?;
        assert!(block.range().end - block.range().start >= self.size);
        assert_eq!(
            block.range().start % self.align,
            0,
            "Block: {:#?} allocated without requested align {}",
            block,
            self.align
        );
        assert!(self.usage.memory_fitness(block.properties()).is_some());
        assert_ne!((1 << block.memory_type()) & self.mask, 0);
        Ok(block)
    }
}
#[test]
fn heaps_init() {
    // `Heaps` must drop cleanly even when nothing was ever allocated.
    drop(init());
}
#[test]
// Randomized stress test: 32 rounds, each either allocating a random block or
// freeing a randomly chosen live one, then everything is drained and the
// heaps disposed — `dispose` is expected to see no live allocations.
fn blocks_test() {
    let mut heaps = init();
    let ref device = MockDevice::new();
    let mut blocks = VecList::new();
    for _ in 0..32 {
        match rand::random::<u8>() % 2 {
            // Allocate: failure aborts with full context for debugging.
            0 => {
                let allocation = Allocation::random();
                match allocation.allocate(&mut heaps, &device) {
                    Ok(block) => {
                        blocks.push(block);
                    }
                    Err(err) => {
                        panic!(
                            "Error({}) occurred for {:#?}. Blocks: {:#?}",
                            err, allocation, blocks
                        );
                    }
                }
            }
            // Free a random slot; `pop` may return None for an empty slot.
            _ if blocks.upper_bound() > 1 => {
                let index = rand::random::<usize>() % blocks.upper_bound();
                if let Some(block) = blocks.pop(index) {
                    heaps.free(device, block);
                }
            }
            _ => {}
        }
    }
    // Drain all remaining blocks before disposal.
    for i in 0..blocks.upper_bound() {
        if let Some(block) = blocks.pop(i) {
            heaps.free(device, block);
        }
    }
    drop(blocks);
    println!("Dropping Heaps");
    heaps.dispose(device);
}
|
// svd2rust-generated reader/writer plumbing for register DOUTR17.
#[doc = "Reader of register DOUTR17"]
pub type R = crate::R<u32, super::DOUTR17>;
#[doc = "Writer for register DOUTR17"]
pub type W = crate::W<u32, super::DOUTR17>;
#[doc = "Register DOUTR17 `reset()`'s with value 0"]
impl crate::ResetValue for super::DOUTR17 {
    type Type = u32;
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0
    }
}
#[doc = "Reader of field `DOUT17`"]
pub type DOUT17_R = crate::R<u16, u16>;
#[doc = "Write proxy for field `DOUT17`"]
// Borrows the register writer mutably so writes can be chained.
pub struct DOUT17_W<'a> {
    w: &'a mut W,
}
impl<'a> DOUT17_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    // The field occupies bits 0:15: clear them, then OR in the masked value.
    pub unsafe fn bits(self, value: u16) -> &'a mut W {
        self.w.bits = (self.w.bits & !0xffff) | ((value as u32) & 0xffff);
        self.w
    }
}
impl R {
    #[doc = "Bits 0:15 - Output data sent to MDIO Master during read frames"]
    #[inline(always)]
    // Extracts the low 16 bits into the field reader.
    pub fn dout17(&self) -> DOUT17_R {
        DOUT17_R::new((self.bits & 0xffff) as u16)
    }
}
impl W {
    #[doc = "Bits 0:15 - Output data sent to MDIO Master during read frames"]
    #[inline(always)]
    // Returns the write proxy for the DOUT17 field.
    pub fn dout17(&mut self) -> DOUT17_W {
        DOUT17_W { w: self }
    }
}
|
use crate::sys;
use log::{Level as LogLevel, LevelFilter, Metadata, Record, SetLoggerError};
/// See Level enum defined in https://github.com/CommonWA/cwa-spec/blob/master/ns/log.md#write
#[repr(i32)]
#[derive(Debug)]
/// Logging facade for olin. This will be exposed with the [log](https://docs.rs/log)
/// crate, but you can do this manually if you really want.
pub enum Level {
    Error = 1,
    Debug = 2,
    Warning = 3,
    Trace = 4,
    // NOTE(review): 5 is skipped and Info is 6 — presumably matching the CWA
    // spec's numbering (see link above); confirm against the spec before
    // renumbering.
    Info = 6,
}
impl From<LogLevel> for Level {
    /// Maps the `log` crate's levels onto the host's CWA levels. The two
    /// numbering schemes disagree, so this is an explicit per-variant map
    /// rather than a numeric cast.
    fn from(ll: LogLevel) -> Level {
        match ll {
            LogLevel::Trace => Level::Trace,
            LogLevel::Debug => Level::Debug,
            LogLevel::Info => Level::Info,
            LogLevel::Warn => Level::Warning,
            LogLevel::Error => Level::Error,
        }
    }
}
/// Writes a line of text with the specified level to the host logger.
pub fn write(level: Level, text: &str) {
    let text = text.as_bytes();
    // Delegates to the host syscall; the pointer/length pair describes
    // exactly the byte slice, which outlives the call.
    unsafe { sys::log_write(level as i32, text.as_ptr(), text.len()) }
}
/// Convenience wrapper for the error level.
pub fn error(text: &str) {
    write(Level::Error, text)
}
/// Convenience wrapper for the warning level.
pub fn warning(text: &str) {
    write(Level::Warning, text)
}
/// Convenience wrapper for the info level.
pub fn info(text: &str) {
    write(Level::Info, text)
}
/// Adapter implementing the `log` crate's `Log` trait on top of [`write`].
pub struct Logger;
impl log::Log for Logger {
    // Level filtering is handled globally via `set_max_level` in `init`,
    // so every record reaching here is accepted.
    fn enabled(&self, _metadata: &Metadata) -> bool {
        true
    }
    // Formats "target -- message" and forwards it at the mapped level.
    fn log(&self, record: &Record) {
        write(
            record.level().into(),
            &format!(r#"{} -- {}"#, record.target(), record.args()),
        )
    }
    // The host syscall writes immediately; nothing to flush.
    fn flush(&self) {}
}
// The single global logger instance registered by `init`.
pub(crate) static LOGGER: Logger = Logger;
/// Installs [`LOGGER`] as the `log` crate's global logger and caps the
/// maximum level at Info. Fails if a logger was already set.
pub fn init() -> Result<(), SetLoggerError> {
    log::set_logger(&LOGGER).map(|()| log::set_max_level(LevelFilter::Info))
}
|
extern crate rand;
use rand::Rng;
#[derive(Debug)]
// Coin denominations, listed in ascending value order.
enum CoinType {
    Copper,
    Silver,
    Electrum,
    Gold,
    Platinum
}
#[derive(Debug)]
// The 20 potion kinds; `PotionType::random` relies on there being exactly 20
// variants, so keep the two in sync when editing.
enum PotionType {
    Longevity,
    Love,
    Poison,
    Climbing,
    Delusion,
    Diminutiveness,
    ExtraGrowth,
    ExtraHealing,
    FireResist,
    Flying,
    GaseousForm,
    GiantPower,
    Healing,
    HeroicAction,
    Invincibility,
    Invisibility,
    Levitation,
    SuperHeroicAction,
    SuperSpeed,
    WaterBreathing,
}
impl PotionType {
    /// Draws one of the 20 potion types uniformly at random.
    fn random() -> PotionType {
        let mut rng = rand::thread_rng();
        // A full 64-bit draw modulo 20 has negligible bias.
        match rng.gen::<u64>()%20 {
            0 => PotionType::Longevity,
            1 => PotionType::Love,
            2 => PotionType::Poison,
            3 => PotionType::Climbing,
            4 => PotionType::Delusion,
            5 => PotionType::Diminutiveness,
            6 => PotionType::ExtraGrowth,
            7 => PotionType::ExtraHealing,
            8 => PotionType::FireResist,
            9 => PotionType::Flying,
            10 => PotionType::GaseousForm,
            11 => PotionType::GiantPower,
            12 => PotionType::Healing,
            13 => PotionType::HeroicAction,
            14 => PotionType::Invincibility,
            15 => PotionType::Invisibility,
            16 => PotionType::Levitation,
            17 => PotionType::SuperHeroicAction,
            18 => PotionType::SuperSpeed,
            19 => PotionType::WaterBreathing,
            // `% 20` guarantees 0..20, so this arm is impossible;
            // `unreachable!()` states that intent (vs. the old bare `panic!()`).
            _ => unreachable!()
        }
    }
}
#[derive(Debug)]
// A single kind of loot: either a coin denomination or a potion.
enum Treasure {
    Coins(CoinType),
    Potion(PotionType),
}
#[derive(Debug)]
// A treasure together with how many of it were rolled.
struct TreasureEntry {
    quantity: u64,
    treasure: Treasure,
}
/// Rolls `quant` dice of `sz` sides each and returns the total
/// (i.e. the tabletop notation "quant d sz").
fn dice_roll(quant: u64, sz: u64) -> u64 {
    let mut rng = rand::thread_rng();
    (0..quant).map(|_| rng.gen::<u64>() % sz + 1).sum()
}
/// Returns true with approximately `chance` percent probability.
/// NOTE(review): `u8 % 100` is slightly biased (256 is not a multiple of
/// 100, so 0..=55 occur marginally more often); fine for a treasure table,
/// but use `Rng::gen_range` if exact probabilities ever matter.
fn percent_chance(chance: u8) -> bool {
    let mut rng = rand::thread_rng();
    rng.gen::<u8>()%100 < chance
}
fn main() {
let code = 's';
let mut treasures = Vec::new();
match code {
'j' => treasures.push(TreasureEntry {
treasure: Treasure::Coins(CoinType::Copper),
quantity: dice_roll(4, 6)
}),
'k' => treasures.push(TreasureEntry {
treasure: Treasure::Coins(CoinType::Silver),
quantity: dice_roll(4, 4)
}),
'l' => treasures.push(TreasureEntry {
treasure: Treasure::Coins(CoinType::Electrum),
quantity: dice_roll(3, 4)
}),
'm' => treasures.push(TreasureEntry {
treasure: Treasure::Coins(CoinType::Gold),
quantity: dice_roll(1, 8)
}),
'n' => treasures.push(TreasureEntry {
treasure: Treasure::Coins(CoinType::Platinum),
quantity: dice_roll(1, 4) + 1
}),
'o' => {
if percent_chance(25) {
treasures.push(TreasureEntry {
treasure: Treasure::Coins(CoinType::Copper),
quantity: dice_roll(2, 4)
})
}
if percent_chance(20) {
treasures.push(TreasureEntry {
treasure: Treasure::Coins(CoinType::Silver),
quantity: dice_roll(1, 6)
})
}
},
'p' => {
if percent_chance(30) {
treasures.push(TreasureEntry {
treasure: Treasure::Coins(CoinType::Silver),
quantity: dice_roll(2, 6)
})
}
if percent_chance(20) {
treasures.push(TreasureEntry {
treasure: Treasure::Coins(CoinType::Electrum),
quantity: dice_roll(1, 4)
})
}
},
's' => {
if percent_chance(40) {
for _ in 0..dice_roll(2, 4) {
treasures.push(TreasureEntry {
treasure: Treasure::Potion(PotionType::random()),
quantity: 1
})
}
}
},
'y' => {
if percent_chance(70) {
treasures.push(TreasureEntry {
treasure: Treasure::Coins(CoinType::Gold),
quantity: dice_roll(4, 12)
})
}
}
_ => panic!("Unknown treasure code")
};
println!("TREASURE");
println!("========");
println!("{:?}", treasures);
}
|
extern crate hyrohor;
/// Entry point: delegates entirely to the `hyrohor` server.
fn main() {
    hyrohor::serve();
}
|
#[doc = r" Register block"]
#[repr(C)]
// svd2rust-generated PPI memory map. Field order and `_reservedN` padding
// must match the hardware offsets in the per-field docs exactly.
// NOTE(review): fields sit at consecutive offsets (TASKS_CHG and CH are each
// 8 bytes) yet the names skip odd indices (tasks_chg0/tasks_chg2, ch0/ch2,
// ...); presumably an artifact of SVD array expansion — verify against the
// device's SVD before relying on the numbering.
pub struct RegisterBlock {
    #[doc = "0x00 - Channel group tasks."]
    pub tasks_chg0: TASKS_CHG,
    #[doc = "0x08 - Channel group tasks."]
    pub tasks_chg2: TASKS_CHG,
    // Pads the gap between 0x10 and 0x500.
    _reserved0: [u8; 1264usize],
    #[doc = "0x500 - Channel enable."]
    pub chen: CHEN,
    #[doc = "0x504 - Channel enable set."]
    pub chenset: CHENSET,
    #[doc = "0x508 - Channel enable clear."]
    pub chenclr: CHENCLR,
    _reserved1: [u8; 4usize],
    #[doc = "0x510 - PPI Channel."]
    pub ch0: CH,
    #[doc = "0x518 - PPI Channel."]
    pub ch2: CH,
    #[doc = "0x520 - PPI Channel."]
    pub ch4: CH,
    #[doc = "0x528 - PPI Channel."]
    pub ch6: CH,
    #[doc = "0x530 - PPI Channel."]
    pub ch8: CH,
    #[doc = "0x538 - PPI Channel."]
    pub ch10: CH,
    #[doc = "0x540 - PPI Channel."]
    pub ch12: CH,
    #[doc = "0x548 - PPI Channel."]
    pub ch14: CH,
    // Pads the gap between 0x550 and 0x800.
    _reserved2: [u8; 688usize],
    #[doc = "0x800 - Channel group configuration."]
    pub chg: [CHG; 4],
}
#[doc = r" Register block"]
#[repr(C)]
// A channel group's task pair: enable at +0x00, disable at +0x04.
pub struct TASKS_CHG {
    #[doc = "0x00 - Enable channel group."]
    pub en: self::tasks_chg::EN,
    #[doc = "0x04 - Disable channel group."]
    pub dis: self::tasks_chg::DIS,
}
#[doc = r" Register block"]
#[doc = "Channel group tasks."]
pub mod tasks_chg;
#[doc = r" Register block"]
#[repr(C)]
// A PPI channel's endpoint pair: event end-point then task end-point.
pub struct CH {
    #[doc = "0x00 - Channel event end-point."]
    pub eep: self::ch::EEP,
    #[doc = "0x04 - Channel task end-point."]
    pub tep: self::ch::TEP,
}
#[doc = r" Register block"]
#[doc = "PPI Channel."]
pub mod ch;
// Single-word volatile registers: each wraps a `VolatileCell<u32>` so reads
// and writes are never elided or reordered by the compiler.
#[doc = "Channel enable."]
pub struct CHEN {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Channel enable."]
pub mod chen;
#[doc = "Channel enable set."]
pub struct CHENSET {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Channel enable set."]
pub mod chenset;
#[doc = "Channel enable clear."]
pub struct CHENCLR {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Channel enable clear."]
pub mod chenclr;
#[doc = "Channel group configuration."]
pub struct CHG {
    register: ::vcell::VolatileCell<u32>,
}
#[doc = "Channel group configuration."]
pub mod chg;
|
use std::fmt;
use libc::c_int;
use libc::{EACCES, EIO, ENOENT};
#[derive(Debug)]
// Filesystem-facing error kinds, each mapping to one POSIX errno value.
pub enum Error {
    NotFound,
    Forbidden,
    IOError,
}
impl Error {
    /// Maps this error to the POSIX errno to report to the kernel/caller.
    pub fn to_error_code(&self) -> c_int {
        match self {
            Error::NotFound => ENOENT,
            Error::Forbidden => EACCES,
            Error::IOError => EIO,
        }
    }
}
impl fmt::Display for Error {
    // Display reuses the Debug variant name; adequate for log output.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{:?}", self)
    }
}
impl std::error::Error for Error {}
// Crate-wide result alias over [`Error`].
pub type Result<T> = std::result::Result<T, Error>;
|
// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// aux-build:hygiene_example_codegen.rs
// aux-build:hygiene_example.rs
// ignore-stage1
#![feature(proc_macro_non_items)]
extern crate hygiene_example;
use hygiene_example::hello;
// Exercises proc-macro hygiene: locally defined items and macros that share
// names with ones the `hello!` expansion uses must not conflict with it.
// This is a compiler test — the bodies are intentionally empty/shadowing.
fn main() {
    mod hygiene_example {} // no conflict with `extern crate hygiene_example;` from the proc macro
    macro_rules! format { () => {} } // does not interfere with `format!` from the proc macro
    macro_rules! hello_helper { () => {} } // similarly does not interfere with the proc macro
    let string = "world"; // no conflict with `string` from the proc macro
    hello!(string);
    hello!(string);
}
|
// Crate surface: re-export the two public types from their modules.
mod data;
mod extension;
pub use data::Data;
pub use extension::ExtensionValue;
|
/// Advent of Code 2020 day 13: bus schedules.
///
/// Part 1 finds the bus with the shortest wait after `departure`; part 2
/// finds the earliest timestamp where every bus departs at its offset in the
/// schedule, via an incremental sieve (step grows by each bus's period).
fn main() {
    let mut lines = aoc::file_lines_iter("./day13.txt");
    let departure = lines.next().unwrap().parse::<u64>().unwrap();
    let busses : Vec<_> = lines.next().unwrap().split(',').map(|s| s.to_string()).collect();

    // Part 1.
    let mut min_time = u64::MAX;
    let mut bus_id = 0;
    for v in busses.iter().filter(|v| v.as_str() != "x") {
        let id = v.parse::<u64>().unwrap();
        // BUG FIX: when `departure % id == 0` the bus leaves exactly at
        // `departure`, so the wait is 0 — the old `id - departure % id`
        // wrongly produced `id` in that case. The trailing `% id` fixes it.
        let time = (id - departure % id) % id;
        if time < min_time {
            min_time = time;
            bus_id = id;
        }
    }
    println!("Part 1: {}", bus_id * min_time);

    // Part 2: collect (bus id, schedule offset) pairs, skipping "x" slots.
    let mut bus_ids2 : Vec<(u64, u64)> = Vec::new();
    for (offset, v) in busses.iter().enumerate() {
        if v.as_str() != "x" {
            bus_ids2.push((v.parse::<u64>().unwrap(), offset as u64));
        }
    }
    // Sieve: `i` always satisfies all constraints seen so far; `step` is the
    // product of their periods, so adding it preserves those constraints.
    let mut busses = bus_ids2.iter();
    let (mut i, mut step) = (0, busses.next().unwrap().0);
    for (id, offset) in busses {
        while (i + offset) % id != 0 {
            i += step;
        }
        step *= id;
    }
    println!("Part 2: {} ", i);
}
|
use std::ops::{Deref, DerefMut};
use serde::{Deserialize, Serialize};
pub use serde_json::value::RawValue as JsonRawValue;
pub use serde_json::Value as JsonValue;
use crate::database::{Database, HasArguments, HasValueRef};
use crate::decode::Decode;
use crate::encode::{Encode, IsNull};
use crate::error::BoxDynError;
use crate::types::Type;
/// Json for json and jsonb fields
///
/// Will attempt to cast to type passed in as the generic.
///
/// ```toml
/// [dependencies]
/// serde_json = { version = "1.0", features = ["raw_value"] }
///
/// ```
///
/// # Example
///
/// ```
/// # use serde::Deserialize;
/// #[derive(Deserialize)]
/// struct Book {
/// name: String
/// }
///
/// #[derive(sqlx::FromRow)]
/// struct Author {
/// name: String,
/// books: sqlx::types::Json<Book>
/// }
/// ```
///
/// Can also be used to turn the json/jsonb into a hashmap
/// ```
/// use std::collections::HashMap;
/// use serde::Deserialize;
///
/// #[derive(Deserialize)]
/// struct Book {
/// name: String
/// }
/// #[derive(sqlx::FromRow)]
/// struct Library {
/// id: String,
/// dewey_decimal: sqlx::types::Json<HashMap<String, Book>>
/// }
/// ```
// Transparent wrapper mapping a Rust value onto a SQL `json`/`jsonb` column
// via serde; `#[serde(transparent)]` makes it (de)serialize exactly like `T`.
#[derive(
    Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Default, Serialize, Deserialize,
)]
#[serde(transparent)]
pub struct Json<T: ?Sized>(pub T);

// Deref/AsRef plumbing so a `Json<T>` can be used wherever a `T` is expected.
impl<T> Deref for Json<T> {
    type Target = T;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

impl<T> DerefMut for Json<T> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.0
    }
}

impl<T> AsRef<T> for Json<T> {
    fn as_ref(&self) -> &T {
        &self.0
    }
}

impl<T> AsMut<T> for Json<T> {
    fn as_mut(&mut self) -> &mut T {
        &mut self.0
    }
}

// `serde_json::Value` support: the driver-specific `Json` impls do the actual
// wire encoding; these impls only delegate through the newtype.
impl<DB> Type<DB> for JsonValue
where
    Json<Self>: Type<DB>,
    DB: Database,
{
    fn type_info() -> DB::TypeInfo {
        <Json<Self> as Type<DB>>::type_info()
    }

    fn compatible(ty: &DB::TypeInfo) -> bool {
        <Json<Self> as Type<DB>>::compatible(ty)
    }
}

impl<'q, DB> Encode<'q, DB> for JsonValue
where
    for<'a> Json<&'a Self>: Encode<'q, DB>,
    DB: Database,
{
    fn encode_by_ref(&self, buf: &mut <DB as HasArguments<'q>>::ArgumentBuffer) -> IsNull {
        // Encode through `Json<&Self>` to avoid cloning the value.
        <Json<&Self> as Encode<'q, DB>>::encode(Json(self), buf)
    }
}

impl<'r, DB> Decode<'r, DB> for JsonValue
where
    Json<Self>: Decode<'r, DB>,
    DB: Database,
{
    fn decode(value: <DB as HasValueRef<'r>>::ValueRef) -> Result<Self, BoxDynError> {
        // Unwrap the `Json` newtype after the driver has decoded it.
        <Json<Self> as Decode<DB>>::decode(value).map(|item| item.0)
    }
}

// `RawValue` is unsized, so these impls go through `Json<&Self>`.
impl<DB> Type<DB> for JsonRawValue
where
    for<'a> Json<&'a Self>: Type<DB>,
    DB: Database,
{
    fn type_info() -> DB::TypeInfo {
        <Json<&Self> as Type<DB>>::type_info()
    }

    fn compatible(ty: &DB::TypeInfo) -> bool {
        <Json<&Self> as Type<DB>>::compatible(ty)
    }
}

// We don't have to implement Encode for JsonRawValue because that's covered by the default
// implementation for Encode
impl<'r, DB> Decode<'r, DB> for &'r JsonRawValue
where
    Json<Self>: Decode<'r, DB>,
    DB: Database,
{
    fn decode(value: <DB as HasValueRef<'r>>::ValueRef) -> Result<Self, BoxDynError> {
        <Json<Self> as Decode<DB>>::decode(value).map(|item| item.0)
    }
}
|
/// Parses the puzzle input into `(direction, amount)` pairs, one per line.
///
/// Panics if a line has no space separator or a non-numeric amount.
fn commands(input: &str) -> impl Iterator<Item = (&str, u32)> {
    input.lines().map(|cmd| {
        let (direction, amount) = cmd.split_once(' ').unwrap();
        let amount: u32 = amount.parse().unwrap();
        (direction, amount)
    })
}
/// Part 1: `forward` moves horizontally, `down`/`up` change the depth
/// directly. Returns the final horizontal position times the final depth.
fn part1(input: &str) -> u32 {
    let (hpos, depth) = commands(input).fold((0u32, 0u32), |(h, d), (dir, amount)| match dir {
        "forward" => (h + amount, d),
        "down" => (h, d + amount),
        "up" => (h, d - amount),
        _ => panic!("Invalid direction: {dir}"),
    });
    hpos * depth
}
/// Part 2: `down`/`up` change the aim; `forward` moves horizontally and
/// descends by `aim * amount`. Returns horizontal position times depth.
fn part2(input: &str) -> u32 {
    let (hpos, depth, _aim) =
        commands(input).fold((0u32, 0u32, 0u32), |(h, d, a), (dir, amount)| match dir {
            "forward" => (h + amount, d + a * amount, a),
            "down" => (h, d, a + amount),
            "up" => (h, d, a - amount),
            _ => panic!("Invalid direction: {dir}"),
        });
    hpos * depth
}
// Sample input from the day 2 problem statement (expected: 150 / 900).
#[cfg(test)]
const INPUT: &str = "forward 5
down 5
forward 8
up 3
down 8
forward 2";

// `in => ...` cases run each part against the real puzzle input file.
aoc::tests! {
    fn part1:
        INPUT => 150;
        in => 1762050;
    fn part2:
        INPUT => 900;
        in => 1855892637;
}

aoc::main!(part1, part2);
|
use {Script, Command, rect};
/// Convenience wrapper: builds a `rect` draw command with the given
/// top-left corner, size and fill character.
fn draw(x: u32, y: u32, w: u32, h: u32, fill: char) -> Command {
    rect(x, y, w, h, fill)
}
// NOTE(review): these tests use the pre-1.0 full-range indexing sugar
// `cmds[]` (whole-slice view of the Vec); on modern Rust this would be
// written `&cmds[..]`. Left as-is to match the toolchain this targets.
#[test]
fn empty_space() {
    // A script with no commands renders an all-dots canvas.
    assert_eq!(Script::new(5, 3).run().as_slice(),
               ".....\n\
                .....\n\
                .....\n");
}

#[test]
fn three_by_three_b() {
    // Minimal non-degenerate box: 3x3 is mostly border with one interior cell.
    let cmds = vec![
        draw(1, 1, 3, 3, 'b')
    ];
    assert_eq!(Script::new_commands(5, 5, cmds[]).run().as_slice(),
               ".....\n\
                .+-+.\n\
                .|b|.\n\
                .+-+.\n\
                .....\n");
}

#[test]
fn three_by_four_b() {
    // One extra row of interior fill compared to the 3x3 case.
    let cmds = vec![
        draw(1, 1, 3, 4, 'b')
    ];
    assert_eq!(Script::new_commands(5, 5, cmds[]).run().as_slice(),
               ".....\n\
                .+-+.\n\
                .|b|.\n\
                .|b|.\n\
                .+-+.\n");
}

#[test]
fn three_by_four_c() {
    // Same geometry as above, different fill character.
    let cmds = vec![
        draw(1, 1, 3, 4, 'c')
    ];
    assert_eq!(Script::new_commands(5, 5, cmds[]).run().as_slice(),
               ".....\n\
                .+-+.\n\
                .|c|.\n\
                .|c|.\n\
                .+-+.\n");
}

#[test]
fn side_by_side() {
    // Two boxes that do not touch.
    let cmds = vec![
        draw(1, 0, 3, 4, 'b'),
        draw(6, 2, 3, 3, 'c')
    ];
    assert_eq!(Script::new_commands(10, 5, cmds[]).run().as_slice(),
               ".+-+......\n\
                .|b|......\n\
                .|b|..+-+.\n\
                .+-+..|c|.\n\
                ......+-+.\n");
}

#[test]
fn overlapping() {
    // The later command draws on top of the earlier one.
    let cmds = vec![
        draw(0, 0, 3, 4, 'b'),
        draw(1, 2, 3, 3, 'c')
    ];
    assert_eq!(Script::new_commands(5, 5, cmds[]).run().as_slice(),
               "+-+..\n\
                |b|..\n\
                |+-+.\n\
                +|c|.\n\
                .+-+.\n");
}

#[test]
fn stout() {
    // Height 2: top and bottom borders only, no interior row for the fill.
    let cmds = vec![
        draw(1, 1, 5, 2, 'b'),
    ];
    assert_eq!(Script::new_commands(7, 7, cmds[]).run().as_slice(),
               ".......\n\
                .+---+.\n\
                .+---+.\n\
                .......\n\
                .......\n\
                .......\n\
                .......\n");
}

#[test]
fn skinny() {
    // Width 2: left and right borders only, no interior column for the fill.
    let cmds = vec![
        draw(1, 1, 2, 5, 'b'),
    ];
    assert_eq!(Script::new_commands(7, 7, cmds[]).run().as_slice(),
               ".......\n\
                .++....\n\
                .||....\n\
                .||....\n\
                .||....\n\
                .++....\n\
                .......\n");
}

#[test]
fn four_by_one_b() {
    // Height 1: degenerate box rendered as a run of the fill character.
    let cmds = vec![
        draw(1, 1, 4, 1, 'b'),
    ];
    assert_eq!(Script::new_commands(7, 7, cmds[]).run().as_slice(),
               ".......\n\
                .bbbb..\n\
                .......\n\
                .......\n\
                .......\n\
                .......\n\
                .......\n");
}

#[test]
fn one_by_four_c() {
    // Width 1: degenerate box rendered as a column of the fill character.
    let cmds = vec![
        draw(2, 1, 1, 4, 'c'),
    ];
    assert_eq!(Script::new_commands(7, 7, cmds[]).run().as_slice(),
               ".......\n\
                ..c....\n\
                ..c....\n\
                ..c....\n\
                ..c....\n\
                .......\n\
                .......\n");
}

#[test]
fn max_box() {
    // A box filling the whole canvas exactly.
    let cmds = vec![
        draw(0, 0, 5, 3, 'b'),
    ];
    assert_eq!(Script::new_commands(5, 3, cmds[]).run().as_slice(),
               "+---+\n\
                |bbb|\n\
                +---+\n");
}
|
use crate::geom::HitSide;
use crate::math::{Unit3, Vec3};
/// Cosine of the polar angle of `dir`: its z component. (The local frame's
/// z axis is presumably the surface normal — confirm with the geometry code.)
pub fn cos_theta(dir: Unit3) -> f64 {
    dir[2]
}
pub fn sin_theta(dir: Unit3) -> f64 {
(1. - cos_theta(dir).powi(2)).sqrt()
}
/// True when the two vectors have strictly same-signed z components, i.e.
/// they point into the same hemisphere around the local z axis. A zero z
/// component on either side yields `false`.
pub fn same_hemisphere(incoming: Vec3, outgoing: Vec3) -> bool {
    incoming[2] * outgoing[2] > 0.
}
/// Per-hit data needed for shading, expressed in the local shading frame.
#[derive(Debug, Clone, Copy)]
pub struct ShadingInfo {
    // Which side of the surface was hit.
    pub side: HitSide,
    // NOTE(review): presumably the direction toward the viewer — confirm at call sites.
    pub outgoing: Unit3,
}

impl ShadingInfo {
    /// Cosine of the polar angle of `outgoing`.
    pub fn cos_theta(&self) -> f64 {
        cos_theta(self.outgoing)
    }

    /// Sine of the polar angle of `outgoing`.
    pub fn sin_theta(&self) -> f64 {
        sin_theta(self.outgoing)
    }
}
/// Probability density of a sampled direction.
///
/// `Delta` marks a degenerate (Dirac delta) distribution — e.g. perfect
/// mirror reflection — where no finite density exists.
#[derive(Debug, Clone, Copy)]
pub enum Pdf {
    Real(f64),
    Delta,
}

impl Pdf {
    /// Monte-Carlo weight for a sample from this distribution: the
    /// reciprocal density for a real pdf, and 1 for a delta distribution.
    pub fn factor(&self) -> f64 {
        if let Pdf::Real(density) = *self {
            density.recip()
        } else {
            1.
        }
    }
}
/// A sampled direction together with its associated color and pdf.
#[derive(Debug, Clone, Copy)]
pub struct SampledRadiance {
    // Sampled direction in the local shading frame.
    pub dir: Unit3,
    // NOTE(review): presumably the BSDF/reflectance value for `dir` — confirm
    // against the samplers that construct this.
    pub color: Vec3,
    pub pdf: Pdf,
}

impl SampledRadiance {
    /// Sample drawn with a finite (real-valued) pdf.
    pub fn new_real(dir: Unit3, color: Vec3, pdf: f64) -> Self {
        Self {
            dir,
            color,
            pdf: Pdf::Real(pdf),
        }
    }

    /// Sample drawn from a Dirac-delta distribution (e.g. perfect specular).
    pub fn new_delta(dir: Unit3, color: Vec3) -> Self {
        Self {
            dir,
            color,
            pdf: Pdf::Delta,
        }
    }

    /// Weighted contribution: `cos(theta) * color * pdf_factor`, where the
    /// factor is `1/pdf` for real pdfs and `1` for delta distributions.
    pub fn scaled_color(&self) -> Vec3 {
        cos_theta(self.dir) * self.pdf.factor() * self.color
    }
}
|
use std::io;
use std::fmt;
use ocl;
/// Top-level error type for this crate, aggregating the error sources it
/// deals with: std I/O, OpenCL (via the `ocl` crate), and free-form messages.
#[derive(Debug)]
pub enum Error {
    Io(io::Error),
    Ocl(ocl::Error),
    Other(String),
}
impl fmt::Display for Error {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
Error::Io(e) => write!(f, "Io: {:?}\n{}", e.kind(), e),
Error::Ocl(e) => write!(f, "Ocl:\n{}", e),
Error::Other(s) => write!(f, "Other:\n{}", s),
}
}
}
// `From` conversions so `?` can lift each underlying error into `Error`.
impl From<io::Error> for Error {
    fn from(e: io::Error) -> Self {
        Error::Io(e)
    }
}

impl From<ocl::Error> for Error {
    fn from(e: ocl::Error) -> Self {
        Error::Ocl(e)
    }
}

impl From<String> for Error {
    fn from(s: String) -> Self {
        Error::Other(s)
    }
}

impl From<&str> for Error {
    fn from(s: &str) -> Self {
        Error::Other(s.to_string())
    }
}
|
extern crate actix_web;
extern crate serde;
use actix_web::{web, App, HttpRequest, HttpServer, Responder};
mod enums;
mod packages;
mod objects;
use crate::objects::token::Token;
/// Greeting handler: uses the `name` path segment when present, otherwise
/// falls back to "World".
fn greet(req: HttpRequest) -> impl Responder {
    let name = match req.match_info().get("name") {
        Some(n) => n,
        None => "World",
    };
    format!("Hello {}!", name)
}
// Returns a default `Token` serialized as a JSON response body.
fn json_encode() -> impl Responder {
    web::Json(Token::default())
}
fn main() {
    // Route table: `/` and `/{name}` greet; `/test` returns a JSON `Token`.
    // `/test` is registered before the `/{name}` catch-all so it wins for
    // that exact path.
    HttpServer::new(|| {
        App::new()
            .route("/", web::get().to(greet))
            .route("/test", web::get().to(json_encode))
            .route("/{name}", web::get().to(greet))
    })
    .bind("127.0.0.1:8000")
    .expect("Can not bind to port 8000")
    .run()
    .unwrap(); // blocks until the server shuts down
}
|
use super::abstract_container::AbstractContainer;
use super::component::Component;
use std::slice::IterMut;
/// A homogeneous collection of components backed by a `Vec`.
pub struct Container<T: Component> {
    components: Vec<T>,
}

impl<T: Component> Container<T> {
    /// Creates an empty container.
    pub fn new() -> Container<T> {
        Container { components: Vec::new() }
    }

    /// Appends a component at the end of the container.
    pub fn insert(&mut self, component: T) {
        self.components.push(component);
    }

    /// Mutable iterator over the stored components, in insertion order.
    pub fn iter_mut(&mut self) -> IterMut<T> {
        self.components.iter_mut()
    }
}

impl<T: Component> Default for Container<T> {
    /// Equivalent to [`Container::new`].
    fn default() -> Container<T> {
        Self::new()
    }
}

impl<T: Component> AbstractContainer for Container<T> {
    /// Number of components currently stored.
    fn len(&self) -> usize {
        self.components.len()
    }
}
|
use std::any::TypeId;
/// Returns the runtime `TypeId` of `String`.
fn string() -> TypeId {
    let string_type: TypeId = TypeId::of::<String>();
    string_type
}
/// Prints a note when the given `TypeId` is `String`'s; otherwise does nothing.
fn foo(para_type: TypeId) {
    let is_string = TypeId::of::<String>() == para_type;
    if is_string {
        println!("参数为类型 String");
    }
}
fn main() {
    // Demonstrates comparing runtime type ids: this prints the message,
    // since `string()` returns `TypeId::of::<String>()`.
    foo(string());
}
|
use super::{
flash::{ConfigWriter, FlashError},
BtnsType, NUM_BTS,
};
use core::{
convert::TryFrom,
sync::atomic::{compiler_fence, Ordering},
};
use debouncer::typenum::consts::*;
use debouncer::{BtnState, PortDebouncer};
use heapless::spsc::Producer;
use keylib::{
key_code::{
valid_ranges::{ZONE1_FIRST, ZONE1_LAST, ZONE2_FIRST, ZONE2_LAST},
KbHidReport, KeyCode,
},
packets::{AppCommand, DescriptorType, ReportType, Request, VendorCommand},
CTRL_INTERFACE,
};
use usb_device::{
bus::{InterfaceNumber, StringIndex, UsbBus, UsbBusAllocator},
class::{ControlIn, ControlOut, UsbClass},
control::{self, Recipient, RequestType},
descriptor::DescriptorWriter,
endpoint::{EndpointAddress, EndpointIn},
UsbError,
};
// HID report descriptor for the keyboard interface: an 8-bit modifier bitmap,
// one reserved byte, then six 8-bit key-code array slots (usages 0x00..=0xFB).
#[rustfmt::skip]
const KEY_REPORT_DESCRIPTOR: &[u8] = &[
    0x05, 0x01, // Usage Page (Generic Desktop Ctrls)
    0x09, 0x06, // Usage (Keyboard)
    0xA1, 0x01, // Collection (Application)
    0x05, 0x07, // Usage Page (Kbrd/Keypad)
    0x19, 0xE0, // Usage Minimum (0xE0)
    0x29, 0xE7, // Usage Maximum (0xE7)
    0x15, 0x00, // Logical Minimum (0)
    0x25, 0x01, // Logical Maximum (1)
    0x75, 0x01, // Report Size (1)
    0x95, 0x08, // Report Count (8)
    0x81, 0x02, // Input (Data,Var,Abs,No Wrap,Linear,Preferred State,No Null Position)
    0x95, 0x01, // Report Count (1)
    0x75, 0x08, // Report Size (8)
    0x81, 0x03, // Input (Const,Var,Abs,No Wrap,Linear,Preferred State,No Null Position)
    0x95, 0x06, // Report Count (6)
    0x75, 0x08, // Report Size (8)
    0x15, 0x00, // Logical Minimum (0)
    0x26, 0xFB, 0x00, // Logical Maximum (0xFB)
    0x05, 0x07, // Usage Page (Kbrd/Keypad)
    0x19, 0x00, // Usage Minimum (0x00)
    0x29, 0xFB, // Usage Maximum (0xFB)
    0x81, 0x00, // Input (Data,Array,Abs,No Wrap,Linear,Preferred State,No Null Position)
    0xC0, // End Collection
];

// Windows doesn't let you access a keyboard interface, so create another interface for
// configuration. A WinUSB interface would be better, but I hit libusb #619.
// This vendor-defined descriptor exposes a single 2-byte feature report.
#[rustfmt::skip]
const CTRL_REPORT_DESCRIPTOR: &[u8] = &[
    0x06, 0x00, 0xFF, // Usage Page (Vendor Defined 0xFF00)
    0x09, 0x01, // Usage (Vendor 1)
    0xA1, 0x01, // Collection (Application)
    0x09, 0x01, // Usage (Vendor 1)
    0x15, 0x00, // Logical Minimum (0)
    0x26, 0xFF, 0x00, // Logical Maximum (255)
    0x75, 0x08, // Report Size (8)
    0x95, 0x02, // Report Count (2)
    0xB1, 0x02, // Feature (Data,Var,Abs,No Wrap,Linear,Preferred State,No Null Position,Non-volatile)
    0xC0, // End Collection
];

// bcdHID: HID specification release 1.11 in BCD.
const SPECIFICATION_RELEASE: u16 = 0x111;
const INTERFACE_CLASS_HID: u8 = 0x03;
const SUBCLASS_NONE: u8 = 0x00;
const KEYBOARD_PROTOCOL: u8 = 0x01;

/// Composite USB HID class: a keyboard on interface 0 plus a vendor
/// "control" interface (interface 1) used for configuration commands.
pub struct Keykey<'a, 'b, B: UsbBus> {
    interface: InterfaceNumber,
    ctrl_interface: InterfaceNumber,
    // Interrupt IN endpoint carrying keyboard input reports.
    endpoint_interrupt_in: EndpointIn<'a, B>,
    // NOTE(review): never written to in this file — presumably present so the
    // control interface has an endpoint in its descriptor; confirm.
    dummy_endpoint: EndpointIn<'a, B>,
    // True while a full-size report is in flight awaiting its completion IRQ.
    expect_interrupt_in_complete: bool,
    // Last keyboard report handed to `set_keyboard_report`.
    report: KbHidReport,
    // Queue toward the application for vendor commands received over USB.
    cmd_prod: Producer<'b, AppCommand, U8>,
}
impl<'a, 'b, B: UsbBus> Keykey<'a, 'b, B> {
    /// Allocates both interfaces and their endpoints on `alloc` and stores
    /// the producer side of the application command queue.
    pub fn new(alloc: &'a UsbBusAllocator<B>, prod: Producer<'b, AppCommand, U8>) -> Self {
        let key_interface = alloc.interface();
        // We want the key interface to be 0 and the ctrl interface to be 1. We rely
        // on this because hidapi on linux can't retrieve usage_page/usage correctly,
        // so we need to know the number of the control interface beforehand.
        compiler_fence(Ordering::SeqCst);
        let keykey = Self {
            interface: key_interface,
            ctrl_interface: alloc.interface(),
            // 8-byte max packet / 10 ms poll for keys; 16-byte for the dummy.
            endpoint_interrupt_in: alloc.interrupt(8, 10),
            dummy_endpoint: alloc.interrupt(16, 10),
            expect_interrupt_in_complete: false,
            report: KbHidReport::new(),
            cmd_prod: prod,
        };
        // This should always be true, given how `alloc.interface()` is implemented;
        // this assert is here to be precautious about future changes.
        assert_eq!(u8::from(keykey.ctrl_interface), CTRL_INTERFACE);
        keykey
    }

    /// Writes an input report to the interrupt IN endpoint.
    ///
    /// Returns `Ok(0)` when a previous report is still in flight or the
    /// endpoint would block, `Ok(count)` on success, `Err(())` otherwise.
    pub fn write(&mut self, data: &[u8]) -> Result<usize, ()> {
        if self.expect_interrupt_in_complete {
            return Ok(0);
        }
        if data.len() >= 8 {
            // Full-size packet: wait for `endpoint_in_complete` before writing more.
            // NOTE(review): the flag is set before the write result is known, so a
            // `WouldBlock` below still leaves a completion marked as pending —
            // confirm a completion interrupt always follows, or set after the write.
            self.expect_interrupt_in_complete = true;
        }
        match self.endpoint_interrupt_in.write(data) {
            Ok(count) => Ok(count),
            Err(UsbError::WouldBlock) => Ok(0),
            Err(_) => Err(()),
        }
    }

    /// Stores `report` as the current keyboard report.
    ///
    /// Returns `true` only when it differs from the previous one, i.e. when
    /// the caller actually needs to send it to the host.
    pub fn set_keyboard_report(&mut self, report: KbHidReport) -> bool {
        if report == self.report {
            false
        } else {
            self.report = report;
            true
        }
    }

    /// Handles a HID GET_REPORT control request addressed to either interface.
    fn get_report(&mut self, xfer: ControlIn<B>) {
        let req = xfer.request();
        // wValue = (report type << 8) | report id.
        let [report_type, _report_id] = req.value.to_be_bytes();
        let report_type = ReportType::from(report_type);
        let interface = req.index as u8;
        let response = if interface == u8::from(self.interface) {
            self.report.as_bytes()
        } else if interface == u8::from(self.ctrl_interface) {
            &[0; 16]
        } else {
            // This isn't for us
            return;
        };
        if req.length < response.len() as u16 {
            // Host's buffer is smaller than the report we would send.
            xfer.reject().ok();
            return;
        }
        match report_type {
            ReportType::Input | ReportType::Feature => xfer.accept_with(response).ok(),
            _ => xfer.reject().ok(),
        };
    }
}
impl<B: UsbBus> UsbClass<B> for Keykey<'_, '_, B> {
    fn poll(&mut self) {}

    fn reset(&mut self) {
        // Bus reset: no completion interrupt will arrive for in-flight data.
        self.expect_interrupt_in_complete = false;
    }

    // Emits interface descriptor + HID descriptor (pointing at the report
    // descriptor) + interrupt IN endpoint, first for the keyboard interface
    // and then for the control interface.
    fn get_configuration_descriptors(
        &self,
        writer: &mut DescriptorWriter,
    ) -> usb_device::Result<()> {
        writer.interface(
            self.interface,
            INTERFACE_CLASS_HID,
            SUBCLASS_NONE,
            KEYBOARD_PROTOCOL,
        )?;
        let descriptor_len = KEY_REPORT_DESCRIPTOR.len();
        if descriptor_len > u16::max_value() as usize {
            // Descriptor lengths are 16-bit on the wire.
            return Err(UsbError::InvalidState);
        }
        let descriptor_len = (descriptor_len as u16).to_le_bytes();
        let specification_release = SPECIFICATION_RELEASE.to_le_bytes();
        writer.write(
            DescriptorType::Hid as u8,
            &[
                specification_release[0], // bcdHID.lower
                specification_release[1], // bcdHID.upper
                0, // bCountryCode: 0 = not supported
                1, // bNumDescriptors
                DescriptorType::Report as u8, // bDescriptorType
                descriptor_len[0], // bDescriptorLength.lower
                descriptor_len[1], // bDescriptorLength.upper
            ],
        )?;
        writer.endpoint(&self.endpoint_interrupt_in)?;
        // CTRL interface
        writer.interface(self.ctrl_interface, INTERFACE_CLASS_HID, SUBCLASS_NONE, 0)?;
        let descriptor_len = CTRL_REPORT_DESCRIPTOR.len();
        if descriptor_len > u16::max_value() as usize {
            return Err(UsbError::InvalidState);
        }
        let descriptor_len = (descriptor_len as u16).to_le_bytes();
        let specification_release = SPECIFICATION_RELEASE.to_le_bytes();
        writer.write(
            DescriptorType::Hid as u8,
            &[
                specification_release[0], // bcdHID.lower
                specification_release[1], // bcdHID.upper
                0, // bCountryCode: 0 = not supported
                1, // bNumDescriptors
                DescriptorType::Report as u8, // bDescriptorType
                descriptor_len[0], // bDescriptorLength.lower
                descriptor_len[1], // bDescriptorLength.upper
            ],
        )?;
        writer.endpoint(&self.dummy_endpoint)?;
        Ok(())
    }

    fn get_string(&self, _index: StringIndex, _lang_id: u16) -> Option<&str> {
        None
    }

    fn endpoint_in_complete(&mut self, addr: EndpointAddress) {
        // The pending keyboard report has been collected by the host.
        if addr == self.endpoint_interrupt_in.address() {
            self.expect_interrupt_in_complete = false;
        }
    }

    fn endpoint_out(&mut self, _addr: EndpointAddress) {}

    // IN control transfers: standard GET_DESCRIPTOR for the HID report
    // descriptors, and class-level GET_REPORT.
    fn control_in(&mut self, xfer: ControlIn<B>) {
        let req = xfer.request();
        match (req.request_type, req.recipient) {
            (RequestType::Standard, Recipient::Interface) => {
                if req.request == control::Request::GET_DESCRIPTOR {
                    let (desc_type, desc_index) = req.descriptor_type_index();
                    // We only have one report for each interface
                    if desc_type == DescriptorType::Report as u8 && desc_index == 0 {
                        let report = if req.index == u8::from(self.interface) as u16 {
                            KEY_REPORT_DESCRIPTOR
                        } else if req.index == u8::from(self.ctrl_interface) as u16 {
                            CTRL_REPORT_DESCRIPTOR
                        } else {
                            // This isn't for us
                            return;
                        };
                        // Never send more than the host asked for.
                        let n = report.len().min(req.length as usize);
                        log!("Sending HID report, iface: {:?}, len: {:?}", req.index, n);
                        xfer.accept_with_static(&report[..n]).ok();
                    }
                }
            }
            (RequestType::Class, Recipient::Interface) => {
                if let Some(Request::GetReport) = Request::new(req.request) {
                    self.get_report(xfer);
                }
            }
            _ => {}
        }
    }

    // OUT control transfers: a vendor SET_REPORT on the control interface
    // carries a `(command, key-code)` byte pair that is queued toward the
    // application; anything else addressed to us is logged.
    fn control_out(&mut self, xfer: ControlOut<B>) {
        let req = xfer.request();
        // Check if this is for us
        if req.request_type == RequestType::Class
            && req.recipient == Recipient::Interface
            && req.index == u8::from(self.ctrl_interface) as u16
        {
            if let Some(Request::SetReport) = Request::new(req.request) {
                let data = xfer.data();
                if data.len() == 2 {
                    if let (Ok(cmd), Ok(key)) =
                        (VendorCommand::try_from(data[0]), KeyCode::try_from(data[1]))
                    {
                        if self
                            .cmd_prod
                            .enqueue(AppCommand::from_req_value(cmd, key))
                            .is_ok()
                        {
                            xfer.accept().ok();
                            return;
                        }
                    }
                }
            }
            log!(
                "Couldn't process request, req: {:?}, data: {:?}",
                req,
                xfer.data()
            );
        }
    }
}
/// The key layout: which `KeyCode` each physical button sends.
#[derive(Debug, Copy, Clone)]
pub struct Matrix {
    layout: [KeyCode; NUM_BTS],
}

impl Matrix {
    /// Default layout: the buttons send A, B and C.
    pub const fn new() -> Self {
        Self {
            layout: [KeyCode::A, KeyCode::B, KeyCode::C],
        }
    }

    /// Applies a configuration command: `Set*` remaps a button in RAM,
    /// `Save` persists the current layout via `writer`.
    pub fn update_layout(
        &mut self,
        command: AppCommand,
        writer: &mut ConfigWriter,
    ) -> Result<(), FlashError> {
        match command {
            AppCommand::Set1(value) => self.layout[0] = value,
            AppCommand::Set2(value) => self.layout[1] = value,
            AppCommand::Set3(value) => self.layout[2] = value,
            AppCommand::Save => writer.write_config(*self)?,
        };
        Ok(())
    }

    /// Builds the HID report for currently pressed buttons. Buttons whose
    /// debouncer state is unavailable (`Err`) are silently skipped.
    pub fn update(&self, debouncer: &mut PortDebouncer<U8, BtnsType>) -> KbHidReport {
        let mut report = KbHidReport::new();
        for (index, &btn) in self.layout.iter().enumerate() {
            let state = debouncer.get_state(index);
            if let Ok(value) = state {
                if value != BtnState::UnPressed {
                    report.pressed(btn);
                }
            }
        }
        report
    }

    /// Raw byte view of the layout, suitable for persisting to flash.
    pub fn to_bytes(self) -> [u8; NUM_BTS] {
        // NOTE(unsafe) `self.layout` is `[KeyCode; NUM_BTS]` and `KeyCode` is `repr(u8)`
        unsafe { core::mem::transmute(self.layout) }
    }

    /// Reconstructs a layout from persisted bytes, rejecting arrays that
    /// contain any byte outside the two valid key-code ranges.
    pub fn from_bytes(bytes: [u8; NUM_BTS]) -> Option<Self> {
        // Look for invalid codes
        #[allow(clippy::absurd_extreme_comparisons)]
        let invalid_code = bytes.iter().any(|&code| {
            // The first test will probably get optimized out when `ZONE1_FIRST` == 0, but we do it
            // anyway because that can change
            (code < ZONE1_FIRST) || (code > ZONE1_LAST && code < ZONE2_FIRST) || (code > ZONE2_LAST)
        });
        if invalid_code {
            None
        } else {
            // NOTE(unsafe) safe based on the check above: every byte is a valid `KeyCode`
            unsafe {
                Some(Self {
                    layout: core::mem::transmute(bytes),
                })
            }
        }
    }
}
|
#![allow(unused_variables, non_upper_case_globals, non_snake_case, unused_unsafe, non_camel_case_types, dead_code, clippy::all)]
#[repr(transparent)]
#[doc(hidden)]
pub struct IPhoneCallOrigin(pub ::windows::core::IInspectable);
unsafe impl ::windows::core::Interface for IPhoneCallOrigin {
type Vtable = IPhoneCallOrigin_abi;
const IID: ::windows::core::GUID = ::windows::core::GUID::from_u128(0x20613479_0ef9_4454_871c_afb66a14b6a5);
}
#[repr(C)]
#[doc(hidden)]
pub struct IPhoneCallOrigin_abi(
pub unsafe extern "system" fn(this: ::windows::core::RawPtr, iid: &::windows::core::GUID, interface: *mut ::windows::core::RawPtr) -> ::windows::core::HRESULT,
pub unsafe extern "system" fn(this: ::windows::core::RawPtr) -> u32,
pub unsafe extern "system" fn(this: ::windows::core::RawPtr) -> u32,
pub unsafe extern "system" fn(this: ::windows::core::RawPtr, count: *mut u32, values: *mut *mut ::windows::core::GUID) -> ::windows::core::HRESULT,
pub unsafe extern "system" fn(this: ::windows::core::RawPtr, value: *mut ::windows::core::RawPtr) -> ::windows::core::HRESULT,
pub unsafe extern "system" fn(this: ::windows::core::RawPtr, value: *mut i32) -> ::windows::core::HRESULT,
pub unsafe extern "system" fn(this: ::windows::core::RawPtr, result__: *mut ::core::mem::ManuallyDrop<::windows::core::HSTRING>) -> ::windows::core::HRESULT,
pub unsafe extern "system" fn(this: ::windows::core::RawPtr, value: ::core::mem::ManuallyDrop<::windows::core::HSTRING>) -> ::windows::core::HRESULT,
pub unsafe extern "system" fn(this: ::windows::core::RawPtr, result__: *mut ::core::mem::ManuallyDrop<::windows::core::HSTRING>) -> ::windows::core::HRESULT,
pub unsafe extern "system" fn(this: ::windows::core::RawPtr, value: ::core::mem::ManuallyDrop<::windows::core::HSTRING>) -> ::windows::core::HRESULT,
pub unsafe extern "system" fn(this: ::windows::core::RawPtr, result__: *mut ::core::mem::ManuallyDrop<::windows::core::HSTRING>) -> ::windows::core::HRESULT,
pub unsafe extern "system" fn(this: ::windows::core::RawPtr, value: ::core::mem::ManuallyDrop<::windows::core::HSTRING>) -> ::windows::core::HRESULT,
);
#[repr(transparent)]
#[doc(hidden)]
pub struct IPhoneCallOrigin2(pub ::windows::core::IInspectable);
unsafe impl ::windows::core::Interface for IPhoneCallOrigin2 {
type Vtable = IPhoneCallOrigin2_abi;
const IID: ::windows::core::GUID = ::windows::core::GUID::from_u128(0x04c7e980_9ac2_4768_b536_b68da4957d02);
}
#[repr(C)]
#[doc(hidden)]
pub struct IPhoneCallOrigin2_abi(
pub unsafe extern "system" fn(this: ::windows::core::RawPtr, iid: &::windows::core::GUID, interface: *mut ::windows::core::RawPtr) -> ::windows::core::HRESULT,
pub unsafe extern "system" fn(this: ::windows::core::RawPtr) -> u32,
pub unsafe extern "system" fn(this: ::windows::core::RawPtr) -> u32,
pub unsafe extern "system" fn(this: ::windows::core::RawPtr, count: *mut u32, values: *mut *mut ::windows::core::GUID) -> ::windows::core::HRESULT,
pub unsafe extern "system" fn(this: ::windows::core::RawPtr, value: *mut ::windows::core::RawPtr) -> ::windows::core::HRESULT,
pub unsafe extern "system" fn(this: ::windows::core::RawPtr, value: *mut i32) -> ::windows::core::HRESULT,
pub unsafe extern "system" fn(this: ::windows::core::RawPtr, result__: *mut ::core::mem::ManuallyDrop<::windows::core::HSTRING>) -> ::windows::core::HRESULT,
pub unsafe extern "system" fn(this: ::windows::core::RawPtr, value: ::core::mem::ManuallyDrop<::windows::core::HSTRING>) -> ::windows::core::HRESULT,
);
#[repr(transparent)]
#[doc(hidden)]
pub struct IPhoneCallOrigin3(pub ::windows::core::IInspectable);
unsafe impl ::windows::core::Interface for IPhoneCallOrigin3 {
type Vtable = IPhoneCallOrigin3_abi;
const IID: ::windows::core::GUID = ::windows::core::GUID::from_u128(0x49330fb4_d1a7_43a2_aeee_c07b6dbaf068);
}
#[repr(C)]
#[doc(hidden)]
pub struct IPhoneCallOrigin3_abi(
pub unsafe extern "system" fn(this: ::windows::core::RawPtr, iid: &::windows::core::GUID, interface: *mut ::windows::core::RawPtr) -> ::windows::core::HRESULT,
pub unsafe extern "system" fn(this: ::windows::core::RawPtr) -> u32,
pub unsafe extern "system" fn(this: ::windows::core::RawPtr) -> u32,
pub unsafe extern "system" fn(this: ::windows::core::RawPtr, count: *mut u32, values: *mut *mut ::windows::core::GUID) -> ::windows::core::HRESULT,
pub unsafe extern "system" fn(this: ::windows::core::RawPtr, value: *mut ::windows::core::RawPtr) -> ::windows::core::HRESULT,
pub unsafe extern "system" fn(this: ::windows::core::RawPtr, value: *mut i32) -> ::windows::core::HRESULT,
#[cfg(feature = "Storage")] pub unsafe extern "system" fn(this: ::windows::core::RawPtr, result__: *mut ::windows::core::RawPtr) -> ::windows::core::HRESULT,
#[cfg(not(feature = "Storage"))] usize,
#[cfg(feature = "Storage")] pub unsafe extern "system" fn(this: ::windows::core::RawPtr, value: ::windows::core::RawPtr) -> ::windows::core::HRESULT,
#[cfg(not(feature = "Storage"))] usize,
);
#[repr(transparent)]
#[doc(hidden)]
pub struct IPhoneCallOriginManagerStatics(pub ::windows::core::IInspectable);
unsafe impl ::windows::core::Interface for IPhoneCallOriginManagerStatics {
type Vtable = IPhoneCallOriginManagerStatics_abi;
const IID: ::windows::core::GUID = ::windows::core::GUID::from_u128(0xccfc5a0a_9af7_6149_39d0_e076fcce1395);
}
#[repr(C)]
#[doc(hidden)]
pub struct IPhoneCallOriginManagerStatics_abi(
pub unsafe extern "system" fn(this: ::windows::core::RawPtr, iid: &::windows::core::GUID, interface: *mut ::windows::core::RawPtr) -> ::windows::core::HRESULT,
pub unsafe extern "system" fn(this: ::windows::core::RawPtr) -> u32,
pub unsafe extern "system" fn(this: ::windows::core::RawPtr) -> u32,
pub unsafe extern "system" fn(this: ::windows::core::RawPtr, count: *mut u32, values: *mut *mut ::windows::core::GUID) -> ::windows::core::HRESULT,
pub unsafe extern "system" fn(this: ::windows::core::RawPtr, value: *mut ::windows::core::RawPtr) -> ::windows::core::HRESULT,
pub unsafe extern "system" fn(this: ::windows::core::RawPtr, value: *mut i32) -> ::windows::core::HRESULT,
pub unsafe extern "system" fn(this: ::windows::core::RawPtr, result__: *mut bool) -> ::windows::core::HRESULT,
pub unsafe extern "system" fn(this: ::windows::core::RawPtr) -> ::windows::core::HRESULT,
pub unsafe extern "system" fn(this: ::windows::core::RawPtr, requestid: ::windows::core::GUID, callorigin: ::windows::core::RawPtr) -> ::windows::core::HRESULT,
);
#[repr(transparent)]
#[doc(hidden)]
pub struct IPhoneCallOriginManagerStatics2(pub ::windows::core::IInspectable);
unsafe impl ::windows::core::Interface for IPhoneCallOriginManagerStatics2 {
type Vtable = IPhoneCallOriginManagerStatics2_abi;
const IID: ::windows::core::GUID = ::windows::core::GUID::from_u128(0x8bf3ee3f_40f4_4380_8c7c_aea2c9b8dd7a);
}
#[repr(C)]
#[doc(hidden)]
pub struct IPhoneCallOriginManagerStatics2_abi(
pub unsafe extern "system" fn(this: ::windows::core::RawPtr, iid: &::windows::core::GUID, interface: *mut ::windows::core::RawPtr) -> ::windows::core::HRESULT,
pub unsafe extern "system" fn(this: ::windows::core::RawPtr) -> u32,
pub unsafe extern "system" fn(this: ::windows::core::RawPtr) -> u32,
pub unsafe extern "system" fn(this: ::windows::core::RawPtr, count: *mut u32, values: *mut *mut ::windows::core::GUID) -> ::windows::core::HRESULT,
pub unsafe extern "system" fn(this: ::windows::core::RawPtr, value: *mut ::windows::core::RawPtr) -> ::windows::core::HRESULT,
pub unsafe extern "system" fn(this: ::windows::core::RawPtr, value: *mut i32) -> ::windows::core::HRESULT,
#[cfg(feature = "Foundation")] pub unsafe extern "system" fn(this: ::windows::core::RawPtr, result__: *mut ::windows::core::RawPtr) -> ::windows::core::HRESULT,
#[cfg(not(feature = "Foundation"))] usize,
);
#[repr(transparent)]
#[doc(hidden)]
pub struct IPhoneCallOriginManagerStatics3(pub ::windows::core::IInspectable);
unsafe impl ::windows::core::Interface for IPhoneCallOriginManagerStatics3 {
type Vtable = IPhoneCallOriginManagerStatics3_abi;
const IID: ::windows::core::GUID = ::windows::core::GUID::from_u128(0x2ed69764_a6e3_50f0_b76a_d67cb39bdfde);
}
#[repr(C)]
#[doc(hidden)]
pub struct IPhoneCallOriginManagerStatics3_abi(
pub unsafe extern "system" fn(this: ::windows::core::RawPtr, iid: &::windows::core::GUID, interface: *mut ::windows::core::RawPtr) -> ::windows::core::HRESULT,
pub unsafe extern "system" fn(this: ::windows::core::RawPtr) -> u32,
pub unsafe extern "system" fn(this: ::windows::core::RawPtr) -> u32,
pub unsafe extern "system" fn(this: ::windows::core::RawPtr, count: *mut u32, values: *mut *mut ::windows::core::GUID) -> ::windows::core::HRESULT,
pub unsafe extern "system" fn(this: ::windows::core::RawPtr, value: *mut ::windows::core::RawPtr) -> ::windows::core::HRESULT,
pub unsafe extern "system" fn(this: ::windows::core::RawPtr, value: *mut i32) -> ::windows::core::HRESULT,
pub unsafe extern "system" fn(this: ::windows::core::RawPtr, result__: *mut bool) -> ::windows::core::HRESULT,
);
#[repr(transparent)]
#[derive(:: core :: cmp :: PartialEq, :: core :: cmp :: Eq, :: core :: clone :: Clone, :: core :: fmt :: Debug)]
pub struct PhoneCallOrigin(pub ::windows::core::IInspectable);
impl PhoneCallOrigin {
pub fn new() -> ::windows::core::Result<Self> {
Self::IActivationFactory(|f| f.activate_instance::<Self>())
}
fn IActivationFactory<R, F: FnOnce(&::windows::core::IActivationFactory) -> ::windows::core::Result<R>>(callback: F) -> ::windows::core::Result<R> {
static mut SHARED: ::windows::core::FactoryCache<PhoneCallOrigin, ::windows::core::IActivationFactory> = ::windows::core::FactoryCache::new();
unsafe { SHARED.call(callback) }
}
pub fn Category(&self) -> ::windows::core::Result<::windows::core::HSTRING> {
let this = self;
unsafe {
let mut result__: ::core::mem::ManuallyDrop<::windows::core::HSTRING> = ::core::mem::zeroed();
(::windows::core::Interface::vtable(this).6)(::core::mem::transmute_copy(this), &mut result__).from_abi::<::windows::core::HSTRING>(result__)
}
}
pub fn SetCategory<'a, Param0: ::windows::core::IntoParam<'a, ::windows::core::HSTRING>>(&self, value: Param0) -> ::windows::core::Result<()> {
let this = self;
unsafe { (::windows::core::Interface::vtable(this).7)(::core::mem::transmute_copy(this), value.into_param().abi()).ok() }
}
pub fn CategoryDescription(&self) -> ::windows::core::Result<::windows::core::HSTRING> {
let this = self;
unsafe {
let mut result__: ::core::mem::ManuallyDrop<::windows::core::HSTRING> = ::core::mem::zeroed();
(::windows::core::Interface::vtable(this).8)(::core::mem::transmute_copy(this), &mut result__).from_abi::<::windows::core::HSTRING>(result__)
}
}
pub fn SetCategoryDescription<'a, Param0: ::windows::core::IntoParam<'a, ::windows::core::HSTRING>>(&self, value: Param0) -> ::windows::core::Result<()> {
let this = self;
unsafe { (::windows::core::Interface::vtable(this).9)(::core::mem::transmute_copy(this), value.into_param().abi()).ok() }
}
pub fn Location(&self) -> ::windows::core::Result<::windows::core::HSTRING> {
let this = self;
unsafe {
let mut result__: ::core::mem::ManuallyDrop<::windows::core::HSTRING> = ::core::mem::zeroed();
(::windows::core::Interface::vtable(this).10)(::core::mem::transmute_copy(this), &mut result__).from_abi::<::windows::core::HSTRING>(result__)
}
}
pub fn SetLocation<'a, Param0: ::windows::core::IntoParam<'a, ::windows::core::HSTRING>>(&self, value: Param0) -> ::windows::core::Result<()> {
let this = self;
unsafe { (::windows::core::Interface::vtable(this).11)(::core::mem::transmute_copy(this), value.into_param().abi()).ok() }
}
pub fn DisplayName(&self) -> ::windows::core::Result<::windows::core::HSTRING> {
let this = &::windows::core::Interface::cast::<IPhoneCallOrigin2>(self)?;
unsafe {
let mut result__: ::core::mem::ManuallyDrop<::windows::core::HSTRING> = ::core::mem::zeroed();
(::windows::core::Interface::vtable(this).6)(::core::mem::transmute_copy(this), &mut result__).from_abi::<::windows::core::HSTRING>(result__)
}
}
pub fn SetDisplayName<'a, Param0: ::windows::core::IntoParam<'a, ::windows::core::HSTRING>>(&self, value: Param0) -> ::windows::core::Result<()> {
let this = &::windows::core::Interface::cast::<IPhoneCallOrigin2>(self)?;
unsafe { (::windows::core::Interface::vtable(this).7)(::core::mem::transmute_copy(this), value.into_param().abi()).ok() }
}
#[cfg(feature = "Storage")]
pub fn DisplayPicture(&self) -> ::windows::core::Result<super::super::super::Storage::StorageFile> {
let this = &::windows::core::Interface::cast::<IPhoneCallOrigin3>(self)?;
unsafe {
let mut result__: ::windows::core::RawPtr = ::core::mem::zeroed();
(::windows::core::Interface::vtable(this).6)(::core::mem::transmute_copy(this), &mut result__).from_abi::<super::super::super::Storage::StorageFile>(result__)
}
}
/// Writes the `DisplayPicture` through `IPhoneCallOrigin3` (vtable slot 7).
#[cfg(feature = "Storage")]
pub fn SetDisplayPicture<'a, Param0: ::windows::core::IntoParam<'a, super::super::super::Storage::StorageFile>>(&self, value: Param0) -> ::windows::core::Result<()> {
let this = &::windows::core::Interface::cast::<IPhoneCallOrigin3>(self)?;
unsafe { (::windows::core::Interface::vtable(this).7)(::core::mem::transmute_copy(this), value.into_param().abi()).ok() }
}
}
// Runtime signature string; must match the winmd metadata for this runtime class.
unsafe impl ::windows::core::RuntimeType for PhoneCallOrigin {
const SIGNATURE: ::windows::core::ConstBuffer = ::windows::core::ConstBuffer::from_slice(b"rc(Windows.ApplicationModel.Calls.Provider.PhoneCallOrigin;{20613479-0ef9-4454-871c-afb66a14b6a5})");
}
// Binds the class to its default interface vtable and IID.
unsafe impl ::windows::core::Interface for PhoneCallOrigin {
type Vtable = IPhoneCallOrigin_abi;
const IID: ::windows::core::GUID = ::windows::core::GUID::from_u128(0x20613479_0ef9_4454_871c_afb66a14b6a5);
}
impl ::windows::core::RuntimeName for PhoneCallOrigin {
const NAME: &'static str = "Windows.ApplicationModel.Calls.Provider.PhoneCallOrigin";
}
// Conversions down to IUnknown: `value.0` is the IInspectable wrapper and
// `value.0 .0` its inner IUnknown.
impl ::core::convert::From<PhoneCallOrigin> for ::windows::core::IUnknown {
fn from(value: PhoneCallOrigin) -> Self {
value.0 .0
}
}
impl ::core::convert::From<&PhoneCallOrigin> for ::windows::core::IUnknown {
fn from(value: &PhoneCallOrigin) -> Self {
value.0 .0.clone()
}
}
impl<'a> ::windows::core::IntoParam<'a, ::windows::core::IUnknown> for PhoneCallOrigin {
fn into_param(self) -> ::windows::core::Param<'a, ::windows::core::IUnknown> {
::windows::core::Param::Owned(self.0 .0)
}
}
impl<'a> ::windows::core::IntoParam<'a, ::windows::core::IUnknown> for &'a PhoneCallOrigin {
fn into_param(self) -> ::windows::core::Param<'a, ::windows::core::IUnknown> {
::windows::core::Param::Borrowed(&self.0 .0)
}
}
// Conversions to IInspectable (the class's direct wrapper field).
impl ::core::convert::From<PhoneCallOrigin> for ::windows::core::IInspectable {
fn from(value: PhoneCallOrigin) -> Self {
value.0
}
}
impl ::core::convert::From<&PhoneCallOrigin> for ::windows::core::IInspectable {
fn from(value: &PhoneCallOrigin) -> Self {
value.0.clone()
}
}
impl<'a> ::windows::core::IntoParam<'a, ::windows::core::IInspectable> for PhoneCallOrigin {
fn into_param(self) -> ::windows::core::Param<'a, ::windows::core::IInspectable> {
::windows::core::Param::Owned(self.0)
}
}
impl<'a> ::windows::core::IntoParam<'a, ::windows::core::IInspectable> for &'a PhoneCallOrigin {
fn into_param(self) -> ::windows::core::Param<'a, ::windows::core::IInspectable> {
::windows::core::Param::Borrowed(&self.0)
}
}
// SAFETY: generated marker impls — WinRT interface pointers are agile here.
unsafe impl ::core::marker::Send for PhoneCallOrigin {}
unsafe impl ::core::marker::Sync for PhoneCallOrigin {}
/// Static-only runtime class: all methods go through cached activation factories.
pub struct PhoneCallOriginManager {}
impl PhoneCallOriginManager {
pub fn IsCurrentAppActiveCallOriginApp() -> ::windows::core::Result<bool> {
Self::IPhoneCallOriginManagerStatics(|this| unsafe {
let mut result__: bool = ::core::mem::zeroed();
(::windows::core::Interface::vtable(this).6)(::core::mem::transmute_copy(this), &mut result__).from_abi::<bool>(result__)
})
}
pub fn ShowPhoneCallOriginSettingsUI() -> ::windows::core::Result<()> {
Self::IPhoneCallOriginManagerStatics(|this| unsafe { (::windows::core::Interface::vtable(this).7)(::core::mem::transmute_copy(this)).ok() })
}
pub fn SetCallOrigin<'a, Param0: ::windows::core::IntoParam<'a, ::windows::core::GUID>, Param1: ::windows::core::IntoParam<'a, PhoneCallOrigin>>(requestid: Param0, callorigin: Param1) -> ::windows::core::Result<()> {
Self::IPhoneCallOriginManagerStatics(|this| unsafe { (::windows::core::Interface::vtable(this).8)(::core::mem::transmute_copy(this), requestid.into_param().abi(), callorigin.into_param().abi()).ok() })
}
#[cfg(feature = "Foundation")]
pub fn RequestSetAsActiveCallOriginAppAsync() -> ::windows::core::Result<super::super::super::Foundation::IAsyncOperation<bool>> {
Self::IPhoneCallOriginManagerStatics2(|this| unsafe {
let mut result__: ::windows::core::RawPtr = ::core::mem::zeroed();
(::windows::core::Interface::vtable(this).6)(::core::mem::transmute_copy(this), &mut result__).from_abi::<super::super::super::Foundation::IAsyncOperation<bool>>(result__)
})
}
pub fn IsSupported() -> ::windows::core::Result<bool> {
Self::IPhoneCallOriginManagerStatics3(|this| unsafe {
let mut result__: bool = ::core::mem::zeroed();
(::windows::core::Interface::vtable(this).6)(::core::mem::transmute_copy(this), &mut result__).from_abi::<bool>(result__)
})
}
// Factory accessors below cache the activation factory in a `static mut`.
// NOTE(review): `static mut` + `SHARED.call` is generated code relying on
// FactoryCache's internal synchronization — do not hand-modify.
pub fn IPhoneCallOriginManagerStatics<R, F: FnOnce(&IPhoneCallOriginManagerStatics) -> ::windows::core::Result<R>>(callback: F) -> ::windows::core::Result<R> {
static mut SHARED: ::windows::core::FactoryCache<PhoneCallOriginManager, IPhoneCallOriginManagerStatics> = ::windows::core::FactoryCache::new();
unsafe { SHARED.call(callback) }
}
pub fn IPhoneCallOriginManagerStatics2<R, F: FnOnce(&IPhoneCallOriginManagerStatics2) -> ::windows::core::Result<R>>(callback: F) -> ::windows::core::Result<R> {
static mut SHARED: ::windows::core::FactoryCache<PhoneCallOriginManager, IPhoneCallOriginManagerStatics2> = ::windows::core::FactoryCache::new();
unsafe { SHARED.call(callback) }
}
pub fn IPhoneCallOriginManagerStatics3<R, F: FnOnce(&IPhoneCallOriginManagerStatics3) -> ::windows::core::Result<R>>(callback: F) -> ::windows::core::Result<R> {
static mut SHARED: ::windows::core::FactoryCache<PhoneCallOriginManager, IPhoneCallOriginManagerStatics3> = ::windows::core::FactoryCache::new();
unsafe { SHARED.call(callback) }
}
}
impl ::windows::core::RuntimeName for PhoneCallOriginManager {
const NAME: &'static str = "Windows.ApplicationModel.Calls.Provider.PhoneCallOriginManager";
}
|
//! /api/v0/pubsub module.
//!
//! /api/v0/pubsub/sub?arg=topic allows multiple clients to subscribe to the single topic, with
//! semantics of getting the messages received on that topic from request onwards. This is
//! implemented with [`tokio::sync::broadcast`] which supports these semantics.
//!
//! # Panics
//!
//! The subscription functionality *assumes* that there are no other users for
//! `ipfs::Ipfs::pubsub_subscribe` and thus will panic if an subscription was made outside of this
//! locking mechanism.
use futures::stream::{Stream, TryStream};
use serde::{Deserialize, Serialize};
use tokio::sync::{broadcast, Mutex};
use tokio::time::timeout;
use tokio_stream::StreamExt;
use ipfs::{Ipfs, IpfsTypes};
use std::collections::hash_map::Entry;
use std::collections::HashMap;
use std::fmt;
use std::sync::Arc;
use std::time::Duration;
use bytes::{Buf, Bytes};
use warp::{Filter, Rejection};
use crate::v0::support::{
try_only_named_multipart, with_ipfs, NonUtf8Topic, OnlyMultipartFailure,
RequiredArgumentMissing, StreamResponse, StringError,
};
use mime::Mime;
/// Shared `pubsub/sub` state: one broadcast channel per topic so multiple
/// HTTP responses can fan out from a single ipfs subscription.
#[derive(Default)]
pub struct Pubsub {
// topic name -> sender side of the per-topic broadcast channel; entries are
// removed only by the shovel task (see module docs for the locking contract)
subscriptions:
Mutex<HashMap<String, broadcast::Sender<Result<PreformattedJsonMessage, StreamError>>>>,
}
/// Handling of https://docs-beta.ipfs.io/reference/http/api/#api-v0-pubsub-peers
pub fn peers<T: IpfsTypes>(
ipfs: &Ipfs<T>,
) -> impl warp::Filter<Extract = (impl warp::Reply,), Error = warp::Rejection> + Clone {
// Optional `arg` query parameter narrows the listing to a single topic.
let topic_arg =
warp::query::<OptionalTopicParameter>().map(|params: OptionalTopicParameter| params.topic);
with_ipfs(ipfs).and(topic_arg).and_then(inner_peers)
}
/// Lists pubsub peers (optionally for one topic) as a JSON string list.
async fn inner_peers<T: IpfsTypes>(
ipfs: Ipfs<T>,
topic: Option<String>,
) -> Result<(impl warp::Reply,), warp::Rejection> {
match ipfs.pubsub_peers(topic).await {
Ok(peers) => {
let strings = peers.into_iter().map(|id| id.to_string()).collect();
Ok((warp::reply::json(&StringListResponse { strings }),))
}
Err(e) => Err(warp::reject::custom(StringError::from(e))),
}
}
/// Handling of https://docs-beta.ipfs.io/reference/http/api/#api-v0-pubsub-ls
pub fn list_subscriptions<T: IpfsTypes>(
ipfs: &Ipfs<T>,
) -> impl warp::Filter<Extract = (impl warp::Reply,), Error = warp::Rejection> + Clone {
let base = with_ipfs(ipfs);
base.and_then(inner_ls)
}
/// Returns the currently subscribed topics as a JSON string list.
async fn inner_ls<T: IpfsTypes>(ipfs: Ipfs<T>) -> Result<(impl warp::Reply,), warp::Rejection> {
match ipfs.pubsub_subscribed().await {
Ok(strings) => Ok((warp::reply::json(&StringListResponse { strings }),)),
Err(e) => Err(warp::reject::custom(StringError::from(e))),
}
}
/// Handling of https://docs-beta.ipfs.io/reference/http/api/#api-v0-pubsub-pub
pub fn publish<T: IpfsTypes>(
ipfs: &Ipfs<T>,
) -> impl warp::Filter<Extract = (impl warp::Reply,), Error = warp::Rejection> + Clone {
// `publish_args` extracts the topic plus the message (query or multipart body).
let args = publish_args("arg");
with_ipfs(ipfs).and(args).and_then(inner_publish)
}
/// Publishes the extracted message bytes to the requested topic.
async fn inner_publish<T: IpfsTypes>(
ipfs: Ipfs<T>,
PublishArgs { topic, message }: PublishArgs,
) -> Result<(impl warp::Reply,), warp::Rejection> {
let payload = message.into_inner();
if let Err(e) = ipfs.pubsub_publish(topic, payload).await {
return Err(warp::reject::custom(StringError::from(e)));
}
Ok((warp::reply::reply(),))
}
/// Handling of https://docs-beta.ipfs.io/reference/http/api/#api-v0-pubsub-sub
///
/// # Panics
///
/// Note the module documentation.
pub fn subscribe<T: IpfsTypes>(
ipfs: &Ipfs<T>,
pubsub: Arc<Pubsub>,
) -> impl warp::Filter<Extract = (impl warp::Reply,), Error = warp::Rejection> + Clone {
// Inject a clone of the shared subscription registry into every request.
let shared = warp::any().map(move || Arc::clone(&pubsub));
with_ipfs(ipfs)
.and(shared)
.and(warp::query::<TopicParameter>())
.and_then(|ipfs, pubsub, TopicParameter { topic }| async move {
let stream = inner_subscribe(ipfs, pubsub, topic).await;
Ok::<_, warp::Rejection>(StreamResponse(stream))
})
}
/// Joins (or creates) the broadcast channel for `topic` and returns a stream
/// of preformatted JSON messages for the HTTP response.
///
/// New topics spawn a background `shovel` task which owns the underlying
/// `ipfs` subscription and fans messages out through the broadcast channel.
async fn inner_subscribe<T: IpfsTypes>(
ipfs: Ipfs<T>,
pubsub: Arc<Pubsub>,
topic: String,
) -> impl TryStream<Ok = PreformattedJsonMessage, Error = StreamError> {
// accessing this through mutex bets on "most accesses would need write access" as in most
// requests would be asking for new subscriptions, which would require RwLock upgrading
// from write, which is not supported operation either.
let mut guard = pubsub.subscriptions.lock().await;
let mut rx = match guard.entry(topic) {
Entry::Occupied(oe) => {
// the easiest case: just join in, even if there are no other subscribers at the
// moment
debug!("joining in existing subscription of {:?}", oe.key());
oe.get().subscribe()
}
Entry::Vacant(ve) => {
let topic = ve.key().clone();
// the returned stream needs to be set up to be shoveled in a background task
let shoveled = ipfs
.pubsub_subscribe(topic.clone())
.await
.expect("new subscriptions shouldn't fail while holding the lock");
// using broadcast channel should allow us have N concurrent subscribes and
// preformatted json should give us good enough performance. this channel can last over
// multiple subscriptions and unsubscriptions.
let (tx, rx) = broadcast::channel::<Result<PreformattedJsonMessage, StreamError>>(16);
// this will be used to create more subscriptions
ve.insert(tx.clone());
let pubsub = Arc::clone(&pubsub);
// FIXME: handling this all efficiently in single task would require getting a
// stream of "all streams" from ipfs::p2p::Behaviour ... perhaps one could be added
// alongside the current "spread per topic" somehow?
tokio::spawn(shovel(ipfs, pubsub, topic, shoveled, tx));
rx
}
};
async_stream::stream! {
loop {
let next = rx.recv().await;
// map recv errors into the StreamError and flatten
let next = match next {
Err(tokio::sync::broadcast::error::RecvError::Closed) => break,
Err(tokio::sync::broadcast::error::RecvError::Lagged(_)) => Err(StreamError::Recv),
// the payload already is a Result<_, StreamError>; forward it as-is
// (was `next.and_then(|n| Ok(n))`, an identity `and_then`)
Ok(next) => next,
};
let was_err = next.is_err();
yield next;
if was_err {
break;
}
}
}
}
/// Shovel task takes items from the [`SubscriptionStream`], formats them and passes them on to
/// response streams. Uses timeouts to attempt dropping subscriptions which no longer have
/// responses reading from them and resubscribes streams which get new requests.
///
/// Owns the map entry for `topic`: only this task removes it (see the
/// `unreachable!` below), which is the invariant the module docs describe.
async fn shovel<T: IpfsTypes>(
ipfs: Ipfs<T>,
pubsub: Arc<Pubsub>,
topic: String,
mut shoveled: ipfs::SubscriptionStream,
tx: broadcast::Sender<Result<PreformattedJsonMessage, StreamError>>,
) {
trace!(
"started background task for shoveling messages of {:?}",
topic
);
// related conformance test waits for 100ms
let check_every = Duration::from_millis(50);
loop {
// has the underlying stream been stopped by directly calling
// `Ipfs::pubsub_unsubscribe`
// NOTE(review): the flag also stays `true` when we break out of the inner
// loop via the zero-receiver timeout below, in which case the resubscribe
// branch discards a still-live `shoveled` stream — TODO confirm intended.
let mut unsubscribed = true;
loop {
let next = match timeout(check_every, shoveled.next()).await {
// got a message: format it (or a Serialization error) for broadcast
Ok(Some(next)) => preformat(next),
// stream ended: someone unsubscribed out from under us
Ok(None) => break,
Err(_) => {
if tx.receiver_count() == 0 {
debug!("timed out shoveling with zero receivers");
break;
}
// nice thing about this timeout is that it reduces resubscription
// traffic, bad thing is that it is still work but then again it's
// done once per topic so it's not too much work.
continue;
}
};
if tx.send(next).is_err() {
// currently no more subscribers
// (the underlying stream itself is still alive, hence `false`)
unsubscribed = false;
break;
}
}
let mut guard = pubsub.subscriptions.lock().await;
// as this can take a long time to acquire the mutex, we might get a new
// subscriber in the between
if let Entry::Occupied(oe) = guard.entry(topic.clone()) {
if oe.get().receiver_count() > 0 {
if unsubscribed {
// this is tricky, se should obtain a new shoveled by resubscribing
// and reusing the existing broadcast::channel. this will fail if
// we introduce other Ipfs::pubsub_subscribe using code which does
// not use the `Pubsub` thing.
debug!(
"resubscribing with the existing broadcast channel to {:?}",
topic
);
shoveled = ipfs
.pubsub_subscribe(topic.clone())
.await
.expect("new subscriptions shouldn't fail while holding the lock");
} else {
trace!(
"got a new subscriber to existing broadcast channel on {:?}",
topic
);
}
// a new subscriber has appeared since our previous send failure.
continue;
}
// really no more subscribers, unsubscribe and terminate the shoveling
// task for this stream.
debug!("unsubscribing from {:?}", topic);
oe.remove();
return;
} else {
unreachable!(
"only way to remove subscriptions from
ipfs-http::v0::pubsub::Pubsub is through shoveling tasks exiting"
);
}
}
}
/// The two cases which can stop a pubsub/sub response generation.
// Any error from the stream wrapped in warp::hyper::Body will currently stop the request.
#[derive(Debug, Clone)]
enum StreamError {
/// Something went bad with the `serde_json`
Serialization,
/// Response is not keeping up with the stream (slow client)
Recv,
}
impl fmt::Display for StreamError {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
// Render a fixed description per variant.
let description = match self {
StreamError::Serialization => "failed to serialize received message",
StreamError::Recv => "consuming the stream too slowly",
};
write!(fmt, "{}", description)
}
}
impl std::error::Error for StreamError {}
/// Another representation for ipfs::PubsubMessage, but with the Base64Pad encoded fields.
/// This matches the wire format of the go-ipfs HTTP API `pubsub/sub` response.
#[derive(Debug, Serialize)]
struct PubsubHttpApiMessage {
// Base64Pad encoded PeerId
from: String,
// Base64Pad encoded Vec<u8>
data: String,
// Base64Pad encoded sequence number (go-ipfs sends incrementing, rust-libp2p has random
// values)
seqno: String,
// Plain text topic names
#[serde(rename = "topicIDs")]
topics: Vec<String>,
}
// Converts any borrowable PubsubMessage into the HTTP wire representation,
// Base64Pad-encoding the binary fields.
impl<T> From<T> for PubsubHttpApiMessage
where
T: AsRef<ipfs::PubsubMessage>,
{
fn from(msg: T) -> Self {
use multibase::Base::Base64Pad;
let inner = msg.as_ref();
PubsubHttpApiMessage {
from: Base64Pad.encode(inner.source.to_bytes()),
data: Base64Pad.encode(&inner.data),
seqno: Base64Pad.encode(&inner.sequence_number),
topics: inner.topics.clone(),
}
}
}
/// Bytes backed preformatted json + newline for subscription response stream.
#[derive(Clone)]
struct PreformattedJsonMessage(Bytes);
impl From<Bytes> for PreformattedJsonMessage {
fn from(b: Bytes) -> Self {
PreformattedJsonMessage(b)
}
}
// This direction is required by warp::hyper::Body
impl From<PreformattedJsonMessage> for Bytes {
fn from(val: PreformattedJsonMessage) -> Self {
let PreformattedJsonMessage(bytes) = val;
bytes
}
}
/// Formats the given pubsub message into json and a newline, as is the subscription format.
fn preformat(msg: impl AsRef<ipfs::PubsubMessage>) -> Result<PreformattedJsonMessage, StreamError> {
match serde_json::to_vec(&PubsubHttpApiMessage::from(&msg)) {
Ok(mut encoded) => {
// subscription responses are newline-delimited json
encoded.push(b'\n');
Ok(PreformattedJsonMessage::from(Bytes::from(encoded)))
}
Err(e) => {
error!("failed to serialize {:?}: {}", msg.as_ref(), e);
Err(StreamError::Serialization)
}
}
}
/// The "arg" for `pubsub/sub`
#[derive(Debug, Deserialize)]
struct TopicParameter {
// deserialized from the `?arg=` query parameter
#[serde(rename = "arg")]
topic: String,
}
/// The optional "arg" for `pubsub/peers`
#[derive(Debug, Deserialize)]
struct OptionalTopicParameter {
// absent `?arg=` means "all topics"
#[serde(rename = "arg")]
topic: Option<String>,
}
/// Generic response which should be moved to ipfs_http::v0
#[derive(Debug, Serialize)]
#[serde(rename_all = "PascalCase")]
struct StringListResponse {
// serialized as `{"Strings": [...]}` per the go-ipfs API shape
strings: Vec<String>,
}
/// `pubsub/pub` is used by `go-ipfs` by including the topic in the query string and using body for
/// the message. `js-ipfs-http-client` uses query parameters for both. Currently only supports the
/// `js-ipfs-http-client` as `go-ipfs` doesn't send `Content-Length` with the body.
#[derive(Debug)]
struct PublishArgs {
topic: String,
message: QueryOrBody,
}
/// Where the message payload came from: the query string or the request body.
#[derive(Debug)]
enum QueryOrBody {
Query(Vec<u8>),
Body(Vec<u8>),
}
impl QueryOrBody {
/// Consumes the wrapper, yielding the raw message bytes.
fn into_inner(self) -> Vec<u8> {
match self {
QueryOrBody::Query(bytes) => bytes,
QueryOrBody::Body(bytes) => bytes,
}
}
}
impl AsRef<[u8]> for PublishArgs {
fn as_ref(&self) -> &[u8] {
match &self.message {
QueryOrBody::Query(bytes) => bytes.as_slice(),
QueryOrBody::Body(bytes) => bytes.as_slice(),
}
}
}
/// `parameter_name` is byte slice because there is no percent decoding done for that component.
///
/// Builds a filter which extracts `(topic, Option<message>)` from the raw
/// query string (see `QueryAsRawPartsParser`) and then finishes in
/// `publish_args_inner`, which falls back to a multipart body for the message.
fn publish_args(
parameter_name: &'static str,
) -> impl Filter<Extract = (PublishArgs,), Error = warp::Rejection> + Clone {
warp::filters::query::raw()
.and_then(move |s: String| {
let ret = if s.is_empty() {
Err(warp::reject::custom(RequiredArgumentMissing("topic")))
} else {
// sadly we can't use url::form_urlencoded::parse here as it will do lossy
// conversion to utf8 without us being able to recover the raw bytes, which are
// used by js-ipfs/ipfs-http-client to encode raw Buffers:
// https://github.com/ipfs/js-ipfs/blob/master/packages/ipfs-http-client/src/pubsub/publish.js
let parser = QueryAsRawPartsParser {
input: s.as_bytes(),
};
// keep only values for the requested parameter, in order of appearance
let mut args = parser
.filter(|&(k, _)| k == parameter_name.as_bytes())
.map(|t| t.1);
let first = args
.next()
// can't be missing
.ok_or_else(|| warp::reject::custom(RequiredArgumentMissing(parameter_name)))
// decode into Result<String, warp::Rejection>
.and_then(|raw_first| {
percent_encoding::percent_decode(raw_first)
.decode_utf8()
.map(|cow| cow.into_owned())
.map_err(|_| warp::reject::custom(NonUtf8Topic))
});
first.map(move |first| {
// continue to second arg, which may or may not be present
// (second value stays raw bytes — not required to be utf8)
let second = args
.next()
.map(|slice| percent_encoding::percent_decode(slice).collect::<Vec<_>>())
.map(QueryOrBody::Query);
(first, second)
})
};
futures::future::ready(ret)
})
.and(warp::filters::header::optional::<Mime>("content-type"))
.and(warp::filters::body::stream())
.and_then(publish_args_inner)
}
/// Finishes `publish_args`: uses the query-string message when present,
/// otherwise reads the message from a multipart body field named "file".
async fn publish_args_inner(
(topic, opt_arg): (String, Option<QueryOrBody>),
content_type: Option<Mime>,
body: impl Stream<Item = Result<impl Buf, warp::Error>> + Unpin,
) -> Result<PublishArgs, Rejection> {
// Fast path: message was already given in the query string.
if let Some(message) = opt_arg {
return Ok(PublishArgs { topic, message });
}
let mime = content_type
.ok_or_else(|| StringError::from("message needs to be query or in multipart body"))?;
let boundary = mime
.get_param("boundary")
.map(|v| v.to_string())
.ok_or_else(|| StringError::from("missing 'boundary' on content-type"))?;
let buffer = match try_only_named_multipart(&["file"], 1024 * 100, boundary, body).await {
Ok(buffer) if buffer.is_empty() => Ok(None),
Ok(buffer) => Ok(Some(buffer)),
Err(OnlyMultipartFailure::NotFound) => Ok(None),
Err(e) => Err(StringError::from(e)),
}?;
// this error is from conformance tests; the field name is different
let buffer = buffer.ok_or_else(|| StringError::from("argument \"data\" is required"))?;
Ok(PublishArgs {
topic,
message: QueryOrBody::Body(buffer),
})
}
/// Splits a raw query string into `(name, value)` byte-slice pairs without any
/// percent decoding, so non-utf8 values survive intact.
struct QueryAsRawPartsParser<'a> {
input: &'a [u8],
}
// Adapted from https://github.com/servo/rust-url/blob/cce2d32015419b38f00c210430ecd3059105a7f2/src/form_urlencoded.rs
// minus the percent decoding the original performs.
impl<'a> Iterator for QueryAsRawPartsParser<'a> {
type Item = (&'a [u8], &'a [u8]);
fn next(&mut self) -> Option<Self::Item> {
while !self.input.is_empty() {
// Take everything up to the next '&' as one key[=value] pair.
let (pair, rest) = match self.input.iter().position(|&b| b == b'&') {
Some(amp) => (&self.input[..amp], &self.input[amp + 1..]),
None => (self.input, &self.input[self.input.len()..]),
};
self.input = rest;
// `a&&b` produces an empty pair between the ampersands: skip it.
if pair.is_empty() {
continue;
}
// Split on the first '='; a missing value becomes an empty slice.
return Some(match pair.iter().position(|&b| b == b'=') {
Some(eq) => (&pair[..eq], &pair[eq + 1..]),
None => (pair, &pair[pair.len()..]),
});
}
None
}
}
#[cfg(test)]
mod tests {
use super::{publish_args, PublishArgs};
use futures::future::ready;
use std::str;
use warp::reply::json;
use warp::{test::request, Filter, Rejection, Reply};
// Wraps `publish_args` so the extracted topic/message can be asserted on as JSON.
fn publish_args_as_json(
param: &'static str,
) -> impl Filter<Extract = impl Reply, Error = Rejection> {
publish_args(param).and_then(|p: PublishArgs| {
let message = str::from_utf8(p.as_ref()).unwrap();
ready(Ok::<_, warp::Rejection>(json(&serde_json::json!({
"message": message,
"topic": p.topic,
}))))
})
}
// js-ipfs-http-client style: both topic and message in the query string.
#[tokio::test]
async fn url_hacked_args() {
let response = request()
.path("/pubsub/pub?arg=some_channel&arg=foobar")
.reply(&publish_args_as_json("arg"))
.await;
let body = str::from_utf8(response.body()).unwrap();
assert_eq!(body, r#"{"message":"foobar","topic":"some_channel"}"#);
}
// Message supplied via multipart body; the boundary bytes below must match
// the content-type header exactly.
#[tokio::test]
async fn message_in_body() {
let response = request()
.path("/pubsub/pub?arg=some_channel")
.header(
"content-type",
"multipart/form-data; boundary=-----------------------------Z0oYi6XyTm7_x2L4ty8JL",
)
.body(
&b"-------------------------------Z0oYi6XyTm7_x2L4ty8JL\r\n\
Content-Disposition: form-data; name=\"file\"; filename=\"\"\r\n\
Content-Type: application/octet-stream\r\n\
\r\n\
aedFIxDJZ2jS1eVB6Pkbv\
\r\n-------------------------------Z0oYi6XyTm7_x2L4ty8JL--\r\n"[..],
)
.reply(&publish_args_as_json("arg"))
.await;
let body = str::from_utf8(response.body()).unwrap();
assert_eq!(
body,
r#"{"message":"aedFIxDJZ2jS1eVB6Pkbv","topic":"some_channel"}"#
);
}
}
|
use std::collections::HashSet;
use std::error;
use std::fs::File;
use std::io::{BufRead, BufReader};
use std::path::PathBuf;
use structopt::StructOpt;
// CLI options. NOTE: structopt turns `///` doc comments on fields into --help
// text, so reviewer notes below use `//` to avoid changing runtime output.
#[derive(StructOpt)]
#[structopt(
name = "solve",
about = "solve the puzzles from the Advent of Code 2020, day 1"
)]
struct Opt {
// target sum the entries must add up to (passed as `total` to part_one/part_two)
#[structopt(short, long, default_value = "2020")]
year: i32,
/// path to input file; should contain integers, one per line
input: PathBuf,
}
type Error = Box<dyn error::Error>;
/// Reads the puzzle input (one integer per line), then prints the pair and
/// triple of entries summing to `--year`, or a diagnostic when none exists.
fn main() -> Result<(), Error> {
let opts = Opt::from_args();
// `File::open` takes `impl AsRef<Path>`; borrowing avoids the previous
// needless `opts.input.clone()`.
let file = File::open(&opts.input)?;
let input = BufReader::new(file)
.lines()
.map(|read| read?.parse().map_err(Error::from))
.collect::<Result<Vec<i32>, Error>>()?;
println!("part one:");
if let Some((a, b)) = part_one(&input, opts.year) {
println!("{} * {} = {}", a, b, a * b);
} else {
eprintln!(
"No pair of numbers adding to {} exists in {:?}",
opts.year,
opts.input.as_os_str()
);
}
println!("part two:");
if let Some((a, b, c)) = part_two(&input, opts.year) {
println!("{} * {} * {} = {}", a, b, c, a * b * c);
} else {
eprintln!(
"No triple of numbers adding to {} exists in {:?}",
opts.year,
opts.input.as_os_str()
);
}
Ok(())
}
/// Finds two entries of `input` summing to `total`, returned in scan order.
///
/// Fix: when `total == 2 * a`, the old set lookup matched `a` against itself,
/// so e.g. `part_one(&[3], 6)` wrongly returned `Some((3, 3))`. Two distinct
/// entries are now required (equal values are fine if they occur twice).
fn part_one(input: &[i32], total: i32) -> Option<(i32, i32)> {
let input_set: HashSet<i32> = input.iter().copied().collect();
for (ix, &a) in input.iter().enumerate() {
let b = total - a;
let found = if b == a {
// need a *second* copy of the same value; the set cannot
// distinguish one occurrence from two
input[ix + 1..].contains(&a)
} else {
input_set.contains(&b)
};
if found {
return Some((a, b));
}
}
None
}
/// Finds three entries of `input` summing to `total`.
///
/// Fix: the old set lookup let `c` reuse the entry already chosen as `a` or
/// `b` (e.g. input `[2, 3]`, total `7` returned `Some((2, 3, 2))`). Counting
/// occurrences enforces that enough copies of the value actually exist, while
/// keeping the O(n^2) pair scan.
fn part_two(input: &[i32], total: i32) -> Option<(i32, i32, i32)> {
use std::collections::HashMap;
let mut counts: HashMap<i32, usize> = HashMap::new();
for &x in input {
*counts.entry(x).or_insert(0) += 1;
}
for (ix, &a) in input.iter().enumerate() {
for &b in input[ix + 1..].iter() {
let c = total - a - b;
// one copy for c itself, plus one per collision with a or b
let needed = 1 + usize::from(c == a) + usize::from(c == b);
if counts.get(&c).copied().unwrap_or(0) >= needed {
return Some((a, b, c));
}
}
}
None
}
|
extern crate green;
// Attribute kept commented out from an earlier/alternative async setup —
// presumably to be restored if `green::run` becomes an actix entry point.
// #[actix_web::main]
/// Entry point: delegates all work to the `green` crate.
fn main() -> std::io::Result<()> {
green::run()
}
|
use crate::query_log::QueryLog;
use arrow::{datatypes::SchemaRef, error::Result as ArrowResult, record_batch::RecordBatch};
use async_trait::async_trait;
use data_types::NamespaceId;
use datafusion::error::DataFusionError;
use datafusion::physical_plan::{DisplayAs, DisplayFormatType};
use datafusion::{
catalog::schema::SchemaProvider,
datasource::TableProvider,
error::Result as DataFusionResult,
execution::context::{SessionState, TaskContext},
logical_expr::TableType,
physical_plan::{
expressions::PhysicalSortExpr, ExecutionPlan, Partitioning, RecordBatchStream,
SendableRecordBatchStream, Statistics,
},
prelude::Expr,
};
use std::collections::HashMap;
use std::{
any::Any,
pin::Pin,
sync::Arc,
task::{Context, Poll},
};
mod queries;
/// Name of the schema exposed by [`SystemSchemaProvider`].
pub const SYSTEM_SCHEMA: &str = "system";
// Table name for the query-log backed table (only present with debug info).
const QUERIES_TABLE: &str = "queries";
/// DataFusion schema provider for the `system` schema.
pub struct SystemSchemaProvider {
// static table name -> provider; populated once in `new`
tables: HashMap<&'static str, Arc<dyn TableProvider>>,
}
impl SystemSchemaProvider {
/// Builds the provider; the `queries` table (backed by `query_log`) is only
/// exposed when `include_debug_info` is set.
pub fn new(
query_log: Arc<QueryLog>,
namespace_id: NamespaceId,
include_debug_info: bool,
) -> Self {
let mut tables: HashMap<&'static str, Arc<dyn TableProvider>> = HashMap::new();
if include_debug_info {
let table = Arc::new(queries::QueriesTable::new(query_log, Some(namespace_id)));
tables.insert(QUERIES_TABLE, Arc::new(SystemTableProvider { table }));
}
Self { tables }
}
}
#[async_trait]
impl SchemaProvider for SystemSchemaProvider {
fn as_any(&self) -> &dyn Any {
self as &dyn Any
}
/// Table names in sorted order for stable listings.
fn table_names(&self) -> Vec<String> {
let mut names: Vec<String> = self.tables.keys().map(|name| name.to_string()).collect();
names.sort();
names
}
async fn table(&self, name: &str) -> Option<Arc<dyn TableProvider>> {
self.tables.get(name).map(Arc::clone)
}
fn table_exist(&self, name: &str) -> bool {
self.tables.contains_key(name)
}
}
// Boxed iterator of record batches produced by a system table scan.
type BatchIterator = Box<dyn Iterator<Item = ArrowResult<RecordBatch>> + Send + Sync>;
/// The minimal thing that a system table needs to implement
trait IoxSystemTable: Send + Sync {
/// Produce the schema from this system table
fn schema(&self) -> SchemaRef;
/// Get the contents of the system table
fn scan(&self, batch_size: usize) -> ArrowResult<BatchIterator>;
}
/// Adapter that makes any `IoxSystemTable` a DataFusion `TableProvider`.
///
/// The `T: IoxSystemTable` bound intentionally lives on the impl blocks that
/// need it rather than on the struct definition (idiomatic Rust: bounds on
/// struct definitions add noise without adding guarantees).
struct SystemTableProvider<T> {
table: Arc<T>,
}
#[async_trait]
impl<T> TableProvider for SystemTableProvider<T>
where
T: IoxSystemTable + 'static,
{
fn as_any(&self) -> &dyn Any {
self
}
fn schema(&self) -> SchemaRef {
self.table.schema()
}
/// Plans a scan: the projection is applied to the schema here, and to each
/// batch later inside `SystemTableStream`.
async fn scan(
&self,
_ctx: &SessionState,
projection: Option<&Vec<usize>>,
// It would be cool to push projection and limit down
_filters: &[Expr],
_limit: Option<usize>,
) -> DataFusionResult<Arc<dyn ExecutionPlan>> {
let schema = self.table.schema();
// `Option<&Vec<usize>>` is `Copy`; the previous `projection.as_ref()`
// round-trip through `Option<&&Vec<_>>` was needless.
let projected_schema = match projection {
Some(projection) => Arc::new(schema.project(projection)?),
None => schema,
};
Ok(Arc::new(SystemTableExecutionPlan {
table: Arc::clone(&self.table),
projection: projection.cloned(),
projected_schema,
}))
}
fn table_type(&self) -> TableType {
TableType::Base
}
}
/// Physical plan node that drains an `IoxSystemTable` when executed.
struct SystemTableExecutionPlan<T> {
table: Arc<T>,
// schema after applying `projection` (full schema when `projection` is None)
projected_schema: SchemaRef,
// column indices requested by the caller; applied per batch in the stream
projection: Option<Vec<usize>>,
}
impl<T> std::fmt::Debug for SystemTableExecutionPlan<T> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
// Debug output simply reuses the DisplayAs formatting.
self.fmt_as(DisplayFormatType::Default, f)
}
}
impl<T: IoxSystemTable + 'static> ExecutionPlan for SystemTableExecutionPlan<T> {
fn as_any(&self) -> &dyn Any {
self
}
fn schema(&self) -> SchemaRef {
Arc::clone(&self.projected_schema)
}
// System tables are small; a single partition is sufficient.
fn output_partitioning(&self) -> Partitioning {
Partitioning::UnknownPartitioning(1)
}
fn output_ordering(&self) -> Option<&[PhysicalSortExpr]> {
None
}
// Leaf node: no child plans.
fn children(&self) -> Vec<Arc<dyn ExecutionPlan>> {
vec![]
}
fn with_new_children(
self: Arc<Self>,
_children: Vec<Arc<dyn ExecutionPlan>>,
) -> DataFusionResult<Arc<dyn ExecutionPlan>> {
// leaf node — replacing children is never meaningful here
unimplemented!()
}
/// Starts the scan, honoring the session's configured batch size.
fn execute(
&self,
_partition: usize,
context: Arc<TaskContext>,
) -> DataFusionResult<SendableRecordBatchStream> {
let batch_size = context.session_config().batch_size();
Ok(Box::pin(SystemTableStream {
projected_schema: Arc::clone(&self.projected_schema),
batches: self.table.scan(batch_size)?,
projection: self.projection.clone(),
}))
}
fn statistics(&self) -> Statistics {
// no statistics are gathered for system tables
Statistics::default()
}
}
impl<T> DisplayAs for SystemTableExecutionPlan<T> {
// Both display modes show the same struct-style summary (only the
// projection is interesting; the table itself has no useful Display).
fn fmt_as(&self, t: DisplayFormatType, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match t {
DisplayFormatType::Default | DisplayFormatType::Verbose => f
.debug_struct("SystemTableExecutionPlan")
.field("projection", &self.projection)
.finish(),
}
}
}
/// Record batch stream over a system table's synchronous batch iterator.
struct SystemTableStream {
// schema reported to consumers (already projected)
projected_schema: SchemaRef,
// column indices to apply to every produced batch
projection: Option<Vec<usize>>,
batches: BatchIterator,
}
impl RecordBatchStream for SystemTableStream {
fn schema(&self) -> SchemaRef {
Arc::clone(&self.projected_schema)
}
}
impl futures::Stream for SystemTableStream {
type Item = Result<RecordBatch, DataFusionError>;
// Never returns Poll::Pending: batches come from a synchronous iterator, so
// each poll immediately yields the next (optionally projected) batch or
// ends the stream.
fn poll_next(mut self: Pin<&mut Self>, _cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
Poll::Ready(self.batches.next().map(|maybe_batch| {
let batch = maybe_batch?;
match &self.projection {
Some(projection) => Ok(batch.project(projection)?),
None => Ok(batch),
}
}))
}
}
|
use accounts;
use accounts::types::*;
use db::Conn;
use rocket_contrib::json::Json;
use web::types::ApiResponse;
/// Get all users
#[get("/users")]
pub fn all_users(user: CurrentUser, conn: Conn) -> ApiResponse<Vec<User>> {
accounts::all_users(user, conn).map(|r| Json(r))
}
/// Get user by id
#[get("/users/<user_id>")]
pub fn user_by_id(user_id: i32, user: CurrentUser, conn: Conn) -> ApiResponse<User> {
accounts::user_by_id(user_id, user, conn).map(|r| Json(r))
}
/// Login
#[post("/login", format = "application/json", data = "<input>")]
pub fn login(conn: Conn, input: Json<LoginInput>) -> ApiResponse<AuthPayload> {
accounts::login(conn, input.into_inner()).map(|r| Json(r))
}
/// Register route
#[post("/register", format = "application/json", data = "<input>")]
pub fn register(conn: Conn, input: Json<RegistrationInput>) -> ApiResponse<AuthPayload> {
accounts::register(conn, input.into_inner()).map(|r| Json(r))
}
/// Register route
#[post("/users", format = "application/json", data = "<input>")]
pub fn create_user(
user: CurrentUser,
conn: Conn,
input: Json<CreateUserInput>,
) -> ApiResponse<User> {
accounts::create_user(user, conn, input.into_inner()).map(|r| Json(r))
}
/// Profiles
#[post(
"/users/<user_id>/profile",
format = "application/json",
data = "<input>"
)]
pub fn create_profile(
user_id: i32,
user: CurrentUser,
conn: Conn,
input: Json<ProfileInput>,
) -> ApiResponse<Profile> {
accounts::create_profile(conn, &user, user_id, &input.into_inner()).map(|r| Json(r))
}
#[put(
"/users/<user_id>/profile",
format = "application/json",
data = "<input>"
)]
pub fn update_profile(
user_id: i32,
user: CurrentUser,
conn: Conn,
input: Json<ProfileInput>,
) -> ApiResponse<Profile> {
accounts::update_profile(conn, &user, user_id, &input.into_inner()).map(|r| Json(r))
}
#[get("/users/<user_id>/profile", format = "application/json")]
pub fn get_profile(user_id: i32, user: CurrentUser, conn: Conn) -> ApiResponse<Profile> {
accounts::get_profile(user_id, user, conn).map(|r| Json(r))
}
|
extern crate libc;
use std::ptr;
use std::fmt::{Show, Formatter, FormatError};
use self::libc::{c_char, c_int};
use git2::error::{GitError, get_last_error};
// Raw libgit2 FFI entry points used by this module.
// NOTE(review): this fragment is pre-1.0 Rust (`uint`, `[u8, ..20]`, `fail!`,
// `to_c_str`) and will not build on a modern compiler; documented as-is.
extern {
fn git_oid_fromstrp(oid: *mut GitOid, s: *const c_char) -> c_int;
fn git_oid_cmp(a: *const GitOid, b: *const GitOid) -> c_int;
fn git_oid_tostr(out: *mut c_char, size: u32, obj: *const GitOid) -> *mut c_char;
}
/// Size (in bytes) of a raw/binary oid
pub static GIT_OID_RAWSZ: uint = 20;
/// Size (in bytes) of a hex formatted oid
pub static GIT_OID_HEXSZ: uint = (GIT_OID_RAWSZ * 2);
// C-compatible mirror of libgit2's `git_oid`: 20 raw bytes.
#[allow(dead_code)]
pub struct GitOid {
id: [u8, ..20]
}
// Owning wrapper around a copied `GitOid`.
pub struct OID {
_oid: GitOid
}
/// A trait for anything that can be converted into an OID
pub trait ToOID {
fn to_oid(&self) -> Result<OID, GitError>;
}
impl OID {
/// Copies the raw oid pointed to by `o` into a new owning `OID`.
pub fn _new(o: *const GitOid) -> OID {
let mut new_oid : GitOid = GitOid{ id: [0,..20]};
// SAFETY: caller must pass a valid pointer to one GitOid; one element is copied.
unsafe { ptr::copy_memory(&mut new_oid, o, 1); }
OID{_oid: new_oid}
}
// Raw pointer to the wrapped oid for passing back into libgit2.
pub fn _get_ptr(&self) -> *const GitOid { &self._oid as *const GitOid }
/// Format a OID as a hex-formatted String
pub fn to_string(&self) -> String {
// Pre-size the buffer (hex digits + NUL) with placeholder bytes for
// git_oid_tostr to overwrite.
let mut s = ::std::string::String::new();
s.grow(GIT_OID_HEXSZ + 1, '+');
assert!(s.len() == GIT_OID_HEXSZ + 1);
let mut cstr = s.to_c_str();
unsafe {
git_oid_tostr(cstr.as_mut_ptr(), GIT_OID_HEXSZ as u32 + 1u32, self._get_ptr());
}
match cstr.as_str() {
None => fail!("Failed to get str!"),
Some(st) => st.into_string()
}
}
}
// Display the oid as its 40-character hex string.
impl Show for OID {
    fn fmt(&self, f: &mut Formatter) -> Result<(), FormatError> {
        f.write(self.to_string().as_bytes())
    }
}
// Equality delegates to libgit2's three-way compare (0 == equal).
impl PartialEq for OID {
    fn eq(&self, other: &OID) -> bool {
        unsafe { git_oid_cmp(self._get_ptr(), other._get_ptr()) == 0 }
    }
}
impl<'a> ToOID for &'a str {
    // Parse a hex string via git_oid_fromstrp; surfaces the most recent
    // libgit2 error on failure.
    fn to_oid(&self) -> Result<OID, GitError> {
        let mut p : GitOid = GitOid{id: [0,..20]};
        let ret = unsafe {
            git_oid_fromstrp(&mut p, self.to_c_str().unwrap())
        };
        if ret != 0 {
            return Err(get_last_error());
        }
        return Ok(OID{_oid: p});
    }
}
impl ToOID for OID {
    // NOTE(review): `*self.clone()` clones the `&OID` reference and then
    // dereferences it, which moves out of the borrow — confirm OID was
    // meant to implement Clone directly here.
    fn to_oid(&self) -> Result<OID, GitError> { Ok(*self.clone()) }
}
|
extern crate uuid;
extern crate serde;
extern crate serde_json;
extern crate chrono;
extern crate snowflake;
mod order_id;
mod order_id_generator;
mod order_status;
pub mod policy;
use self::chrono::prelude::{DateTime, Utc};
use direction::Direction;
use symbol::SymbolId;
use execution::Execution;
pub use self::order_id::OrderId;
pub use self::order_id_generator::{UUIDOrderIdGenerator, GenerateOrderId};
pub use self::order_status::{OrderStatus, CancellationReason};
/// Pricing strategy of an order.
#[derive(Serialize, Deserialize, Clone, PartialEq, Debug)]
pub enum OrderKind {
    /// Execute immediately at the best available price.
    MarketOrder,
    /// Execute at the given limit price or better.
    LimitOrder(f64),
    /// Becomes a market order once the given stop price is reached.
    StopOrder(f64)
}
/// Identifier shared by orders in a one-cancels-all group.
pub type OcaGroup = String;
/// A trading order; constructed via `OrderBuilder::build`.
#[derive(Serialize, Deserialize, Clone, PartialEq, Debug)]
pub struct Order {
    id: OrderId,
    symbol_id: SymbolId,
    direction: Direction,
    quantity: u32,
    status: OrderStatus,
    kind: OrderKind,
    // One-cancels-all group membership, if any.
    oca: Option<OcaGroup>,
    // Optional validity window bounds.
    active_until: Option<DateTime<Utc>>,
    active_after: Option<DateTime<Utc>>,
}
impl Order {
    /// Unique order identifier.
    pub fn id(&self) -> &OrderId {
        &self.id
    }
    /// Symbol the order trades.
    pub fn symbol_id(&self) -> &SymbolId {
        &self.symbol_id
    }
    /// Buy/sell direction.
    pub fn direction(&self) -> &Direction {
        &self.direction
    }
    /// Ordered quantity.
    pub fn quantity(&self) -> u32 {
        self.quantity
    }
    /// Current lifecycle status.
    pub fn status(&self) -> &OrderStatus {
        &self.status
    }
    /// Overwrite the lifecycle status — the only mutation `Order` allows.
    pub fn set_status(&mut self, value: OrderStatus) {
        self.status = value
    }
    /// Pricing kind (market / limit / stop).
    pub fn kind(&self) -> &OrderKind {
        &self.kind
    }
    /// One-cancels-all group, if any.
    pub fn oca(&self) -> &Option<OcaGroup> {
        &self.oca
    }
    /// Upper bound of the validity window, if bounded.
    pub fn active_until(&self) -> &Option<DateTime<Utc>> {
        &self.active_until
    }
    /// Lower bound of the validity window, if bounded.
    pub fn active_after(&self) -> &Option<DateTime<Utc>> {
        &self.active_after
    }
    /// The fill execution, present only while status is `Filled`.
    pub fn execution(&self) -> Option<&Execution> {
        match *self.status() {
            OrderStatus::Filled(ref execution) => Some(execution),
            _ => None
        }
    }
}
/// Mutable builder for `Order` (not serializable, unlike `Order` itself).
#[derive(Clone, PartialEq, Debug)]
pub struct OrderBuilder {
    // None until an id is allocated; `build` fails without one.
    id: Option<OrderId>,
    symbol_id: SymbolId,
    direction: Direction,
    quantity: u32,
    status: OrderStatus,
    kind: OrderKind,
    oca: Option<OcaGroup>,
    active_until: Option<DateTime<Utc>>,
    active_after: Option<DateTime<Utc>>,
}
impl OrderBuilder {
    /// Begin building an order that has no id allocated yet. The fresh
    /// builder starts with quantity 0, status `NotSent`, no OCA group and
    /// an unbounded validity window.
    pub fn unallocated(kind: OrderKind, symbol_id: SymbolId,
        direction: Direction) -> OrderBuilder
    {
        OrderBuilder {
            id: None,
            kind: kind,
            symbol_id: symbol_id,
            direction: direction,
            quantity: 0,
            status: OrderStatus::NotSent,
            oca: None,
            active_until: None,
            active_after: None,
        }
    }
    /// Order kind this builder will produce.
    pub fn kind(&self) -> &OrderKind { &self.kind }
    /// Symbol the order refers to.
    pub fn symbol_id(&self) -> &SymbolId { &self.symbol_id }
    /// Buy/sell direction.
    pub fn direction(&self) -> &Direction { &self.direction }
    /// Allocated id, if any.
    pub fn id(&self) -> &Option<OrderId> { &self.id }
    /// Allocate the order id (consumes and returns the builder).
    pub fn set_id(mut self, value: OrderId) -> Self {
        self.id = Some(value);
        self
    }
    /// Current status.
    pub fn status(&self) -> &OrderStatus { &self.status }
    /// Replace the status.
    pub fn set_status(mut self, value: OrderStatus) -> Self {
        self.status = value;
        self
    }
    /// One-cancels-all group, if any.
    pub fn oca(&self) -> &Option<OcaGroup> { &self.oca }
    /// Set or clear the OCA group.
    pub fn set_oca(mut self, value: Option<OcaGroup>) -> Self {
        self.oca = value;
        self
    }
    /// Upper bound of the validity window.
    pub fn active_until(&self) -> &Option<DateTime<Utc>> { &self.active_until }
    /// Set or clear the validity upper bound.
    pub fn set_active_until(mut self, value: Option<DateTime<Utc>>) -> Self {
        self.active_until = value;
        self
    }
    /// Ordered quantity.
    pub fn quantity(&self) -> u32 { self.quantity }
    /// Replace the quantity.
    pub fn set_quantity(mut self, value: u32) -> Self {
        self.quantity = value;
        self
    }
    /// Lower bound of the validity window.
    pub fn active_after(&self) -> &Option<DateTime<Utc>> { &self.active_after }
    /// Set or clear the validity lower bound.
    pub fn set_active_after(mut self, value: Option<DateTime<Utc>>) -> Self {
        self.active_after = value;
        self
    }
    /// Consume the builder and produce an `Order`.
    /// Fails with `BuildOrderError::UndefinedId` if no id was assigned.
    pub fn build(self) -> Result<Order, BuildOrderError> {
        let id = self.id.ok_or(BuildOrderError::UndefinedId)?;
        Ok(Order {
            id: id,
            symbol_id: self.symbol_id,
            direction: self.direction,
            quantity: self.quantity,
            status: self.status,
            kind: self.kind,
            oca: self.oca,
            active_until: self.active_until,
            active_after: self.active_after,
        })
    }
}
/// Errors produced by `OrderBuilder::build`.
#[derive(Debug)]
pub enum BuildOrderError {
    /// No order id was assigned before `build` was called.
    UndefinedId
}
|
use crate::{
graphics::{HitRecord, Ray},
math::Color,
};
use super::Material;
use rand::Rng;
/// A clear refractive material (glass, water, diamond) characterised by
/// its index of refraction.
pub struct Dielectric {
    ir: f64,
}
impl Dielectric {
    /// Build a dielectric with index of refraction `ir`.
    pub fn new(ir: f64) -> Dielectric {
        Dielectric { ir }
    }
}
impl Dielectric {
    /// Schlick's polynomial approximation of the Fresnel reflectance for
    /// the given incidence cosine and relative index of refraction.
    fn reflectance(cosine: f64, ref_idx: f64) -> f64 {
        let sqrt_r0 = (1.0 - ref_idx) / (1.0 + ref_idx);
        let r0 = sqrt_r0 * sqrt_r0;
        let one_minus_cos = 1.0 - cosine;
        r0 + (1.0 - r0) * one_minus_cos.powi(5)
    }
}
impl Material for Dielectric {
    /// Scatter an incoming ray at `hit`: refract when Snell's law allows
    /// it, otherwise (total internal reflection) — or probabilistically per
    /// Schlick reflectance — reflect. Attenuation is always white because a
    /// clear dielectric absorbs nothing.
    fn scatter(&self, ray: &Ray, hit: &HitRecord) -> Option<(Ray, Color)> {
        let mut rng = rand::thread_rng();
        let attenuation = Color::ONE;
        // Entering the surface: air -> material, ratio 1/ir; leaving:
        // material -> air, ratio ir.
        let refraction_ratio = if hit.front_face {
            1.0 / self.ir
        } else {
            self.ir
        };
        // NOTE(review): assumes `ray.direction` is already unit length —
        // the cos/sin math below is only valid then; confirm `Ray`
        // guarantees normalized directions.
        let unit_direction = ray.direction;
        let cos_theta = (-unit_direction).dot(hit.normal).min(1.0);
        let sin_theta = (1.0 - cos_theta.powi(2)).sqrt();
        // Snell's law has no solution when ratio * sin(theta) > 1.
        let cannot_refract = refraction_ratio * sin_theta > 1.0;
        let direction =
            if cannot_refract || Dielectric::reflectance(cos_theta, refraction_ratio) > rng.gen() {
                unit_direction.reflect(hit.normal)
            } else {
                unit_direction.refract(hit.normal, refraction_ratio)
            };
        // Scattered ray originates at the hit point, preserving ray time.
        Some((Ray::new(hit.p, direction, ray.time), attenuation))
    }
}
|
use clap::{App, Arg};
use regex::{Regex, RegexBuilder};
use std::{thread, time};
use std::process::exit;
use sysinfo::{ProcessExt, System, SystemExt, Signal, Process};
/// Entry point: parse CLI args, find processes matching the pattern,
/// SIGTERM them, wait up to `wait-ms` for graceful exit, then SIGKILL
/// whatever is left.
fn main() {
    let matches = App::new("process-killer")
        .version("0.3.0")
        // Fixed duplicated word ("for for") in the help text.
        .about("A simple utility for terminating processes quickly and cleanly.")
        .arg(Arg::with_name("pattern")
            .help("All processes that contain this pattern will be killed. Case insensitive by default.")
            .index(1)
            .required(true))
        .arg(Arg::with_name("wait-ms")
            .help("How many milliseconds to wait for the processes to gracefully terminate before force killing them.")
            .takes_value(true)
            .long("wait-ms")
            .short("w")
            .default_value("3000")
        )
        .arg(Arg::with_name("regex")
            .help("Interpret the pattern as a regular expression")
            .long("regex")
            .short("r"))
        .arg(Arg::with_name("case-sensitive")
            .help("Make pattern to be case sensitive")
            .long("case-sensitive")
            .short("c"))
        .get_matches();
    let pattern = matches
        .value_of("pattern")
        .expect("Error while getting pattern.");
    // `is_empty` instead of comparing against "".
    if pattern.is_empty() {
        eprintln!("Pattern should not be empty.");
        exit(-1);
    }
    let regular_expression = matches.is_present("regex");
    let case_sensitive = matches.is_present("case-sensitive");
    let wait_ms: u64 = matches
        .value_of("wait-ms")
        .expect("Error while getting the value of wait-ms.")
        .parse()
        .expect("Wait_ms was not a number");
    let s = System::new_all();
    let processes_iterator = s.get_processes().iter().map(|(_pid, process)| process);
    let matching: Vec<&Process> = if regular_expression {
        let regex: Regex = RegexBuilder::new(pattern)
            .case_insensitive(!case_sensitive)
            .build()
            .expect("Regular expression was not valid");
        processes_iterator
            .filter(|process| regex.is_match(process.name()))
            .collect()
    } else {
        processes_iterator
            .filter(|process| {
                // Substring match; lowercase both sides unless case-sensitive.
                let mut name: String = process.name().to_string();
                let mut filter_pattern = pattern.to_string();
                if !case_sensitive {
                    name = name.to_lowercase();
                    filter_pattern = filter_pattern.to_lowercase();
                }
                name.contains(&filter_pattern)
            })
            .collect()
    };
    matching
        .iter()
        .for_each(|process| {
            println!("pid: {}, process: {:?}, status: {}",
                process.pid(),
                process.name(),
                process.status())
        });
    if matching.is_empty() {
        eprintln!("No processes found");
        exit(-1);
    }
    println!("\nKilling processes with sigterm");
    matching
        .iter()
        .for_each(|process| { process.kill(Signal::Term); });
    thread::sleep(time::Duration::from_millis(100));
    // We're doing waiting in 100ms steps so that we can exit as soon as the
    // processes terminate.
    // Bug fix: for wait_ms < 100 the old code computed max_wait_steps == 0
    // and then divided by zero below; clamp to at least one step.
    let max_wait_steps = (wait_ms / 100).max(1);
    for _ in 0..max_wait_steps {
        if get_alive_processes(&matching, &System::new()).is_empty() {
            exit(0);
        }
        thread::sleep(time::Duration::from_millis(wait_ms / max_wait_steps));
    }
    let s = System::new();
    let remaining = get_alive_processes(&matching, &s);
    if remaining.is_empty() {
        exit(0);
    }
    println!("The following processes did not terminate:");
    remaining
        .iter()
        .for_each(|process| println!("pid: {}, status: {}", process.pid(), process.status()));
    println!("\nKilling processes with sigkill");
    matching
        .iter()
        .for_each(|process| { process.kill(Signal::Kill); });
    // Brief pause so the kill signals are delivered before we exit.
    thread::sleep(time::Duration::from_millis(1));
}
/// Return the subset of `processes` that are still running according to
/// `system` (looked up by pid).
///
/// Takes a slice rather than `&Vec<_>` (clippy::ptr_arg); existing
/// `&Vec<&Process>` call sites still coerce to `&[&Process]`.
fn get_alive_processes<'a>(processes: &[&'a Process], system: &'a System) -> Vec<&'a Process> {
    processes
        .iter()
        .filter_map(move |process| system.get_process(process.pid()))
        .collect()
}
|
mod collidable;
mod simplex;
pub use collidable::*;
pub use simplex::*; |
#![allow(unused_variables, non_upper_case_globals, non_snake_case, unused_unsafe, non_camel_case_types, dead_code, clippy::all)]
// windows-rs-generated bindings for the MSChap server password-change API.
// CYPHER_BLOCK: one 8-byte cipher block used by the OWF password types.
#[derive(:: core :: clone :: Clone, :: core :: marker :: Copy)]
#[repr(C)]
#[cfg(feature = "Win32_Foundation")]
pub struct CYPHER_BLOCK {
    pub data: [super::super::Foundation::CHAR; 8],
}
#[cfg(feature = "Win32_Foundation")]
impl CYPHER_BLOCK {}
#[cfg(feature = "Win32_Foundation")]
impl ::core::default::Default for CYPHER_BLOCK {
    // Plain C data: zeroed memory is a valid default.
    fn default() -> Self {
        unsafe { ::core::mem::zeroed() }
    }
}
#[cfg(feature = "Win32_Foundation")]
impl ::core::fmt::Debug for CYPHER_BLOCK {
    fn fmt(&self, fmt: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result {
        fmt.debug_struct("CYPHER_BLOCK").field("data", &self.data).finish()
    }
}
#[cfg(feature = "Win32_Foundation")]
impl ::core::cmp::PartialEq for CYPHER_BLOCK {
    fn eq(&self, other: &Self) -> bool {
        self.data == other.data
    }
}
#[cfg(feature = "Win32_Foundation")]
impl ::core::cmp::Eq for CYPHER_BLOCK {}
#[cfg(feature = "Win32_Foundation")]
unsafe impl ::windows::core::Abi for CYPHER_BLOCK {
    // Passed across the FFI boundary by value, unchanged.
    type Abi = Self;
}
// ENCRYPTED_LM_OWF_PASSWORD: an encrypted LM one-way-function password
// (two cipher blocks, 16 bytes).
#[derive(:: core :: clone :: Clone, :: core :: marker :: Copy)]
#[repr(C)]
#[cfg(feature = "Win32_Foundation")]
pub struct ENCRYPTED_LM_OWF_PASSWORD {
    pub data: [CYPHER_BLOCK; 2],
}
#[cfg(feature = "Win32_Foundation")]
impl ENCRYPTED_LM_OWF_PASSWORD {}
#[cfg(feature = "Win32_Foundation")]
impl ::core::default::Default for ENCRYPTED_LM_OWF_PASSWORD {
    fn default() -> Self {
        unsafe { ::core::mem::zeroed() }
    }
}
#[cfg(feature = "Win32_Foundation")]
impl ::core::fmt::Debug for ENCRYPTED_LM_OWF_PASSWORD {
    fn fmt(&self, fmt: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result {
        fmt.debug_struct("ENCRYPTED_LM_OWF_PASSWORD").field("data", &self.data).finish()
    }
}
#[cfg(feature = "Win32_Foundation")]
impl ::core::cmp::PartialEq for ENCRYPTED_LM_OWF_PASSWORD {
    fn eq(&self, other: &Self) -> bool {
        self.data == other.data
    }
}
#[cfg(feature = "Win32_Foundation")]
impl ::core::cmp::Eq for ENCRYPTED_LM_OWF_PASSWORD {}
#[cfg(feature = "Win32_Foundation")]
unsafe impl ::windows::core::Abi for ENCRYPTED_LM_OWF_PASSWORD {
    type Abi = Self;
}
// LM_OWF_PASSWORD: an unencrypted LM one-way-function password (same
// layout as the encrypted variant).
#[derive(:: core :: clone :: Clone, :: core :: marker :: Copy)]
#[repr(C)]
#[cfg(feature = "Win32_Foundation")]
pub struct LM_OWF_PASSWORD {
    pub data: [CYPHER_BLOCK; 2],
}
#[cfg(feature = "Win32_Foundation")]
impl LM_OWF_PASSWORD {}
#[cfg(feature = "Win32_Foundation")]
impl ::core::default::Default for LM_OWF_PASSWORD {
    fn default() -> Self {
        unsafe { ::core::mem::zeroed() }
    }
}
#[cfg(feature = "Win32_Foundation")]
impl ::core::fmt::Debug for LM_OWF_PASSWORD {
    fn fmt(&self, fmt: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result {
        fmt.debug_struct("LM_OWF_PASSWORD").field("data", &self.data).finish()
    }
}
#[cfg(feature = "Win32_Foundation")]
impl ::core::cmp::PartialEq for LM_OWF_PASSWORD {
    fn eq(&self, other: &Self) -> bool {
        self.data == other.data
    }
}
#[cfg(feature = "Win32_Foundation")]
impl ::core::cmp::Eq for LM_OWF_PASSWORD {}
#[cfg(feature = "Win32_Foundation")]
unsafe impl ::windows::core::Abi for LM_OWF_PASSWORD {
    type Abi = Self;
}
// Thin wrapper over the MSChapSrvChangePassword export: declares the
// extern symbol locally and forwards all parameters unchanged.
#[cfg(feature = "Win32_Foundation")]
#[inline]
pub unsafe fn MSChapSrvChangePassword<'a, Param0: ::windows::core::IntoParam<'a, super::super::Foundation::PWSTR>, Param1: ::windows::core::IntoParam<'a, super::super::Foundation::PWSTR>, Param2: ::windows::core::IntoParam<'a, super::super::Foundation::BOOLEAN>>(servername: Param0, username: Param1, lmoldpresent: Param2, lmoldowfpassword: *const LM_OWF_PASSWORD, lmnewowfpassword: *const LM_OWF_PASSWORD, ntoldowfpassword: *const LM_OWF_PASSWORD, ntnewowfpassword: *const LM_OWF_PASSWORD) -> u32 {
    #[cfg(windows)]
    {
        #[link(name = "windows")]
        extern "system" {
            fn MSChapSrvChangePassword(servername: super::super::Foundation::PWSTR, username: super::super::Foundation::PWSTR, lmoldpresent: super::super::Foundation::BOOLEAN, lmoldowfpassword: *const LM_OWF_PASSWORD, lmnewowfpassword: *const LM_OWF_PASSWORD, ntoldowfpassword: *const LM_OWF_PASSWORD, ntnewowfpassword: *const LM_OWF_PASSWORD) -> u32;
        }
        // transmute is the generated code's uniform ABI-conversion idiom;
        // here the types are already identical.
        ::core::mem::transmute(MSChapSrvChangePassword(servername.into_param().abi(), username.into_param().abi(), lmoldpresent.into_param().abi(), ::core::mem::transmute(lmoldowfpassword), ::core::mem::transmute(lmnewowfpassword), ::core::mem::transmute(ntoldowfpassword), ::core::mem::transmute(ntnewowfpassword)))
    }
    #[cfg(not(windows))]
    unimplemented!("Unsupported target OS");
}
// Same pattern for the v2 export, which takes SAMPR-encrypted passwords.
#[cfg(feature = "Win32_Foundation")]
#[inline]
pub unsafe fn MSChapSrvChangePassword2<'a, Param0: ::windows::core::IntoParam<'a, super::super::Foundation::PWSTR>, Param1: ::windows::core::IntoParam<'a, super::super::Foundation::PWSTR>, Param4: ::windows::core::IntoParam<'a, super::super::Foundation::BOOLEAN>>(
    servername: Param0,
    username: Param1,
    newpasswordencryptedwitholdnt: *const SAMPR_ENCRYPTED_USER_PASSWORD,
    oldntowfpasswordencryptedwithnewnt: *const ENCRYPTED_LM_OWF_PASSWORD,
    lmpresent: Param4,
    newpasswordencryptedwitholdlm: *const SAMPR_ENCRYPTED_USER_PASSWORD,
    oldlmowfpasswordencryptedwithnewlmornt: *const ENCRYPTED_LM_OWF_PASSWORD,
) -> u32 {
    #[cfg(windows)]
    {
        #[link(name = "windows")]
        extern "system" {
            fn MSChapSrvChangePassword2(servername: super::super::Foundation::PWSTR, username: super::super::Foundation::PWSTR, newpasswordencryptedwitholdnt: *const SAMPR_ENCRYPTED_USER_PASSWORD, oldntowfpasswordencryptedwithnewnt: *const ENCRYPTED_LM_OWF_PASSWORD, lmpresent: super::super::Foundation::BOOLEAN, newpasswordencryptedwitholdlm: *const SAMPR_ENCRYPTED_USER_PASSWORD, oldlmowfpasswordencryptedwithnewlmornt: *const ENCRYPTED_LM_OWF_PASSWORD) -> u32;
        }
        ::core::mem::transmute(MSChapSrvChangePassword2(
            servername.into_param().abi(),
            username.into_param().abi(),
            ::core::mem::transmute(newpasswordencryptedwitholdnt),
            ::core::mem::transmute(oldntowfpasswordencryptedwithnewnt),
            lmpresent.into_param().abi(),
            ::core::mem::transmute(newpasswordencryptedwitholdlm),
            ::core::mem::transmute(oldlmowfpasswordencryptedwithnewlmornt),
        ))
    }
    #[cfg(not(windows))]
    unimplemented!("Unsupported target OS");
}
// SAMPR_ENCRYPTED_USER_PASSWORD: opaque 516-byte encrypted password blob
// as defined by the SAM remote protocol wire format.
#[derive(:: core :: clone :: Clone, :: core :: marker :: Copy)]
#[repr(C)]
pub struct SAMPR_ENCRYPTED_USER_PASSWORD {
    pub Buffer: [u8; 516],
}
impl SAMPR_ENCRYPTED_USER_PASSWORD {}
impl ::core::default::Default for SAMPR_ENCRYPTED_USER_PASSWORD {
    // Plain C data: zeroed memory is a valid default.
    fn default() -> Self {
        unsafe { ::core::mem::zeroed() }
    }
}
impl ::core::fmt::Debug for SAMPR_ENCRYPTED_USER_PASSWORD {
    fn fmt(&self, fmt: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result {
        fmt.debug_struct("SAMPR_ENCRYPTED_USER_PASSWORD").field("Buffer", &self.Buffer).finish()
    }
}
impl ::core::cmp::PartialEq for SAMPR_ENCRYPTED_USER_PASSWORD {
    fn eq(&self, other: &Self) -> bool {
        self.Buffer == other.Buffer
    }
}
impl ::core::cmp::Eq for SAMPR_ENCRYPTED_USER_PASSWORD {}
unsafe impl ::windows::core::Abi for SAMPR_ENCRYPTED_USER_PASSWORD {
    type Abi = Self;
}
|
// Copyright 2017 rust-ipfs-api Developers
//
// Licensed under the Apache License, Version 2.0, <LICENSE-APACHE or
// http://apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT or
// http://opensource.org/licenses/MIT>, at your option. This file may not be
// copied, modified, or distributed except according to those terms.
//
use ipfs_api_examples::ipfs_api::{response, IpfsApi, IpfsClient};
fn print_recursive(indent: usize, cmd: &response::CommandsResponse) {
let cmd_indent = " ".repeat(indent * 4);
let opt_indent = " ".repeat((indent + 1) * 4);
eprintln!("{}[{}]", cmd_indent, cmd.name);
if cmd.options.len() > 0 {
eprintln!("{}* options:", cmd_indent);
for options in cmd.options.iter() {
eprintln!("{}{}", opt_indent, &options.names[..].join(", "));
}
}
if cmd.subcommands.len() > 0 {
eprintln!("{}- subcommands:", cmd_indent);
for subcommand in cmd.subcommands.iter() {
print_recursive(indent + 1, subcommand);
}
}
}
// Creates an Ipfs client, and gets a list of available commands from the
// Ipfs server.
//
#[ipfs_api_examples::main]
async fn main() {
tracing_subscriber::fmt::init();
eprintln!("connecting to localhost:5001...");
let client = IpfsClient::default();
match client.commands().await {
Ok(commands) => print_recursive(0, &commands),
Err(e) => eprintln!("error getting commands: {}", e),
}
}
|
/// AoC 2021 day 16: decode the hex transmission in `input` into a bit
/// string, parse the outermost packet, and print both puzzle answers.
fn main() {
    // Expand each hex digit into its zero-padded 4-bit binary form.
    let input: String = include_str!("input").split("").filter(|x| !x.is_empty()).map(|digit| {
        let dec = u8::from_str_radix(digit, 16).unwrap();
        let bin = format!("{:b}", dec);
        format!("{:0>4}", bin)
    }).collect();
    let packet = parse_next_packet(input).0;
    println!("Part one: {}", packet.get_sum_version_num());
    println!("Part two: {}", packet.get_value());
}
/// Parse one BITS packet from the front of `inp`.
///
/// Returns the decoded packet and the unconsumed remainder of the bit
/// string. Literal packets (type id 4) carry a value directly; every other
/// type is an operator over sub-packets, encoded in one of two length modes.
fn parse_next_packet(inp: String) -> (Box<dyn Packet>, String) {
    // First 6 bits: 3-bit version + 3-bit type id.
    let header: Header = inp.parse().unwrap();
    if header.type_id == 4 {
        let (value, remaining) = parse_literal(inp[6..].to_owned());
        let packet = LiteralPacket {
            header,
            value
        };
        return (Box::new(packet), remaining);
    }
    let remaining_string: String;
    let inner_packets;
    // Bit 6 is the length-type id of an operator packet.
    match &inp[6..7] {
        "0" => {
            // Mode 0: the next 15 bits give the total bit length of the
            // sub-packet region.
            let inner_packet_bits_len = usize::from_str_radix(&inp[7..22], 2).unwrap();
            inner_packets = parse_all_packets(inp[22..(22+inner_packet_bits_len)].to_owned());
            remaining_string = inp[(22+inner_packet_bits_len)..].to_owned();
        },
        "1" => {
            // Mode 1: the next 11 bits give the number of sub-packets.
            let inner_packet_len = usize::from_str_radix(&inp[7..18], 2).unwrap();
            let (packets, new_remaing_string) = parse_n_packets(inp[18..].to_owned(), inner_packet_len);
            inner_packets = packets;
            remaining_string = new_remaing_string;
        }
        _ => panic!("Invalid bit")
    };
    // Map the non-literal type ids onto their operator semantics.
    let operation = match header.type_id {
        0 => Operation::Sum,
        1 => Operation::Product,
        2 => Operation::Min,
        3 => Operation::Max,
        5 => Operation::GreaterThan,
        6 => Operation::LessThan,
        7 => Operation::EqualTo,
        _ => panic!("Invalid type_id")
    };
    let packet = OperationalPacket {
        header,
        operation,
        inner: inner_packets
    };
    (Box::new(packet), remaining_string)
}
/// Parse packets back-to-back until the bit string is exhausted.
fn parse_all_packets(bit_string: String) -> Vec<Box<dyn Packet>> {
    let mut packets: Vec<Box<dyn Packet>> = Vec::new();
    let mut rest = bit_string;
    while !rest.is_empty() {
        let (packet, leftover) = parse_next_packet(rest);
        packets.push(packet);
        rest = leftover;
    }
    packets
}
/// Parse exactly `n` consecutive packets, returning them together with the
/// unread tail of the bit string.
fn parse_n_packets(bit_string: String, n: usize) -> (Vec<Box<dyn Packet>>, String) {
    let mut rest = bit_string;
    let mut packets = Vec::with_capacity(n);
    for _ in 0..n {
        let (packet, leftover) = parse_next_packet(rest);
        packets.push(packet);
        rest = leftover;
    }
    (packets, rest)
}
/// Decode a literal value: 5-bit groups whose leading bit flags
/// continuation; the remaining 4 bits of each group are payload.
/// Returns the value and the unconsumed tail of the bit string.
fn parse_literal(bit_string: String) -> (usize, String) {
    // Group index of the first group whose lead bit is '0' (the last one).
    let last_group = bit_string.chars().step_by(5).position(|ch| ch == '0').unwrap();
    let end = last_group * 5 + 5;
    // Keep only the 4 payload bits of each group (drop every 5th char).
    let payload: String = bit_string[..end]
        .chars()
        .enumerate()
        .filter(|(i, _)| i % 5 != 0)
        .map(|(_, ch)| ch)
        .collect();
    (usize::from_str_radix(&payload, 2).unwrap(), bit_string[end..].to_owned())
}
/// Common interface for decoded BITS packets.
trait Packet {
    /// Evaluate the packet's expression value (part two).
    fn get_value(&self) -> usize;
    /// Sum of this packet's version and all nested versions (part one).
    fn get_sum_version_num(&self) -> usize;
}
/// A type-4 packet holding a single literal value.
struct LiteralPacket {
    header: Header,
    value: usize
}
impl Packet for LiteralPacket {
    fn get_value(&self) -> usize {
        self.value
    }
    fn get_sum_version_num(&self) -> usize {
        // No sub-packets: just this packet's own version.
        self.header.version as usize
    }
}
/// An operator packet combining the values of its sub-packets.
struct OperationalPacket {
    header: Header,
    operation: Operation,
    inner: Vec<Box<dyn Packet>>
}
impl Packet for OperationalPacket {
    fn get_value(&self) -> usize {
        use Operation::*;
        match self.operation {
            Sum => self.inner.iter().map(|x| x.get_value()).sum(),
            Product => self.inner.iter().map(|x| x.get_value()).product(),
            Min => self.inner.iter().map(|x| x.get_value()).min().unwrap(),
            Max => self.inner.iter().map(|x| x.get_value()).max().unwrap(),
            // Per the BITS spec, comparison operators always have exactly
            // two sub-packets.
            GreaterThan => if self.inner[0].get_value() > self.inner[1].get_value() { 1 } else { 0 },
            LessThan => if self.inner[0].get_value() < self.inner[1].get_value() { 1 } else { 0 },
            EqualTo => if self.inner[0].get_value() == self.inner[1].get_value() { 1 } else { 0 },
        }
    }
    fn get_sum_version_num(&self) -> usize {
        // Own version plus the recursive version sums of all sub-packets.
        (self.header.version as usize) + self.inner.iter().map(|x| x.get_sum_version_num()).sum::<usize>()
    }
}
/// Operator semantics for non-literal (type id != 4) packets.
enum Operation {
    Sum,
    Product,
    Min,
    Max,
    GreaterThan,
    LessThan,
    EqualTo,
}
/// The 6-bit packet header: a 3-bit version followed by a 3-bit type id.
#[derive(Clone, Copy)]
struct Header {
    version: u8,
    type_id: u8
}
impl std::str::FromStr for Header {
    type Err = &'static str;
    /// Parse the first 6 chars of a bit string ("0"/"1" digits) into a
    /// header; extra characters are ignored.
    ///
    /// Returns `Err` when fewer than 6 bits are available or the bits are
    /// not binary digits (the original panicked in both cases, defeating
    /// the `Result` return). Also drops the needless explicit lifetime on
    /// `from_str` (clippy::needless_lifetimes).
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        // `get` avoids the panic that direct slicing causes on short input.
        let version_bits = s.get(0..3).ok_or("header requires at least 6 bits")?;
        let type_bits = s.get(3..6).ok_or("header requires at least 6 bits")?;
        let version = u8::from_str_radix(version_bits, 2).map_err(|_| "invalid version bits")?;
        let type_id = u8::from_str_radix(type_bits, 2).map_err(|_| "invalid type id bits")?;
        Ok(Header { version, type_id })
    }
}
|
use super::errors::{Error, ParamType};
use super::eval::eval_type;
use crate::flat::*;
use crate::lexer::Span;
use crate::raw::Spanned;
use std::collections::HashMap;
// NOTE: this and type check can be modified not to take the cache arg. instead,
// it will return a Kind and the Names that it has traversed, and the caller
// can store the fact that each of those Names has the returned kind.
/// Kind-check `ty` against `scope`, memoizing results for named types in
/// `cache`. Starts the recursive walk with an empty resolution path.
pub fn kind_check(
    ty: Spanned<&Type>,
    scope: &Libraries,
    cache: &mut HashMap<Name, Kind>,
) -> Result<Kind, Vec<Error>> {
    _kind_check(ty, scope, cache, Vec::new())
}
/// Recursive worker for `kind_check`. `seen` is the chain of named types
/// currently being resolved, used to report reference cycles instead of
/// looping forever.
fn _kind_check(
    ty: Spanned<&Type>,
    scope: &Libraries,
    cache: &mut HashMap<Name, Kind>,
    mut seen: Vec<(Name, Span)>,
) -> Result<Kind, Vec<Error>> {
    match &ty.value {
        Type::Identifier(name) => {
            // Memoized result from a previous traversal.
            if let Some(kind) = cache.get(name) {
                return Ok(*kind);
            }
            // Revisiting a name already on the resolution path is a cycle.
            if seen.iter().any(|(n, _)| n == name) {
                return Err(Error::VarCycle(seen).into());
            }
            // NOTE: once Name is refactored just contain IDs, it can implement Copy and we won't
            // need to clone explicitly here
            seen.push((name.clone(), ty.span));
            let kind = _kind_check(
                scope.get_type(ty.span.wrap(name))?.into(),
                scope,
                cache,
                seen,
            );
            // Cache failures as Kind::Any so the same error isn't reported
            // again on later lookups of this name.
            cache.insert(
                name.clone(),
                match kind {
                    Ok(k) => k,
                    Err(_) => Kind::Any,
                },
            );
            kind
        }
        // Arrays require both a layout (element type) and a size constraint.
        Type::Array(Array { element_type, size }) => Ok(Kind::Kind {
            layout: Param::required(element_type),
            constraints: Param::required(size),
        }),
        // Vectors require an element type; bounds are optional.
        Type::Vector(Vector {
            element_type,
            bounds,
        }) => Ok(Kind::Kind {
            layout: Param::required(element_type),
            constraints: Param::optional(bounds),
        }),
        // Strings take no layout parameter; bounds are optional.
        Type::Str(Str { bounds }) => Ok(Kind::Kind {
            layout: Param::None,
            constraints: Param::optional(bounds),
        }),
        Type::TypeSubstitution(TypeSubstitution {
            func,
            layout: layout_param,
            constraint: constraint_param,
        }) => {
            // Kind of the type constructor being applied.
            let func_kind = _kind_check(func.into(), scope, cache, seen)?;
            // Definition site of the constructor, for error reporting.
            let func_def = match &*func.value {
                Type::Identifier(name) => Some(scope.get_type(ty.span.wrap(name))?.span),
                _ => None,
            };
            match func_kind {
                Kind::Any => Ok(Kind::Any),
                Kind::Kind {
                    layout,
                    constraints,
                } => {
                    let mut errors = Vec::new();
                    // Try to consume each provided argument; `take` fails
                    // when the constructor doesn't accept that parameter.
                    let layout = layout
                        .take(layout_param)
                        .map_err(|_| Error::InvalidTypeParam {
                            func_call: func.span,
                            param: ParamType::Layout,
                            func_def,
                        });
                    let constraints =
                        constraints
                            .take(constraint_param)
                            .map_err(|_| Error::InvalidTypeParam {
                                func_call: func.span,
                                param: ParamType::Constraint,
                                func_def,
                            });
                    if let Err(err) = layout.clone() {
                        errors.push(err);
                    }
                    if let Err(err) = constraints.clone() {
                        errors.push(err);
                    }
                    // NOTE: we error if an argument is provided that is not supported by the func type,
                    // but don't if an argument that is "needed" is not provided (so we provide some
                    // "currying" like behavior to match fidlc's type constructors)
                    if errors.is_empty() {
                        Ok(Kind::Kind {
                            layout: layout.unwrap(),
                            constraints: constraints.unwrap(),
                        })
                    } else {
                        Err(errors)
                    }
                }
            }
        }
        // TODO: think about this some more
        Type::Any => Ok(Kind::Any),
        // Every other type is a fully-applied concrete type.
        _ => Ok(Kind::base_kind()),
    }
}
// Really the caller should handle Kind::Any to produce whatever result it
// expects, but currently fidlc will only ever expect concrete types, so
// that behavior is hardcoded here.
impl Kind {
    /// Whether this kind is usable where a fully-applied type is expected.
    /// `Kind::Any` counts as concrete (see the note above this impl).
    // NOTE(review): `||` makes a kind "concrete" as soon as *either*
    // parameter is satisfied, yet `missing()` below reports every parameter
    // that still needs a value — the two disagree. Confirm whether `&&`
    // was intended here.
    pub fn is_concrete(&self) -> bool {
        match self {
            Kind::Any => true,
            Kind::Kind {
                layout,
                constraints,
            } => !layout.needs_value() || !constraints.needs_value(),
        }
    }
    /// List the parameters that still require a value before this kind is
    /// fully applied; always empty for `Kind::Any`.
    pub fn missing(&self) -> Vec<ParamType> {
        let mut missing = Vec::new();
        match self {
            Kind::Any => missing,
            Kind::Kind {
                layout,
                constraints,
            } => {
                if layout.needs_value() {
                    missing.push(ParamType::Layout);
                }
                if constraints.needs_value() {
                    missing.push(ParamType::Constraint);
                }
                missing
            }
        }
    }
}
/// Check that `ty` does not have unconditionally infinite size.
/// On failure the `Err` payload is the chain of names forming the cycle.
pub fn recursion_check(ty: Spanned<&Type>, scope: &Libraries, cache: &HashMap<Name, Kind>) -> Result<(), Vec<(Name, Span)>> {
    let mut seen = Vec::new();
    match can_be_finite(ty, scope, &mut seen, cache) {
        true => Ok(()),
        false => Err(seen),
    }
}
/// Whether a value of `ty` can have finite size, i.e. the type is not
/// unconditionally recursive. `seen` accumulates the path of named types;
/// when the result is false it is left holding the offending cycle.
fn can_be_finite(ty: Spanned<&Type>, scope: &Libraries, seen: &mut Vec<(Name, Span)>, cache: &HashMap<Name, Kind>) -> bool {
    match ty.value {
        // A struct is finite only if every member is finite.
        Type::Struct(Struct { members }) => members
            .iter()
            .all(|member| can_be_finite((&member.value.ty).into(), scope, seen, cache)),
        Type::Table(_) => true,
        // A union is finite if at least one usable variant is finite.
        Type::Union(Union { members, .. }) => {
            members.iter().any(|member| match member.value.inner {
                UnionMemberInner::Reserved => false,
                UnionMemberInner::Used { ref ty, .. } => can_be_finite(ty.into(), scope, seen, cache),
            })
        }
        Type::Identifier(name) => {
            if let Some(Kind::Any) = cache.get(name) {
                // don't duplicate cycle errors
                return true;
            }
            // A name already on the current path means infinite recursion.
            if seen.iter().any(|(n, _)| n == name) {
                return false;
            }
            seen.push((name.clone(), ty.span));
            let ty = scope.get_type(ty.span.wrap(name)).unwrap();
            let can_be_finite = can_be_finite(ty.into(), scope, seen, cache);
            // Only pop on success: on failure `seen` is deliberately left
            // intact so the caller can report the full cycle.
            if can_be_finite {
                seen.pop();
            }
            can_be_finite
        }
        // Pointers add indirection, which always breaks recursion.
        Type::Ptr(_) => true,
        Type::Array(Array { element_type, .. }) => {
            if let Some(ref inner) = element_type {
                can_be_finite(inner.into(), scope, seen, cache)
            } else {
                true
            }
        }
        // a vector could always have 0 elements
        Type::Vector(_) | Type::Str(_) => true,
        Type::TypeSubstitution(_) => {
            // Evaluate the substitution, then check the resulting type.
            let evaled = eval_type(ty.into(), scope).unwrap();
            can_be_finite(evaled.span.wrap(&evaled.value), scope, seen, cache)
        }
        _ => true,
    }
}
|
// Copyright 2018 Grove Enterprises LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Execution of a projection
use std::cell::RefCell;
use std::rc::Rc;
use std::sync::Arc;
use arrow::array::ArrayRef;
use arrow::datatypes::{Field, Schema};
use arrow::record_batch::RecordBatch;
use super::error::Result;
use super::expression::RuntimeExpr;
use super::relation::Relation;
/// Relation that evaluates a list of projection expressions against each
/// batch produced by its input relation.
pub struct ProjectRelation {
    /// Output schema (one field per projection expression).
    schema: Arc<Schema>,
    /// Upstream relation the input batches are pulled from.
    input: Rc<RefCell<Relation>>,
    /// Compiled expressions, evaluated once per batch.
    expr: Vec<RuntimeExpr>,
}
impl ProjectRelation {
pub fn new(input: Rc<RefCell<Relation>>, expr: Vec<RuntimeExpr>, schema: Arc<Schema>) -> Self {
ProjectRelation {
input,
expr,
schema,
}
}
}
impl Relation for ProjectRelation {
    /// Pull the next batch from the input and evaluate every projection
    /// expression against it, producing a batch whose schema is derived
    /// from the expressions' names and types. Returns `None` when the
    /// input is exhausted.
    fn next(&mut self) -> Result<Option<RecordBatch>> {
        match self.input.borrow_mut().next()? {
            Some(batch) => {
                // Evaluate each compiled expression; collect() into Result
                // short-circuits on the first expression error.
                let projected_columns: Result<Vec<ArrayRef>> =
                    self.expr.iter().map(|e| e.get_func()(&batch)).collect();
                // NOTE(review): the schema is rebuilt from the expressions
                // for every batch rather than reusing `self.schema`;
                // confirm the two are expected to agree.
                let schema = Schema::new(
                    self.expr
                        .iter()
                        .map(|e| Field::new(&e.get_name(), e.get_type(), true))
                        .collect(),
                );
                let projected_batch: RecordBatch =
                    RecordBatch::new(Arc::new(schema), projected_columns?);
                Ok(Some(projected_batch))
            }
            None => Ok(None),
        }
    }
    /// Schema of the batches this relation produces.
    fn schema(&self) -> &Arc<Schema> {
        &self.schema
    }
}
#[cfg(test)]
mod tests {
    use super::super::super::logicalplan::Expr;
    use super::super::context::ExecutionContext;
    use super::super::datasource::CsvDataSource;
    use super::super::expression;
    use super::super::relation::DataSourceRelation;
    use super::*;
    use arrow::datatypes::{DataType, Field, Schema};
    /// Projecting column 0 of a two-column CSV yields a single-column
    /// batch whose field keeps the original column name.
    #[test]
    fn project_all_columns() {
        let schema = Arc::new(Schema::new(vec![
            Field::new("id", DataType::Int32, false),
            Field::new("first_name", DataType::Utf8, false),
        ]));
        let ds = CsvDataSource::new("test/data/people.csv", schema.clone(), 1024);
        let relation = Rc::new(RefCell::new(DataSourceRelation::new(Rc::new(
            RefCell::new(ds),
        ))));
        let context = ExecutionContext::new();
        // Expr::Column(0) selects only the "id" column.
        let projection_expr =
            vec![expression::compile_expr(&context, &Expr::Column(0), schema.as_ref()).unwrap()];
        let mut projection = ProjectRelation::new(relation, projection_expr, schema);
        let batch = projection.next().unwrap().unwrap();
        assert_eq!(1, batch.num_columns());
        assert_eq!("id", batch.schema().field(0).name());
    }
}
|
//! Synchronized droppable share-lock around internal state data.
use crate::inv_error::*;
use parking_lot::{Mutex, RwLock};
use std::sync::atomic;
use std::sync::Arc;
/// Callback for delayed initialization of InvShare
pub type InvShareInitCb<T> = Box<dyn FnOnce(T) + 'static + Send>;
/// Synchronized droppable share-lock around internal state data.
// Thin handle over a type-erased lock (Mutex or RwLock); clones of the
// handle share the same underlying state.
pub struct InvShare<T: 'static + Send>(
    Arc<dyn AsShareSDyn<T> + 'static + Send + Sync>,
);
// Manual Clone: only the Arc handle is duplicated, never the inner T.
impl<T: 'static + Send> Clone for InvShare<T> {
    fn clone(&self) -> Self {
        Self(self.0.clone())
    }
}
// Equality and hashing are identity-based: they delegate to the unique id
// stored in the backing lock, not to the guarded data.
impl<T: 'static + Send> PartialEq for InvShare<T> {
    fn eq(&self, oth: &Self) -> bool {
        self.0.dyn_eq(&oth.0)
    }
}
impl<T: 'static + Send> Eq for InvShare<T> {}
impl<T: 'static + Send> std::hash::Hash for InvShare<T> {
    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
        self.0.dyn_hash(state);
    }
}
// RwLock-backed constructors require T: Sync because readers share access.
impl<T: 'static + Send + Sync> InvShare<T> {
    /// Create a new share lock, backed by a parking_lot::RwLock.
    pub fn new_rw_lock(t: T) -> Self {
        // Delegate to the delayed constructor and initialize immediately.
        let (i, cb) = Self::new_rw_lock_delayed();
        cb(t);
        i
    }
    /// Create a new share lock, backed by a parking_lot::RwLock.
    /// Api calls before initializer will Err(ConnectionReset).
    pub fn new_rw_lock_delayed() -> (Self, InvShareInitCb<T>) {
        let (i, cb) = SDynRwLock::new();
        (Self(i), cb)
    }
}
impl<T: 'static + Send> InvShare<T> {
    /// Create a new share lock, backed by a parking_lot::Mutex.
    pub fn new_mutex(t: T) -> Self {
        // Delegate to the delayed constructor and initialize immediately.
        let (i, cb) = Self::new_mutex_delayed();
        cb(t);
        i
    }
    /// Create a new share lock, backed by a parking_lot::Mutex.
    /// Api calls before initializer will Err(ConnectionReset).
    pub fn new_mutex_delayed() -> (Self, InvShareInitCb<T>) {
        let (i, cb) = SDynMutex::new();
        (Self(i), cb)
    }
    /// Execute code with read-only access to the internal state.
    /// Errors with ConnectionReset if the state is uninitialized or closed.
    pub fn share_ref<R, F>(&self, f: F) -> InvResult<R>
    where
        F: FnOnce(&T) -> InvResult<R>,
    {
        let guard = self.0.get_ref();
        if guard.is_none() {
            return Err(std::io::ErrorKind::ConnectionReset.into());
        }
        // unwrap is safe: checked is_none() above while holding the guard.
        f((**guard).as_ref().unwrap())
    }
    /// Execute code with mut access to the internal state.
    /// The second param, if set to true, will drop the shared state,
    /// any further access will `Err(ConnectionReset)`.
    /// E.g. `share.share_mut(|_state, close| *close = true).unwrap();`
    pub fn share_mut<R, F>(&self, f: F) -> InvResult<R>
    where
        F: FnOnce(&mut T, &mut bool) -> InvResult<R>,
    {
        let (r, v) = {
            let mut guard = self.0.get_mut();
            if guard.is_none() {
                return Err(std::io::ErrorKind::ConnectionReset.into());
            }
            let mut close = false;
            let r = f((**guard).as_mut().unwrap(), &mut close);
            // If the closure requested closure, move the state out while
            // still holding the lock; it is dropped after release below.
            if close {
                let v = guard.take();
                (r, v)
            } else {
                (r, None)
            }
        };
        // make sure the lock is released before drop
        drop(v);
        r
    }
    /// Extract the contents of this share, closing it in the process.
    /// If the share was already closed, will return None.
    pub fn extract(&self) -> Option<T> {
        self.0.get_mut().take()
    }
    /// Returns true if the internal state has been dropped.
    pub fn is_closed(&self) -> bool {
        self.0.get_ref().is_none()
    }
    /// Explicitly drop the internal state.
    pub fn close(&self) {
        let v = self.0.get_mut().take();
        // make sure the lock is released before drop
        drop(v);
    }
}
// -- private -- //
/// Monotonic source of process-unique ids used by the eq/hash impls of
/// the share backends below.
static UNIQ: atomic::AtomicU64 = atomic::AtomicU64::new(1);
/// Boxed, type-erased read guard over the optional shared state.
type SDynGuardRef<'lt, T> = Box<dyn std::ops::Deref<Target = Option<T>> + 'lt>;
/// Boxed, type-erased write guard over the optional shared state.
type SDynGuardMut<'lt, T> =
    Box<dyn std::ops::DerefMut<Target = Option<T>> + 'lt>;
/// Object-safe abstraction over the two lock backends (Mutex / RwLock)
/// that can sit behind an `InvShare`.
trait AsShareSDyn<T: 'static + Send> {
    /// Acquire read access to the (optional) inner state.
    fn get_ref(&self) -> SDynGuardRef<'_, T>;
    /// Acquire write access to the (optional) inner state.
    fn get_mut(&self) -> SDynGuardMut<'_, T>;
    /// Equality via the backend's unique id, through `Any` downcasting.
    fn dyn_eq(&self, oth: &dyn std::any::Any) -> bool;
    /// Hash the backend's unique id through a type-erased hasher.
    fn dyn_hash(&self, hasher: &mut dyn std::hash::Hasher);
}
/// Mutex-backed share state plus a process-unique id (for eq/hash).
struct SDynMutex<T: 'static + Send>(Mutex<Option<T>>, u64);
impl<T: 'static + Send> SDynMutex<T> {
    /// Create an uninitialized (`None`) share and a one-shot callback that
    /// installs the initial value.
    fn new() -> (Arc<Self>, InvShareInitCb<T>) {
        let this = Arc::new(Self(
            Mutex::new(None),
            // Relaxed is sufficient: only uniqueness matters, not ordering.
            UNIQ.fetch_add(1, atomic::Ordering::Relaxed),
        ));
        let this2 = this.clone();
        let cb: InvShareInitCb<T> = Box::new(move |t| {
            *this2.0.lock() = Some(t);
        });
        (this, cb)
    }
}
impl<T: 'static + Send> AsShareSDyn<T> for SDynMutex<T> {
    fn get_ref(&self) -> SDynGuardRef<'_, T> {
        // A mutex has no shared mode; "read" access still takes the lock.
        Box::new(self.0.lock())
    }
    fn get_mut(&self) -> SDynGuardMut<'_, T> {
        Box::new(self.0.lock())
    }
    fn dyn_eq(&self, oth: &dyn std::any::Any) -> bool {
        // Only equal to another SDynMutex<T> carrying the same unique id.
        let c: &Self = match <dyn std::any::Any>::downcast_ref(oth) {
            None => return false,
            Some(c) => c,
        };
        self.1 == c.1
    }
    fn dyn_hash(&self, hasher: &mut dyn std::hash::Hasher) {
        // Box the `&mut dyn Hasher` so the generic `Hash::hash` accepts it.
        std::hash::Hash::hash(&self.1, &mut Box::new(hasher))
    }
}
/// RwLock-backed share state plus a process-unique id (for eq/hash).
struct SDynRwLock<T: 'static + Send + Sync>(RwLock<Option<T>>, u64);
impl<T: 'static + Send + Sync> SDynRwLock<T> {
    /// Create an uninitialized (`None`) share and a one-shot callback that
    /// installs the initial value.
    fn new() -> (Arc<Self>, InvShareInitCb<T>) {
        let this = Arc::new(Self(
            RwLock::new(None),
            // Relaxed is sufficient: only uniqueness matters, not ordering.
            UNIQ.fetch_add(1, atomic::Ordering::Relaxed),
        ));
        let this2 = this.clone();
        let cb: InvShareInitCb<T> = Box::new(move |t| {
            *this2.0.write() = Some(t);
        });
        (this, cb)
    }
}
impl<T: 'static + Send + Sync> AsShareSDyn<T> for SDynRwLock<T> {
    fn get_ref(&self) -> SDynGuardRef<'_, T> {
        // Shared read lock: multiple readers may hold this concurrently.
        Box::new(self.0.read())
    }
    fn get_mut(&self) -> SDynGuardMut<'_, T> {
        // Exclusive write lock.
        Box::new(self.0.write())
    }
    fn dyn_eq(&self, oth: &dyn std::any::Any) -> bool {
        // Only equal to another SDynRwLock<T> carrying the same unique id.
        let c: &Self = match <dyn std::any::Any>::downcast_ref(oth) {
            None => return false,
            Some(c) => c,
        };
        self.1 == c.1
    }
    fn dyn_hash(&self, hasher: &mut dyn std::hash::Hasher) {
        // Box the `&mut dyn Hasher` so the generic `Hash::hash` accepts it.
        std::hash::Hash::hash(&self.1, &mut Box::new(hasher))
    }
}
|
use std::process::{Command, Child};
use env::EnvList;
use super::Result;
use error::BenvError;
/// Spawn `program_with_args` as a child process with every entry of
/// `env_list` exported into the child's environment.
///
/// Errors if the command string contains no program name, or if spawning
/// the process fails.
pub fn run(program_with_args: &str, env_list: EnvList) -> Result<Child> {
    let (program, args) = try!(split_program_and_args(program_with_args));
    let mut command = Command::new(&program);
    command.args(&args);
    // Export every provided env var into the child's environment.
    for env in env_list {
        command.env(env.name, env.value);
    }
    let child = try!(command.spawn());
    Ok(child)
}
/// Split a command line into the program name and its argument list.
///
/// Returns `BenvError::MissingProgram` when the input contains no
/// non-whitespace token at all.
fn split_program_and_args(program_with_args: &str) -> Result<(String, Vec<&str>)> {
    let mut vec: Vec<&str> = program_with_args.split_whitespace().collect();
    if vec.is_empty() {
        return Err(BenvError::MissingProgram);
    }
    // First token is the program; everything after it is its arguments.
    let program = vec.remove(0).to_string();
    Ok((program, vec))
}
// #[cfg(test)]
// mod test {
// use super::*;
// use env::{Env, EnvList};
// #[test]
// fn test_simple_command() {
// // TODO
// //
// // With latest nightly, it seems impossible to write a proper test case
// // where stdout of the child process is captured.
// //
// // let envlist: EnvList = vec![Env::new("HELLO", "World")];
// // let child = run("echo $HELLO", envlist).unwrap().wait_with_output().unwrap();
// // println!("{:?}", child.stderr);
// // let result = String::from_utf8(child.stdout).unwrap();
// // assert_eq!(result, "world");
// }
// }
|
#![cfg(feature = "inline")]
use std::borrow::Cow;
use std::fmt;
use crate::text::{DiffableStr, TextDiff};
use crate::types::{Algorithm, Change, ChangeTag, DiffOp, DiffTag};
use crate::{capture_diff_deadline, get_diff_ratio};
use std::ops::Index;
use std::time::{Duration, Instant};
use super::utils::upper_seq_ratio;
/// Word-level index over a set of line slices: maps every word token back
/// to the string it came from and its byte offset inside that string.
struct MultiLookup<'bufs, 's, T: DiffableStr + ?Sized> {
    /// The original line slices being indexed.
    strings: &'bufs [&'s T],
    /// One entry per word token: (word slice, index into `strings`, byte offset).
    seqs: Vec<(&'s T, usize, usize)>,
}
impl<'bufs, 's, T: DiffableStr + ?Sized> MultiLookup<'bufs, 's, T> {
    /// Tokenize every input string into words, recording for each word
    /// which string it came from and its byte offset within that string.
    fn new(strings: &'bufs [&'s T]) -> MultiLookup<'bufs, 's, T> {
        let mut seqs = Vec::new();
        for (string_idx, string) in strings.iter().enumerate() {
            let mut offset = 0;
            // Unicode-aware word segmentation when the feature is enabled,
            // otherwise the simpler tokenizer.
            let iter = {
                #[cfg(feature = "unicode")]
                {
                    string.tokenize_unicode_words()
                }
                #[cfg(not(feature = "unicode"))]
                {
                    string.tokenize_words()
                }
            };
            for word in iter {
                seqs.push((word, string_idx, offset));
                offset += word.len();
            }
        }
        MultiLookup { strings, seqs }
    }
    /// Total number of word tokens across all input strings.
    pub fn len(&self) -> usize {
        self.seqs.len()
    }
    /// Map `len` consecutive tokens starting at `idx` back onto slices of
    /// the original strings, merging runs of adjacent tokens that came
    /// from the same string into one slice.
    fn get_original_slices(&self, idx: usize, len: usize) -> Vec<(usize, &'s T)> {
        // `last` accumulates (string index, start offset, accumulated byte
        // length) for the run of tokens currently being merged.
        let mut last = None;
        let mut rv = Vec::new();
        for offset in 0..len {
            let (s, str_idx, char_idx) = self.seqs[idx + offset];
            last = match last {
                None => Some((str_idx, char_idx, s.len())),
                Some((last_str_idx, start_char_idx, last_len)) => {
                    if last_str_idx == str_idx {
                        // Same source string: extend the current run.
                        Some((str_idx, start_char_idx, last_len + s.len()))
                    } else {
                        // Source string changed: flush the finished run.
                        rv.push((
                            last_str_idx,
                            self.strings[last_str_idx]
                                .slice(start_char_idx..start_char_idx + last_len),
                        ));
                        Some((str_idx, char_idx, s.len()))
                    }
                }
            };
        }
        // Flush the trailing run, if any.
        if let Some((str_idx, start_char_idx, len)) = last {
            rv.push((
                str_idx,
                self.strings[str_idx].slice(start_char_idx..start_char_idx + len),
            ));
        }
        rv
    }
}
impl<'bufs, 's, T: DiffableStr + ?Sized> Index<usize> for MultiLookup<'bufs, 's, T> {
    type Output = T;
    /// Index directly into the word tokens, ignoring the bookkeeping
    /// fields; this lets the diff algorithm treat the lookup as a sequence.
    fn index(&self, index: usize) -> &Self::Output {
        self.seqs[index].0
    }
}
/// Append `(emphasized, slice)` entries for line index `idx`, growing the
/// outer per-line vector as needed.
fn push_values<'s, T: DiffableStr + ?Sized>(
    v: &mut Vec<Vec<(bool, &'s T)>>,
    idx: usize,
    emphasized: bool,
    s: &'s T,
) {
    // Ensure slot `idx` exists before pushing into it.
    v.resize_with(v.len().max(idx + 1), Vec::new);
    // newlines cause all kinds of wacky stuff if they end up highlighted.
    // because of this we want to unemphasize all newlines we encounter.
    if emphasized {
        for seg in s.tokenize_lines_and_newlines() {
            v[idx].push((!seg.ends_with_newline(), seg));
        }
    } else {
        v[idx].push((false, s));
    }
}
/// Represents the expanded textual change with inline highlights.
///
/// This is like [`Change`] but with inline highlight info.
#[derive(Debug, PartialEq, Eq, Hash, Clone, Ord, PartialOrd)]
#[cfg_attr(feature = "serde", derive(serde::Serialize))]
pub struct InlineChange<'s, T: DiffableStr + ?Sized> {
    // Kind of change (equal / delete / insert).
    tag: ChangeTag,
    // Line index in the old sequence, if the line exists there.
    old_index: Option<usize>,
    // Line index in the new sequence, if the line exists there.
    new_index: Option<usize>,
    // `(emphasized, slice)` segments making up the line.
    values: Vec<(bool, &'s T)>,
}
impl<'s, T: DiffableStr + ?Sized> InlineChange<'s, T> {
    /// Returns the change tag.
    pub fn tag(&self) -> ChangeTag {
        self.tag
    }
    /// Returns the old index if available.
    pub fn old_index(&self) -> Option<usize> {
        self.old_index
    }
    /// Returns the new index if available.
    pub fn new_index(&self) -> Option<usize> {
        self.new_index
    }
    /// Returns the changed values.
    ///
    /// Each item is a tuple in the form `(emphasized, value)` where `emphasized`
    /// is true if it should be highlighted as an inline diff.
    ///
    /// Depending on the type of the underlying [`DiffableStr`] this value is
    /// more or less useful. If you always want to have a utf-8 string it's
    /// better to use the [`InlineChange::iter_strings_lossy`] method.
    pub fn values(&self) -> &[(bool, &'s T)] {
        &self.values
    }
    /// Iterates over all (potentially lossy) utf-8 decoded values.
    ///
    /// Each item is a tuple in the form `(emphasized, value)` where `emphasized`
    /// is true if it should be highlighted as an inline diff.
    pub fn iter_strings_lossy(&self) -> impl Iterator<Item = (bool, Cow<'_, str>)> {
        self.values()
            .iter()
            .map(|(emphasized, raw_value)| (*emphasized, raw_value.to_string_lossy()))
    }
    /// Returns `true` if this change does not end in a newline and must be
    /// followed up by one if line based diffs are used.
    pub fn missing_newline(&self) -> bool {
        // An empty change counts as newline-terminated (map_or(true, ..)),
        // so it reports no missing newline.
        !self.values.last().map_or(true, |x| x.1.ends_with_newline())
    }
}
impl<'s, T: DiffableStr + ?Sized> From<Change<&'s T>> for InlineChange<'s, T> {
    /// Lift a plain change into an inline change consisting of a single,
    /// unemphasized segment.
    fn from(change: Change<&'s T>) -> InlineChange<'s, T> {
        InlineChange {
            tag: change.tag(),
            old_index: change.old_index(),
            new_index: change.new_index(),
            values: vec![(false, change.value())],
        }
    }
}
impl<'s, T: DiffableStr + ?Sized> fmt::Display for InlineChange<'s, T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Wrap emphasized segments in `-...-` / `+...+` markers depending
        // on whether the change is a delete or an insert.
        for (emphasized, value) in self.iter_strings_lossy() {
            let marker = match (emphasized, self.tag) {
                (false, _) | (true, ChangeTag::Equal) => "",
                (true, ChangeTag::Delete) => "-",
                (true, ChangeTag::Insert) => "+",
            };
            write!(f, "{}{}{}", marker, value, marker)?;
        }
        // Keep line-based output well-formed when the change lacks a
        // trailing newline.
        if self.missing_newline() {
            writeln!(f)?;
        }
        Ok(())
    }
}
/// Minimum similarity ratio; below this, inline (word-level) diffing is
/// skipped and plain per-line changes are emitted instead.
const MIN_RATIO: f32 = 0.5;
/// Deadline in milliseconds for the inner word-level diff.
const TIMEOUT_MS: u64 = 500;
/// Build the iterator of [`InlineChange`]s for a single diff op.
///
/// Only `Replace` ops get word-level highlighting; everything else (and
/// replacements that are too dissimilar, per `MIN_RATIO`) degrades to the
/// regular per-line changes converted via `From<Change>`.
pub(crate) fn iter_inline_changes<'x, 'diff, 'old, 'new, 'bufs, T>(
    diff: &'diff TextDiff<'old, 'new, 'bufs, T>,
    op: &DiffOp,
) -> impl Iterator<Item = InlineChange<'x, T>> + 'diff
where
    T: DiffableStr + ?Sized,
    'x: 'diff,
    'old: 'x,
    'new: 'x,
{
    let (tag, old_range, new_range) = op.as_tag_tuple();
    // Only Replace ops benefit from inline highlighting.
    if let DiffTag::Equal | DiffTag::Insert | DiffTag::Delete = tag {
        return Box::new(diff.iter_changes(op).map(|x| x.into())) as Box<dyn Iterator<Item = _>>;
    }
    let mut old_index = old_range.start;
    let mut new_index = new_range.start;
    let old_slices = &diff.old_slices()[old_range];
    let new_slices = &diff.new_slices()[new_range];
    // Cheap upper bound check: skip the word diff when the two sides
    // cannot possibly be similar enough.
    if upper_seq_ratio(old_slices, new_slices) < MIN_RATIO {
        return Box::new(diff.iter_changes(op).map(|x| x.into())) as Box<dyn Iterator<Item = _>>;
    }
    // Word-level diff across the two sides, bounded by TIMEOUT_MS.
    let old_lookup = MultiLookup::new(old_slices);
    let new_lookup = MultiLookup::new(new_slices);
    let ops = capture_diff_deadline(
        Algorithm::Patience,
        &old_lookup,
        0..old_lookup.len(),
        &new_lookup,
        0..new_lookup.len(),
        Some(Instant::now() + Duration::from_millis(TIMEOUT_MS)),
    );
    // If the word diff turned out mostly dissimilar, fall back as well.
    if get_diff_ratio(&ops, old_lookup.len(), new_lookup.len()) < MIN_RATIO {
        return Box::new(diff.iter_changes(op).map(|x| x.into())) as Box<dyn Iterator<Item = _>>;
    }
    // Collect per-line `(emphasized, slice)` segments for both sides;
    // only inserted/deleted/replaced words are emphasized.
    let mut old_values = Vec::<Vec<_>>::new();
    let mut new_values = Vec::<Vec<_>>::new();
    for op in ops {
        match op {
            DiffOp::Equal {
                old_index,
                len,
                new_index,
            } => {
                for (idx, slice) in old_lookup.get_original_slices(old_index, len) {
                    push_values(&mut old_values, idx, false, slice);
                }
                for (idx, slice) in new_lookup.get_original_slices(new_index, len) {
                    push_values(&mut new_values, idx, false, slice);
                }
            }
            DiffOp::Delete {
                old_index, old_len, ..
            } => {
                for (idx, slice) in old_lookup.get_original_slices(old_index, old_len) {
                    push_values(&mut old_values, idx, true, slice);
                }
            }
            DiffOp::Insert {
                new_index, new_len, ..
            } => {
                for (idx, slice) in new_lookup.get_original_slices(new_index, new_len) {
                    push_values(&mut new_values, idx, true, slice);
                }
            }
            DiffOp::Replace {
                old_index,
                old_len,
                new_index,
                new_len,
            } => {
                for (idx, slice) in old_lookup.get_original_slices(old_index, old_len) {
                    push_values(&mut old_values, idx, true, slice);
                }
                for (idx, slice) in new_lookup.get_original_slices(new_index, new_len) {
                    push_values(&mut new_values, idx, true, slice);
                }
            }
        }
    }
    // Emit all old-side (Delete) lines first, then all new-side (Insert)
    // lines, numbering them from the op's start indices.
    let mut rv = Vec::new();
    for values in old_values {
        rv.push(InlineChange {
            tag: ChangeTag::Delete,
            old_index: Some(old_index),
            new_index: None,
            values,
        });
        old_index += 1;
    }
    for values in new_values {
        rv.push(InlineChange {
            tag: ChangeTag::Insert,
            old_index: None,
            new_index: Some(new_index),
            values,
        });
        new_index += 1;
    }
    Box::new(rv.into_iter()) as Box<dyn Iterator<Item = _>>
}
#[test]
fn test_line_ops_inline() {
    // Snapshot test: inline changes across all ops of a line diff.
    let diff = TextDiff::from_lines(
        "Hello World\nsome stuff here\nsome more stuff here\n\nAha stuff here\nand more stuff",
        "Stuff\nHello World\nsome amazing stuff here\nsome more stuff here\n",
    );
    assert_eq!(diff.newline_terminated(), true);
    let changes = diff
        .ops()
        .iter()
        .flat_map(|op| diff.iter_inline_changes(op))
        .collect::<Vec<_>>();
    insta::assert_debug_snapshot!(&changes);
}
#[test]
#[cfg(feature = "serde")]
fn test_serde() {
    // Snapshot test: JSON serialization of the inline changes.
    let diff = TextDiff::from_lines(
        "Hello World\nsome stuff here\nsome more stuff here\n\nAha stuff here\nand more stuff",
        "Stuff\nHello World\nsome amazing stuff here\nsome more stuff here\n",
    );
    assert_eq!(diff.newline_terminated(), true);
    let changes = diff
        .ops()
        .iter()
        .flat_map(|op| diff.iter_inline_changes(op))
        .collect::<Vec<_>>();
    let json = serde_json::to_string_pretty(&changes).unwrap();
    insta::assert_snapshot!(&json);
}
|
#![allow(non_snake_case, non_camel_case_types, non_upper_case_globals, clashing_extern_declarations, clippy::all)]
#[link(name = "windows")]
extern "system" {}
// Opaque handle types from the Windows API surface; all are raw void
// pointers with no further structure exposed at this binding level.
pub type PreallocatedWorkItem = *mut ::core::ffi::c_void;
pub type SignalHandler = *mut ::core::ffi::c_void;
pub type SignalNotifier = *mut ::core::ffi::c_void;
|
#![allow(
clippy::missing_docs_in_private_items,
clippy::missing_inline_in_public_items,
clippy::implicit_return,
clippy::result_unwrap_used,
clippy::option_unwrap_used,
clippy::print_stdout,
clippy::use_debug,
clippy::integer_arithmetic,
clippy::default_trait_access,
)]
#![forbid(unsafe_code)]
extern crate pest;
#[macro_use]
extern crate pest_derive;
use pest::iterators::{Pairs, Pair};
use crate::sqm::file::File;
use crate::sqm::array::Array;
use crate::sqm::class::Class;
use pest::Parser;
mod sqm;
/// Pest parser generated from the `sqm.pest` grammar.
#[derive(Parser)]
#[grammar = "sqm.pest"]
pub struct SQMParser;
/// Deserialize a JSON string into a `File`.
///
/// NOTE(review): panics (via `unwrap`) on malformed JSON; callers must
/// guarantee valid input.
#[inline]
pub fn deserialize_json(json_string: &str) -> File{
    serde_json::from_str(json_string).unwrap()
}
/// Parse raw SQM text and serialize the parsed result as JSON.
///
/// NOTE(review): panics (via `unwrap`) if the input does not match the
/// `sqm.pest` grammar.
#[inline]
pub fn serialize_sqm_string(sqm_data: &str, pretty: bool) -> String{
    let parsed: Pairs<Rule> = SQMParser::parse(Rule::file, sqm_data).unwrap();
    let parsed_file = parse_file(parsed);
    serialize_pairs(&parsed_file, pretty)
}
/// Serialize `filedata` to JSON, pretty-printed when `pretty` is set.
///
/// Panics (via `unwrap`) if serialization fails.
#[inline]
pub fn serialize_pairs(filedata: &File, pretty: bool) -> String{
    // Pick the encoder first, then unwrap the shared result once.
    let encoded = if pretty {
        serde_json::to_string_pretty(filedata)
    } else {
        serde_json::to_string(filedata)
    };
    encoded.unwrap()
}
/// Write `filedata` back out in SQM format to `filename`.
///
/// NOTE(review): panics if the file cannot be created; `walk` is assumed
/// to perform the actual writing — confirm in the `sqm::file` module.
#[inline]
pub fn serialize_to_sqm(filedata: &File, filename: &str){
    let file = std::fs::File::create(filename).unwrap();
    filedata.walk(&file);
}
/// Parse the tokenized top-level `file` rule into a `File` of items,
/// arrays and classes.
#[inline]
pub fn parse_file(mut filedata: Pairs<Rule>) -> File{
    // The top-level `file` pair is the first element; `next()` replaces
    // the previous `enumerate().nth(0).unwrap().1` detour.
    let file = filedata.next().unwrap().into_inner();
    let mut file_strc: File = File{
        items: Default::default(),
        arrays: Default::default(),
        classes: Default::default()
    };
    for top_level_entries in file {
        match top_level_entries.as_rule() {
            Rule::item => {
                let items = parse_item(top_level_entries);
                file_strc.items.insert(items.0, items.1);
            }
            Rule::class => {
                let items = parse_class(top_level_entries);
                file_strc.classes.insert(items.0, items.1);
            }
            Rule::array => {
                let items = parse_array(top_level_entries);
                file_strc.arrays.insert(items.0, items.1);
            }
            // These rules never surface at the top level of a parsed file.
            Rule::WHITESPACE | Rule::char | Rule::string | Rule::strict_string | Rule::number | Rule::key | Rule::value | Rule::file => unreachable!()
        }
    }
    file_strc
}
/// Recursively build a `(name, Class)` pair from a `class` rule: the key
/// child is the class name; item/array/class children populate the body.
#[inline]
pub fn parse_class(item: Pair<Rule>) -> (String, Class) {
    let inner = item.into_inner();
    let mut retclass: Class = Class{
        items: Default::default(),
        arrays: Default::default(),
        classes: Default::default()
    };
    let mut key: String = "".to_string();
    for items in inner {
        match items.as_rule() {
            Rule::key => {
                key = get_string_from_pair(&items)
            }
            Rule::item => {
                let items = parse_item(items);
                retclass.items.insert(items.0, items.1);
            }
            Rule::class => {
                let items = parse_class(items);
                retclass.classes.insert(items.0, items.1);
            }
            Rule::array => {
                let items = parse_array(items);
                retclass.arrays.insert(items.0, items.1);
            }
            // These rules never surface directly inside a class body.
            Rule::WHITESPACE | Rule::char | Rule::string | Rule::strict_string | Rule::number | Rule::value | Rule::file => unreachable!()
        }
    }
    //println!("{:#?}", retclass);
    (key, retclass)
}
/// Extract the `(key, value)` pair of a single `item` rule; the grammar
/// guarantees exactly two children.
#[inline]
pub fn parse_item(item: Pair<Rule>) -> (String, String){
    let mut inner = item.into_inner();
    let key = get_string_from_pair(&inner.next().unwrap());
    let value = get_string_from_pair(&inner.next().unwrap());
    (key, value)
}
/// Build a `(name, Array)` pair from an `array` rule: one key child names
/// the array; each value child appends an element.
#[inline]
pub fn parse_array(item: Pair<Rule>) -> (String, Array){
    let inner = item.into_inner();
    let mut retarray: Array = Array{ values: vec![] };
    let mut key: String = "".to_string();
    for x in inner {
        match x.as_rule() {
            Rule::key => {
                key = get_string_from_pair(&x);
            }
            Rule::value => {
                retarray.values.push(get_string_from_pair(&x));
            }
            // These rules never surface directly inside an array body.
            Rule::WHITESPACE | Rule::char | Rule::string | Rule::strict_string | Rule::number | Rule::item | Rule::array | Rule::class | Rule::file => unreachable!()
        }
    }
    (key, retarray)
}
#[inline]
fn get_string_from_pair(pair: &Pair<Rule>) -> String{
String::from(pair.as_span().as_str())
} |
use itertools::Itertools;
use std::collections::HashMap;
use std::collections::HashSet;
fn main() {
    let input = std::fs::read_to_string("input").unwrap();
    // Each line: "ing1 ing2 ... (contains a, b)" -> (ingredients, allergens).
    let foods: Vec<(Vec<&str>, HashSet<&str>)> = input
        .lines()
        .map(|l| {
            let mut food = l.trim_end_matches(")").split(" (contains ");
            (
                food.next().unwrap().split_whitespace().collect(),
                food.next().unwrap().split(", ").collect(),
            )
        })
        .collect();
    // Sorted, de-duplicated list of every allergen mentioned in any food.
    let allergens = foods
        .iter()
        .flat_map(|f| f.1.iter().copied())
        .sorted()
        .unique()
        .collect::<Vec<&str>>();
    // Fixpoint: per allergen, intersect the ingredient sets of all foods
    // listing it (excluding ingredients already claimed by another
    // allergen); commit whenever exactly one candidate remains; repeat
    // until every allergen is resolved.
    let mut identified = HashMap::new();
    while allergens.len() > identified.len() {
        for allergen in &allergens {
            if identified.contains_key(allergen) {
                continue;
            }
            let candidates = foods
                .iter()
                .filter(|f| f.1.contains(allergen))
                .map(|f| {
                    f.0.iter()
                        .filter(|i| !identified.values().any(|i2| i == &i2))
                        .copied()
                        .collect::<HashSet<&str>>()
                })
                .fold1(|a, b| a.intersection(&b).copied().collect())
                .unwrap();
            if candidates.len() == 1 {
                identified.insert(allergen, candidates.iter().next().copied().unwrap());
            }
        }
    }
    // Part 1: count ingredient occurrences that carry no allergen.
    let p1 = foods
        .iter()
        .flat_map(|f| f.0.iter())
        .filter(|i| !identified.values().any(|i2| i == &i2))
        .count();
    dbg!(p1);
    // Part 2: dangerous ingredients joined in allergen-alphabetical order.
    let p2 = identified.iter().sorted_by_key(|p| p.0).map(|p| p.1).join(",");
    dbg!(p2);
}
|
// Re-export the variants so C-style unscoped names resolve directly.
pub use VkSamplerMipmapMode::*;
/// Vulkan `VkSamplerMipmapMode`: selects how mipmap levels are sampled.
#[repr(u32)]
#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)]
pub enum VkSamplerMipmapMode {
    VK_SAMPLER_MIPMAP_MODE_NEAREST = 0,
    VK_SAMPLER_MIPMAP_MODE_LINEAR = 1,
}
|
use {
std::{env, fs, io},
tracing::info,
};
/// Entry point: picks a puzzle (year, day) from CLI args or stdin, reads
/// the matching input file, and runs every registered part function.
fn main() {
    tracing_subscriber::fmt().with_env_filter("info").init();
    advent_of_code::init_parts();
    // Get year/day either from the command line or interactively.
    let args: Vec<String> = env::args().collect();
    let mut year = String::new();
    let mut day = String::new();
    // Program name + year + day means at least 3 args. The previous
    // `>= 2` check panicked on `args[2]` when only a year was supplied.
    if args.len() >= 3 {
        year = args[1].clone();
        day = args[2].clone();
    } else {
        info!("Enter year: ");
        io::stdin()
            .read_line(&mut year)
            .expect("Failed to read line");
        info!("Enter day: ");
        io::stdin()
            .read_line(&mut day)
            .expect("Failed to read line");
    }
    // Parse year as number
    year = year.trim().to_string();
    let year_num: usize = match year.parse() {
        Ok(num) => num,
        Err(_) => {
            info!("Invalid year number: {}", year);
            return;
        }
    };
    // Parse day as number
    day = day.trim().to_string();
    let day_num: u8 = match day.parse() {
        Ok(num) => num,
        Err(_) => {
            info!("Invalid day number: {}", day);
            return;
        }
    };
    // Read input file from inputs/year####/day##.txt relative to cwd.
    let cwd = env::current_dir().unwrap();
    let filename = cwd
        .join("inputs")
        .join(format!("year{year_num:04}/day{day_num:02}.txt"));
    info!("Reading {}", filename.display());
    let input = fs::read_to_string(filename).expect("Error while reading");
    // Look up and run each registered part, numbering from 1.
    let parts = advent_of_code::PARTS.get(&(year_num, day_num));
    for (i, part) in parts.iter().enumerate().map(|(i, part)| (i + 1, part)) {
        info!("Running Part {}", i);
        part(&input);
        info!("Part {} complete.", i);
    }
}
|
//! Prints the elapsed time every 1 second and quits on Ctrl+C.
#[macro_use]
extern crate crossbeam_channel;
extern crate signal_hook;
use std::io;
use std::thread;
use std::time::{Duration, Instant};
use crossbeam_channel::{bounded, tick, Receiver};
use signal_hook::iterator::Signals;
use signal_hook::SIGINT;
// Creates a channel that gets a message every time `SIGINT` is signalled.
fn sigint_notifier() -> io::Result<Receiver<()>> {
    let (s, r) = bounded(100);
    let signals = Signals::new(&[SIGINT])?;
    // Forwarding thread: exits once the receiving side has been dropped
    // (send returns Err).
    thread::spawn(move || {
        for _ in signals.forever() {
            if s.send(()).is_err() {
                break;
            }
        }
    });
    Ok(r)
}
// Prints the elapsed time as "seconds.milliseconds".
fn show(dur: Duration) {
    let secs = dur.as_secs();
    // Whole milliseconds within the current second.
    let millis = dur.subsec_nanos() / 1_000_000;
    println!("Elapsed: {}.{:03} sec", secs, millis);
}
fn main() {
    let start = Instant::now();
    // Fires once per second.
    let update = tick(Duration::from_secs(1));
    let ctrl_c = sigint_notifier().unwrap();
    loop {
        select! {
            recv(update) -> _ => {
                show(start.elapsed());
            }
            recv(ctrl_c) -> _ => {
                // Ctrl+C: print a final reading and exit.
                println!();
                println!("Goodbye!");
                show(start.elapsed());
                break;
            }
        }
    }
}
|
use crate::raw_flash_algorithm::RawFlashAlgorithm;
use crate::Chip;
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
/// This describes a chip family with all its variants.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub(crate) struct ChipFamily {
    /// This is the name of the chip family in base form.
    /// E.g. `nRF52832`.
    pub(crate) name: String,
    /// This vector holds all the variants of the family.
    pub(crate) variants: Vec<Chip>,
    /// This map holds all available algorithms, keyed by name.
    pub(crate) flash_algorithms: HashMap<String, RawFlashAlgorithm>,
    /// The name of the core type.
    /// E.g. `M0` or `M4`.
    pub(crate) core: String,
}
impl ChipFamily {
/// Create a new `ChipFamily`.
pub(crate) fn new(
name: String,
flash_algorithms: HashMap<String, RawFlashAlgorithm>,
core: String,
) -> Self {
Self {
name,
variants: Vec::new(),
flash_algorithms,
core,
}
}
}
|
use crate::traits::RendererBase;
use piston::input::{UpdateArgs, RenderArgs, Key};
use opengl_graphics::GlGraphics;
use graphics::Context;
use graphics::{rectangle};
use crate::audio::AnalyzedAudio;
/**
* Each renderer consists of three things. First, the struct defining its
* state. Secondly, an impl that defines the specific methods of the struct
* that won't be called by the application. And third, the trait implementation
* which defines all methods that are necessary as the application expects them.
*/
/// Renderer state: cached window dimensions plus the animated hue angle.
pub struct StereoTree {
    width: u32,
    height: u32,
    hue: f32
}
impl StereoTree {
    /// Build a renderer with placeholder dimensions (refreshed on every
    /// render) and the hue cycle starting at zero.
    pub fn create () -> Self {
        Self {
            width: 200,
            height: 200,
            hue: 0.0,
        }
    }
    /// Helper function to calculate an opaque RGBA color from a hue angle
    /// in degrees (alpha is always 1.0).
    fn hue_to_rgb (hue: f32) -> [f32; 4] {
        // Which of the six 60-degree segments the hue falls into (1..=6
        // for in-range hues; larger values land in the catch-all arm).
        let segment = (hue as f64 / 60.0).trunc() as u32 + 1;
        // Position within the segment, normalized to 0.0..1.0.
        let fraction: f32 = hue % 60.0 / 60.0;
        let (r, g, b) = match segment {
            1 => (1.0, fraction, 0.0),
            2 => (fraction, 1.0, 0.0),
            3 => (0.0, 1.0, fraction),
            4 => (0.0, fraction, 1.0),
            5 => (fraction, 0.0, 1.0),
            // Segment 6, plus any out-of-range hue.
            _ => (1.0, 0.0, fraction),
        };
        [r, g, b, 1.0]
    }
}
impl RendererBase for StereoTree {
    fn render (&mut self, gl: &mut GlGraphics, context: Context, args: &RenderArgs, audio: &AnalyzedAudio) {
        // Always make sure to use the correct sizes to calculate with
        self.width = args.draw_size[0];
        self.height = args.draw_size[1];
        let center: f64 = self.width as f64 / 2.0;
        let center_bar_width: f64 = self.width as f64 * 0.001; // 0.1 % of the window width
        // Bars may extend at most a third of the window to either side.
        let max_width: f64 = self.width as f64 / 3.0;
        if audio.frequency[0].is_empty() {
            return; // Nothing to render
        }
        // Before we are done rendering, display the center bar
        rectangle([1.0, 1.0, 1.0, 1.0], [center - center_bar_width / 2.0, 0.0, center_bar_width, self.height as f64], context.transform, gl);
        // We don't want 22kHz displayed as this would be WAY too unreasonable,
        // so we need to find the correct cutoff frequency for which to perform
        // some crude lowpass filter.
        // f / bin = i
        let mut cutoff_frequency = (20_000.0 / audio.bin_frequency).floor() as usize;
        if cutoff_frequency > audio.frequency[0].len() {
            cutoff_frequency = audio.frequency[0].len();
        }
        // Determine how high the frequency bars may be at the most
        // let rectangle_width: f64 = (self.width as f64 / 2.0) / cutoff_frequency as f64;
        let frequency_bar_height: f64 = self.height as f64 / cutoff_frequency as f64;
        let amplitude_bar_height: f64 = self.height as f64 / audio.amplitude[0].len() as f64;
        // Second, calculate the maximum frequency amplitude (= max width)
        let mut max_frequency_amp = 0.0;
        for sample in audio.frequency[0][0..cutoff_frequency].iter() {
            if sample.abs() as f64 > max_frequency_amp {
                max_frequency_amp = sample.abs() as f64;
            }
        }
        // Third, calculate the maximum volume amplitude (= max width)
        let mut max_volume_amp = 0.0;
        for sample in audio.amplitude[0].iter() {
            if sample.abs() as f64 > max_volume_amp {
                max_volume_amp = sample.abs() as f64;
            }
        }
        // Display the bars! First the amplitude (as grey underlying bars) ...
        for i in 0..audio.amplitude[0].len() {
            let amplitude_left = audio.amplitude[0][i] as f64;
            let amplitude_right = audio.amplitude[1][i] as f64;
            let mut width_left: f64 = amplitude_left / max_volume_amp; // val from 0.0-1.0
            let mut width_right: f64 = amplitude_right / max_volume_amp;
            // Normalize
            if width_left > 1.0 {
                width_left = 1.0;
            }
            if width_right > 1.0 {
                width_right = 1.0;
            }
            // Transform to final values
            width_left *= max_width;
            width_right *= max_width;
            // Now calculate the rectangles
            let posx_left = center - width_left;
            let posy_left = self.height as f64 - amplitude_bar_height * (i as f64 + 1.0);
            let posx_right = center; // Always begins in the center
            let posy_right = posy_left;
            // Grey bars fade in as the hue cycles from 0 toward 360.
            let opacity = self.hue / 360.0;
            rectangle([0.3, 0.3, 0.3, opacity], [posx_left, posy_left, width_left, amplitude_bar_height], context.transform, gl);
            rectangle([0.3, 0.3, 0.3, opacity], [posx_right, posy_right, width_right, amplitude_bar_height], context.transform, gl);
        }
        // ... and then a colourful frequency on top
        for i in 0..cutoff_frequency {
            // NOTE(review): `%` binds tighter than `+`, so this computes
            // `self.hue + ((i as f32) % 360.0)`, not `(hue + i) % 360`;
            // out-of-range sums fall into hue_to_rgb's catch-all arm —
            // confirm whether wrapping of the sum was intended.
            let col = Self::hue_to_rgb(self.hue + i as f32 % 360.0);
            let frequency_left = audio.frequency[0][i] as f64;
            let frequency_right = audio.frequency[1][i] as f64;
            let mut width_left: f64 = frequency_left.abs() / max_frequency_amp; // val from 0.0-1.0
            let mut width_right: f64 = frequency_right.abs() / max_frequency_amp;
            // Normalize
            if width_left > 1.0 {
                width_left = 1.0;
            }
            if width_right > 1.0 {
                width_right = 1.0;
            }
            // Transform to final values
            width_left *= max_width;
            width_right *= max_width;
            // Now calculate the rectangles
            let posx_left = center - width_left;
            let posy_left = self.height as f64 - frequency_bar_height * (i as f64 + 1.0);
            let posx_right = center; // Always begins in the center
            let posy_right = posy_left;
            rectangle(col, [posx_left, posy_left, width_left, frequency_bar_height], context.transform, gl);
            rectangle(col, [posx_right, posy_right, width_right, frequency_bar_height], context.transform, gl);
        }
        // And done!
    }
    fn update (&mut self, _args: &UpdateArgs) {
        // Advance the hue slightly each tick, wrapping after 360 degrees.
        self.hue += 0.1;
        if self.hue > 360.0 {
            self.hue = 0.0;
        }
    }
    fn on_cursor_movement (&mut self, _x: f64, _y: f64) {
        // This renderer does not react to mouse events :(
    }
    fn on_cursor_state (&mut self, _is_over_window: bool) {
        // This renderer does not react to mouse events :(
    }
    fn on_click (&mut self) {
        // Don't react to anything
    }
    fn on_keypress (&mut self, _key: Key) {
        // Stoic renderer, I tell you
    }
}
|
use std::error::Error;
use std::io::prelude::*;
use bio::io::fasta;
use serde::{Deserialize, Serialize};
/// One sequence record as deserialized from a JSON line.
#[derive(Serialize, Deserialize, Debug)]
pub struct Sequence<'a> {
    /// Record identifier (borrowed from the input line).
    pub id: &'a str,
    /// Optional free-text description.
    pub description: Option<String>,
    /// The raw sequence text.
    pub sequence: &'a str,
}
impl<'a> From<Sequence<'a>> for fasta::Record {
    /// Convert a parsed `Sequence` into a `bio` FASTA record, carrying
    /// over id, optional description, and the sequence bytes.
    fn from(entry: Sequence<'a>) -> fasta::Record {
        fasta::Record::with_attrs(
            entry.id,
            entry.description.as_ref().map(|s| s.as_str()),
            entry.sequence.as_bytes(),
        )
    }
}
/// Stream JSON-encoded `Sequence` records (one per line) from `reader`,
/// invoking `f` on each; stops at EOF or propagates the first error.
pub fn each_sequence(
    mut reader: Box<dyn BufRead>,
    mut f: impl FnMut(Sequence) -> Result<(), Box<dyn Error>>,
) -> Result<(), Box<dyn Error>> {
    let mut buf = String::new();
    loop {
        match reader.read_line(&mut buf)? {
            // 0 bytes read means EOF.
            0 => break,
            _ => {
                // Collapse doubled backslashes before parsing.
                // NOTE(review): this assumes the producer over-escaped
                // backslashes; it would corrupt legitimately escaped
                // JSON — confirm against the data source.
                let cleaned = buf.replace("\\\\", "\\");
                let data: Sequence = serde_json::from_str(&cleaned)?;
                f(data)?;
                // Reuse the line buffer across iterations.
                buf.clear();
            }
        }
    }
    Ok(())
}
|
extern crate rustfft;
extern crate ndarray;
extern crate image;
extern crate ofuton;
use rustfft::{FFTplanner, FFTnum};
use rustfft::num_complex::Complex;
use rustfft::num_traits::Zero;
use ndarray::{Array, Array2, ArrayView, ArrayViewMut, ArrayViewMut2, Dimension};
use image::{ColorType, GenericImage, DynamicImage};
use std::fmt::Debug;
/// FFT-shift: rotate the 2-D array halfway along both axes so the
/// zero-frequency bin moves to (or back from) the center, returning the
/// result flattened row-major into a `Vec`.
fn shift<T:Default+ Copy+ Clone>(view: &ArrayViewMut2<T>) -> Vec<T> {
    let dimensions = view.raw_dim().into_pattern();
    let len = dimensions.0 * dimensions.1;
    // `into_pattern()` on a 2-D array yields (rows, cols): `genrows()`
    // iterates `dimensions.0` rows of `dimensions.1` elements each. The
    // previous code had width/height swapped, which only worked because
    // the images processed here were square.
    let height = dimensions.0;
    let width = dimensions.1;
    let mut shifted: Vec<T> = Vec::with_capacity(len);
    shifted.resize(len, T::default());
    for (i, row) in view.genrows().into_iter().enumerate() {
        let row = row.as_slice().unwrap();
        // Rotate the row halfway: second half first, then first half.
        let mut new_row = Vec::with_capacity(width);
        new_row.extend_from_slice(&row[width/2..]);
        new_row.extend_from_slice(&row[0..width/2]);
        // Destination row start, rotated halfway vertically.
        let idx = width * ((i + (height/2)) % height);
        shifted[idx..idx+width].copy_from_slice(&new_row);
    }
    shifted
}
/// Map complex spectrum values to 0..=255 grayscale intensities on a
/// log10 scale, normalized against the largest |re| in the input.
fn normalize(view: Vec<Complex<f64>>) -> Vec<u8> {
    // Largest |re| sets the top of the logarithmic scale.
    let max = view.iter().max_by(|a, b| (a.re.abs()).partial_cmp(&b.re.abs()).unwrap()).unwrap().re.abs();
    let max = max.log10();
    view.into_iter()
        .map(|v| {
            // Clamp below 1.0 so log10 stays non-negative.
            let mut real = v.re.abs();
            if real < 1.0 {
                real = 1.0;
            }
            ((real.log10() / max) * 255.).floor() as u8
        })
        .collect()
}
/// Compute the 2-D FFT of `img`'s luma channel and return the complex
/// spectrum, fft-shifted so the DC component is centered.
fn spectral(img: &DynamicImage) -> Vec<Complex<f64>> {
// fn spectral(img: &DynamicImage) -> Vec<u8> {
    let dimensions = img.dimensions();
    let shape = (dimensions.0 as usize, dimensions.1 as usize);
    // Grayscale pixels become complex numbers with zero imaginary part.
    let pixels: Vec<u8> = img.to_luma().pixels().map(|a| a.data[0]).collect();
    let mut buffer: Vec<Complex<f64>> = pixels.iter().map(|a| Complex::new(*a as f64, 0.0)).collect();
    let mut outputbuffer = vec![Zero::zero(); buffer.len()];
    let mut input = ArrayViewMut::from_shape(shape, &mut buffer).unwrap();
    let mut output = ArrayViewMut::from_shape(shape, &mut outputbuffer).unwrap();
    ofuton::fft2(&mut input, &mut output);
    shift(&output)
    // let shifted = shift(&output);
    // normalize(shifted)
}
/// Zero out all spectrum components farther than `r` bins from the
/// center, in place. Assumes the spectrum is already fft-shifted so DC
/// sits at the center.
fn low_pass_filter_inplace(mut input: &mut ArrayViewMut2<Complex<f64>>, r: usize) {
    let len = input.shape().into_iter().product();
    let mut filter = Vec::with_capacity(len);
    let icenter = input.shape()[0] / 2;
    let jcenter = input.shape()[1] / 2;
    for i in 0..input.shape()[0] {
        for j in 0..input.shape()[1] {
            // Unsigned-safe |i - icenter| and |j - jcenter|.
            let idiff = if i >= icenter { i - icenter } else { icenter - i };
            let jdiff = if j >= jcenter { j - jcenter } else { jcenter - j };
            // Inside the radius: pass (1); outside: block (0).
            if idiff.pow(2) + jdiff.pow(2) < r.pow(2) {
                filter.push(Complex::new(1., 0.));
            } else {
                filter.push(Complex::new(0., 0.));
            }
        }
    }
    let filter = ArrayView::from_shape(input.shape(), &filter).unwrap();
    // Element-wise multiply applies the mask in place.
    *input *= &filter;
}
/// Block all spectrum components within `r` bins of the (centered) DC
/// component and pass everything else; consumes and returns the spectrum.
fn high_pass_filter(input: Array2<Complex<f64>>, r: usize) -> Array2<Complex<f64>> {
    let len = input.shape().into_iter().product();
    let mut filter = Vec::with_capacity(len);
    let icenter = input.shape()[0] / 2;
    let jcenter = input.shape()[1] / 2;
    for i in 0..input.shape()[0] {
        for j in 0..input.shape()[1] {
            // Unsigned-safe distances from the center bin.
            let idiff = if i >= icenter { i - icenter } else { icenter - i };
            let jdiff = if j >= jcenter { j - jcenter } else { jcenter - j };
            // Inside the radius: block (0); outside: pass (1).
            if idiff.pow(2) + jdiff.pow(2) < r.pow(2) {
                filter.push(Complex::new(0., 0.));
            } else {
                filter.push(Complex::new(1., 0.));
            }
        }
    }
    let filter = ArrayView::from_shape(input.shape(), &filter).unwrap();
    input * filter
}
/// Pass spectrum components within `r` bins of the (centered) DC
/// component and block everything else; consumes and returns the
/// spectrum. Mirror image of `high_pass_filter` with the mask inverted.
fn low_pass_filter(input: Array2<Complex<f64>>, r: usize) -> Array2<Complex<f64>> {
    let len = input.shape().into_iter().product();
    let mut filter = Vec::with_capacity(len);
    let icenter = input.shape()[0] / 2;
    let jcenter = input.shape()[1] / 2;
    for i in 0..input.shape()[0] {
        for j in 0..input.shape()[1] {
            // Unsigned-safe distances from the center bin.
            let idiff = if i >= icenter { i - icenter } else { icenter - i };
            let jdiff = if j >= jcenter { j - jcenter } else { jcenter - j };
            // Inside the radius: pass (1); outside: block (0).
            if idiff.pow(2) + jdiff.pow(2) < r.pow(2) {
                filter.push(Complex::new(1., 0.));
            } else {
                filter.push(Complex::new(0., 0.));
            }
        }
    }
    let filter = ArrayView::from_shape(input.shape(), &filter).unwrap();
    input * filter
}
fn main() {
    // Filter radius comes from the first CLI argument.
    let mut args = ::std::env::args();
    let x = args.nth(1).unwrap();
    let filter_size = x.parse::<usize>().unwrap();
    let img = image::open("gray.png").unwrap();
    let dimensions = img.dimensions();
    let shape = (dimensions.0 as usize, dimensions.1 as usize);
    // Luma pixels -> complex buffer for the forward FFT.
    let pixels: Vec<u8> = img.to_luma().pixels().map(|a| a.data[0]).collect();
    let mut buffer: Vec<Complex<f64>> = pixels.iter().map(|a| Complex::new(*a as f64, 0.0)).collect();
    let mut outputbuffer = vec![Zero::zero(); buffer.len()];
    let mut resultbuffer = vec![Zero::zero(); buffer.len()];
    let mut input = ArrayViewMut::from_shape(shape, &mut buffer).unwrap();
    let mut output = ArrayViewMut::from_shape(shape, &mut outputbuffer).unwrap();
    // Scope limits the view borrow of resultbuffer so it can be consumed below.
    {
        let mut result = ArrayViewMut::from_shape(shape, &mut resultbuffer).unwrap();
        ofuton::fft2(&mut input, &mut output);
        // Pipeline: fft-shift -> mask around centered DC -> shift back -> inverse FFT.
        let mut shifted_buffer = shift(&output);
        let mut shifted = ArrayViewMut::from_shape(shape, &mut shifted_buffer).unwrap();
        low_pass_filter_inplace(&mut shifted, filter_size);
        let shifted = shifted.view_mut();
        let mut unshifted_buffer = shift(&shifted);
        let mut unshifted = ArrayViewMut::from_shape(shape, &mut unshifted_buffer).unwrap();
        ofuton::ifft2(&mut unshifted, &mut result);
    }
    // Real parts back to 8-bit grayscale.
    let result: Vec<u8> = resultbuffer.into_iter().map(|x| x.re as u8).collect();
    // NOTE(review): a LOW-pass filter is applied above, yet the output
    // file is named "high_pass_filter_*" — looks like a leftover label;
    // confirm which filter is intended.
    let filename = format!("high_pass_filter_{}.jpg", filter_size);
    let _ = image::save_buffer(filename, &result, shape.0 as u32, shape.1 as u32, ColorType::Gray(8));
}
/// Older experimental entry point, superseded by `main`; kept for reference.
/// Most of the filtering pipeline is commented out — it currently only runs
/// `spectral` followed by an inverse FFT and dumps the result to `hoge.jpg`.
fn _main() {
    let mut args = ::std::env::args();
    let x = args.nth(1).unwrap();
    // NOTE(review): `filter_size` is parsed but never used below — the
    // filtering steps that consumed it are commented out.
    let filter_size = x.parse::<usize>().unwrap();
    let image = image::open("gray.jpg").unwrap();
    let mut fimage = spectral(&image);
    // let _ = image::save_buffer("filtered.jpg", &fimage, image.dimensions().0, image.dimensions().1, ColorType::Gray(8));
    // let fimageview = Array::from_shape_vec((image.dimensions().0 as usize, image.dimensions().1 as usize), fimage).unwrap();
    // let mut fimage: Vec<Complex<f64>> = vec![];
    // let image_array = low_pass_filter(fimageview, filter_size);
    // for row in image_array.genrows() {
    //     fimage.extend(&row);
    // }
    // let filtered_spectrum:Vec<u8> = fimage.iter().map(|x| x.re as u8).collect();
    // let _ = image::save_buffer("filtered.jpg", &filtered_spectrum, image.dimensions().0, image.dimensions().1, ColorType::Gray(8));
    {
        // let mut image_array = ArrayViewMut::from_shape((image.dimensions().0 as usize, image.dimensions().1 as usize), &mut fimage).unwrap();
        // let fimage = shift(&mut image_array);
    }
    // let mut fimage:Vec<Complex<f64>> = fimage.into_iter().map(|x| Complex::new(x as f64, 0.0)).collect();
    let mut image_array = ArrayViewMut::from_shape((image.dimensions().0 as usize, image.dimensions().1 as usize), &mut fimage).unwrap();
    let mut result_vec = vec![Zero::zero(); image_array.shape()[0] * image_array.shape()[1]];
    {
        // Scope the view so `result_vec` can be consumed afterwards.
        let mut result = ArrayViewMut::from_shape((image_array.shape()[0], image_array.shape()[1]), &mut result_vec).unwrap();
        ofuton::ifft2(&mut image_array, &mut result);
    }
    let fimage: Vec<u8> = result_vec.into_iter().map(|x| x.re as u8).collect();
    println!("{:?}", &fimage);
    // let filename = format!("filter_{}.jpg", filter_size);
    let filename = "hoge.jpg";
    let _ = image::save_buffer(filename, &fimage, image.dimensions().0, image.dimensions().1, ColorType::Gray(8));
}
#[cfg(test)]
mod test {
    use super::{fft, ifft, fft2, ifft2, mutate_lane, low_pass_filter, shift};
    use ndarray::{Array, ArrayViewMut, Axis, Dimension};
    use rustfft::FFTplanner;
    use rustfft::num_complex::Complex;
    use rustfft::num_traits::Zero;

    /// Documents ndarray's axis ordering conventions.
    #[test]
    fn test_dimensions() {
        // The innermost axis comes last! For 2-D that means (y, x).
        let a = array![[1,2,3], [4,5,6]];
        assert_eq!(a.shape(), &[2,3]);
        let a = a.reversed_axes();
        assert_eq!(a.shape(), &[3,2]);
        let a = array![
            [
                [ 0, 1, 2],
                [ 3, 4, 5]
            ],
            [
                [ 6, 7, 8],
                [ 9, 10, 11]
            ]
        ];
        assert_eq!(a.shape(), &[2,2,3]);
    }

    /// Shows that swapping axes makes rows non-contiguous.
    #[test]
    fn swap_axes() {
        let mut a = array![[1,2,3], [4,5,6]];
        // Rows are contiguous here, so a slice is available.
        for mut v in a.genrows_mut() {
            assert!(v.as_slice_mut().is_some());
        }
        a.swap_axes(0,1);
        assert_eq!(a, array![[1,4],[2,5],[3,6]]);
        // After the axis swap the rows are strided: no slice available.
        for mut v in a.genrows_mut() {
            assert!(v.as_slice_mut().is_none());
        }
    }

    #[test]
    fn test_slice_mut() {
        // `slice_mut` returns a sliced ArrayView, not a plain `&mut [T]` slice.
        // let mut a = array![[1,2,3], [4,5,6]];
        // let slice = a.slice_mut(s![..2,..1]);
        // assert_eq!(slice, array![[1,4]]);
    }

    /// Applies a per-lane function along either axis by swapping axes,
    /// copying each lane into a temporary buffer, and writing the result back.
    #[test]
    fn test_col_slicing_mutate() {
        // Multiplies each element by its index within the lane.
        fn f(input: &mut [u8], output: &mut [u8]) {
            for i in 0..output.len() {
                output[i] = input[i] * ((i % 256) as u8);
            }
        }
        fn g<D: Dimension>(input: &mut ArrayViewMut<u8, D>, output: &mut ArrayViewMut<u8, D>, f: fn(&mut [u8], &mut [u8]) -> (), axis: usize) {
            if axis > 0 {
                // Bring the requested axis to the front so genrows walks it.
                input.swap_axes(0, axis);
                output.swap_axes(0, axis);
                {
                    let mut outrows = output.genrows_mut().into_iter();
                    for mut row in input.genrows_mut() {
                        let mut outrow = outrows.next().unwrap();
                        // Strided rows have no slice: copy through a Vec.
                        let mut vec = row.to_vec();
                        let mut out = vec![0; outrow.len()];
                        f(&mut vec, &mut out);
                        for i in 0..outrow.len() {
                            outrow[i] = out[i];
                        }
                    }
                }
                // Restore the original axis order.
                input.swap_axes(0, axis);
                output.swap_axes(0, axis);
            } else {
                // Axis 0 rows are contiguous: operate on slices directly.
                let mut outrows = output.genrows_mut().into_iter();
                for mut row in input.genrows_mut() {
                    let mut outrow = outrows.next().unwrap();
                    f(&mut row.as_slice_mut().unwrap(), &mut outrow.as_slice_mut().unwrap());
                }
            }
        }
        let mut a = vec![0,1,2,3,4,5,6,7,8,9,10,11];
        let mut b = vec![0; 12];
        {
            let mut view = ArrayViewMut::from_shape((3,4), &mut a).unwrap();
            let mut outview = ArrayViewMut::from_shape((3,4), &mut b).unwrap();
            g(&mut view, &mut outview, f, 1);
        }
        assert_eq!(b, vec![0,0,0,0,4,5,6,7,16,18,20,22]);
        let mut a = vec![0,1,2,3,4,5,6,7,8,9,10,11];
        let mut b = vec![0; 12];
        {
            let mut view = ArrayViewMut::from_shape((3,4), &mut a).unwrap();
            let mut outview = ArrayViewMut::from_shape((3,4), &mut b).unwrap();
            g(&mut view, &mut outview, f, 0);
        }
        assert_eq!(b, vec![0,1,4,9,0,5,12,21,0,9,20,33]);
    }

    /// `mutate_lane` along axis 1 (columns).
    #[test]
    fn test_mutate_col() {
        // Adds the lane average to each element of the lane.
        fn f(input: &mut [u8], output: &mut [u8]) {
            let avg = input.iter().fold(0, |a, b| a + b) / input.len() as u8;
            for i in 0..output.len() {
                output[i] = input[i] + avg;
            }
        }
        let mut a = vec![0,1,2,
                         3,4,5,
                         6,7,8];
        let mut b = vec![0; 9];
        {
            let mut input = ArrayViewMut::from_shape((3,3), &mut a).unwrap();
            let mut output = ArrayViewMut::from_shape((3,3), &mut b).unwrap();
            mutate_lane(&mut input, &mut output, f, 1);
        }
        assert_eq!(b, vec![ 3, 5, 7,
                            6, 8, 10,
                            9, 11, 13]);
    }

    /// `mutate_lane` applied twice: first along axis 0, then along axis 1.
    #[test]
    fn test_mutate_twice() {
        // Adds the lane average to each element of the lane.
        fn f(input: &mut [u8], output: &mut [u8]) {
            let avg = input.iter().fold(0, |a, b| a + b) / input.len() as u8;
            for i in 0..output.len() {
                output[i] = input[i] + avg;
            }
        }
        let mut a = vec![0,1,2,
                         3,4,5,
                         6,7,8];
        let mut b = vec![0; 9];
        {
            let mut input = ArrayViewMut::from_shape((3,3), &mut a).unwrap();
            let mut output = ArrayViewMut::from_shape((3,3), &mut b).unwrap();
            mutate_lane(&mut input, &mut output, f, 0);
        }
        assert_eq!(b, vec![ 1, 2, 3,
                            7, 8, 9,
                            13, 14, 15]);
        let mut a = b;
        let mut b = vec![0; 9];
        {
            let mut input = ArrayViewMut::from_shape((3,3), &mut a).unwrap();
            let mut output = ArrayViewMut::from_shape((3,3), &mut b).unwrap();
            mutate_lane(&mut input, &mut output, f, 1);
        }
        assert_eq!(b, vec![ 8, 10, 12,
                            14, 16, 18,
                            20, 22, 24]);
    }

    // Reference output captured from earlier runs of fft2/ifft2 on the
    // 3x3 input [1..9] (forward transform, then inverse before scaling):
    // 45+0i, -4.499999999999995+12.363648387545796i, -4.500000000000002+5.362891166673942i
    // -4.499999999999993+2.5980762113533165i, -4.4999999999999964+0.7934714131880969i, -4.499999999999997-0.793471413188092i
    // -4.499999999999993-2.5980762113533165i, -4.5-5.36289116667394i, -4.499999999999992-12.363648387545796i
    // 9.000000000000032+0.000000000000007105427357601002i, 80.99999999999999+0.0000000000000008881784197001252i, 71.99999999999999+0.000000000000015987211554602254i
    // 63.00000000000001-0.0000000000000035527136788004994i, 53.999999999999986+0.0000000000000026326510883636403i, 44.999999999999986+0.0000000000000012366151173399847i
    // 35.99999999999999-0.0000000000000035527136788004994i, 27.000000000000014-0.0000000000000035208295080637655i, 18.000000000000014-0.000000000000017223826671942234i

    /// Round-trip: ifft(fft(x)) recovers x (within floating tolerance).
    #[test]
    fn test_inverse() {
        let a:Vec<Complex<f64>> = vec![1.,2.,3.,4.,5.,6.,7.,8.,9.].into_iter().map(|i|Complex::from(i)).collect();
        let mut b = vec![Zero::zero(); 9];
        let mut b2 = vec![Zero::zero(); 9];
        ofuton::fft(&mut a.clone(), &mut b);
        ofuton::ifft(&mut b, &mut b2);
        for (a, b) in a.iter().zip(b2) {
            assert!((a.re-b.re).abs() < 0.0001)
        }
    }

    /// 2-D round-trip: ifft2(fft2(x)) recovers x (within floating tolerance).
    #[test]
    fn test_inverse2() {
        let mut a:Vec<Complex<f64>> = vec![1.,2.,3.,4.,5.,6.,7.,8.,9.].into_iter().map(|i|Complex::from(i)).collect();
        let mut input = ArrayViewMut::from_shape((3,3), &mut a).unwrap();
        let mut b = vec![Zero::zero(); 9];
        let mut output = ArrayViewMut::from_shape((3,3), &mut b).unwrap();
        let mut b2 = vec![Zero::zero(); 9];
        {
            let mut output2 = ArrayViewMut::from_shape((3,3), &mut b2).unwrap();
            ofuton::fft2(&mut input, &mut output);
            ofuton::ifft2(&mut output, &mut output2);
        }
        // `a` was moved into the view above; rebuild it for the comparison.
        let a:Vec<Complex<f64>> = vec![1.,2.,3.,4.,5.,6.,7.,8.,9.].into_iter().map(|i|Complex::from(i)).collect();
        for (a, b) in a.iter().zip(b2) {
            assert!((a.re-b.re).abs() < 0.0001)
        }
    }

    /// `shift` swaps quadrants diagonally (fft-shift).
    #[test]
    fn test_shift() {
        let mut vec = vec![1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16];
        let mut view = ArrayViewMut::from_shape((4,4), &mut vec).unwrap();
        let shifted = shift(&view);
        // NOTE(review): this first `expected` is immediately shadowed below
        // and never used.
        let expected = vec![1, 2, 3, 4,
                            5, 6, 7, 8,
                            9, 10, 11, 12,
                            13, 14, 15, 16];
        let expected = vec![11, 12, 9, 10,
                            15, 16, 13, 14,
                            3, 4, 1, 2,
                            7, 8, 5, 6];
        assert_eq!(shifted, expected);
    }

    /// Shifting twice is the identity for even-sized arrays.
    #[test]
    fn test_shift_twice() {
        let mut vec = vec![1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16];
        let mut view = ArrayViewMut::from_shape((4,4), &mut vec).unwrap();
        let mut shifted = shift(&view);
        let mut view = ArrayViewMut::from_shape((4,4), &mut shifted).unwrap();
        let mut shifted = shift(&view);
        let expected = vec![1, 2, 3, 4,
                            5, 6, 7, 8,
                            9, 10, 11, 12,
                            13, 14, 15, 16];
        assert_eq!(shifted, expected);
    }
}
|
use crate::{
feed_generator::FeedGenerator,
utils::{now, NabuResult},
};
use atom_syndication::{Category, Content, Entry, Feed, FixedDateTime, Link, Person};
use reqwest;
use serde::Deserialize;
use serde_json;
/// Atom feed generator for V2EX's "hot topics" API endpoint.
pub struct HotTopicsGenerator;

/// One topic as returned by `https://www.v2ex.com/api/topics/hot.json`.
#[derive(Debug, Deserialize)]
struct Topic {
    node: Node,
    member: Member,
    // Presumably a Unix-seconds timestamp (the helper name suggests seconds),
    // converted to `FixedDateTime` during deserialization.
    #[serde(deserialize_with = "crate::utils::secord_to_datetime")]
    last_modified: FixedDateTime,
    id: i128,
    // Topic body, already rendered to HTML by the API.
    content_rendered: String,
    title: String,
    #[serde(deserialize_with = "crate::utils::secord_to_datetime")]
    created: FixedDateTime,
    url: String,
}

/// The forum node (category) a topic belongs to.
#[derive(Debug, Deserialize)]
struct Node {
    name: String,
    title: String,
    url: String,
    id: i128,
}

/// The topic's author.
#[derive(Debug, Deserialize)]
struct Member {
    username: String,
    url: String,
    id: i128,
}
impl FeedGenerator for HotTopicsGenerator {
    // This generator needs no configuration.
    type Info = ();

    const PATH: &'static str = "topics/hot";

    /// Fetches the hot-topics JSON from V2EX and converts it into an Atom
    /// feed. Errors from the HTTP request, body read, or JSON parse are
    /// propagated via `?`.
    fn update(_: &Self::Info) -> NabuResult<Feed> {
        let url = "https://www.v2ex.com/api/topics/hot.json";
        let text = reqwest::get(url)?.text()?;
        let topics: Vec<Topic> = serde_json::from_str(&text)?;
        let result = Feed {
            title: "V2ex每日热点".to_string(),
            id: "V2ex Hot Topics".to_string(),
            // Feed timestamp is "now", not derived from the topics.
            updated: now(),
            links: vec![Link {
                href: "https://www.v2ex.com/".to_string(),
                ..Default::default()
            }],
            entries: Self::build_entries(&topics),
            ..Default::default()
        };
        Ok(result)
    }
}
impl HotTopicsGenerator {
    /// Maps each API topic onto an Atom `Entry`: title, rendered HTML body,
    /// author, node as category, topic URL as link, and the topic's
    /// created/modified timestamps.
    fn build_entries(topics: &[Topic]) -> Vec<Entry> {
        topics
            .iter()
            .map(|x| Entry {
                title: x.title.clone(),
                content: Some(Content {
                    value: Some(x.content_rendered.clone()),
                    src: Some(x.url.clone()),
                    content_type: Some("html".to_string()),
                }),
                // Numeric topic id doubles as the Atom entry id.
                id: format!("{}", x.id),
                updated: x.last_modified,
                authors: vec![Person {
                    name: x.member.username.clone(),
                    email: None,
                    uri: Some(x.member.url.clone()),
                }],
                categories: vec![Category {
                    term: x.node.name.clone(),
                    scheme: None,
                    label: Some(x.node.title.clone()),
                }],
                links: vec![Link {
                    href: x.url.clone(),
                    ..Default::default()
                }],
                published: Some(x.created),
                ..Default::default()
            })
            .collect()
    }
}
|
//! This module defines utilities for working with the [`std::option::Option`] type.
/// Adds mapping methods to the `Option` type.
pub trait OptionOps {
    type Item;

    /// Runs `f` when the option is `None`, then returns the option unchanged.
    fn map_none<F>(self, f: F) -> Self
    where
        F: FnOnce();

    /// Maps a reference to the contained value, leaving the option intact.
    fn map_ref<U, F>(&self, f: F) -> Option<U>
    where
        F: FnOnce(&Self::Item) -> U;

    /// Consumes the option, calling `f` on the value if present.
    fn for_each<U, F>(self, f: F)
    where
        F: FnOnce(Self::Item) -> U;

    /// Calls `f` on a reference to the value if present.
    fn for_each_ref<U, F>(&self, f: F)
    where
        F: FnOnce(&Self::Item) -> U;

    /// Combines two options into an option of a pair; `None` if either side
    /// is `None`.
    fn zip<U>(self, other: Option<U>) -> Option<(Self::Item, U)>;

    /// Combines two options with `f`; `None` if either side is `None`.
    fn zip_with<U, F, R>(self, other: Option<U>, f: F) -> Option<R>
    where
        F: FnOnce(Self::Item, U) -> R;

    /// Returns true if option contains Some with value matching given predicate.
    fn contains_if<F>(&self, f: F) -> bool
    where
        F: FnOnce(&Self::Item) -> bool;
}

impl<T> OptionOps for Option<T> {
    type Item = T;

    fn map_none<F>(self, f: F) -> Self
    where
        F: FnOnce(),
    {
        match self {
            None => {
                f();
                None
            }
            some => some,
        }
    }

    fn map_ref<U, F>(&self, f: F) -> Option<U>
    where
        F: FnOnce(&Self::Item) -> U,
    {
        match self.as_ref() {
            Some(value) => Some(f(value)),
            None => None,
        }
    }

    fn for_each<U, F>(self, f: F)
    where
        F: FnOnce(Self::Item) -> U,
    {
        // The mapped value is deliberately discarded.
        if let Some(value) = self {
            f(value);
        }
    }

    fn for_each_ref<U, F>(&self, f: F)
    where
        F: FnOnce(&Self::Item) -> U,
    {
        if let Some(value) = self.as_ref() {
            f(value);
        }
    }

    fn zip<U>(self, other: Option<U>) -> Option<(T, U)> {
        self.zip_with(other, |a, b| (a, b))
    }

    fn zip_with<U, F, R>(self, other: Option<U>, f: F) -> Option<R>
    where
        F: FnOnce(T, U) -> R,
    {
        match (self, other) {
            (Some(a), Some(b)) => Some(f(a, b)),
            _ => None,
        }
    }

    fn contains_if<F>(&self, f: F) -> bool
    where
        F: FnOnce(&Self::Item) -> bool,
    {
        match self.as_ref() {
            Some(value) => f(value),
            None => false,
        }
    }
}
|
use crate::types::*;
use neo4rs_macros::BoltStruct;
/// A Bolt RECORD message (struct signature bytes 0xB1, 0x71): one row of
/// query results.
#[derive(Debug, PartialEq, Clone, BoltStruct)]
#[signature(0xB1, 0x71)]
pub struct Record {
    /// The row's field values.
    pub data: BoltList,
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::version::Version;
    use bytes::*;
    use std::cell::RefCell;
    use std::rc::Rc;

    /// Parses a raw RECORD message and checks the field count.
    #[test]
    fn should_deserialize_record_message() {
        // 0xB1 0x71 is the RECORD struct header; the remaining bytes are
        // presumably a PackStream-encoded 2-element list — confirm against
        // the Bolt protocol spec.
        let bytes = Rc::new(RefCell::new(Bytes::from_static(&[
            0xB1, 0x71, 0x92, 0x81, 0x61, 0x81, 0x62,
        ])));
        let record: Record = Record::parse(Version::V4_1, bytes).unwrap();
        assert_eq!(record.data.len(), 2);
    }
}
|
use anyhow::{anyhow, Context};
use byteorder::{BigEndian, WriteBytesExt};
use log::{debug, info, warn};
use pnet::packet::icmp::{
echo_request::{EchoRequestPacket, MutableEchoRequestPacket},
IcmpTypes,
};
use pnet::packet::{ip::IpNextHeaderProtocols::Icmp, Packet};
use pnet::transport::{self, icmp_packet_iter, TransportChannelType, TransportProtocol::Ipv4};
use pnet::util::checksum;
use serde::Deserialize;
use std::net::{IpAddr, Ipv4Addr};
use std::sync::mpsc;
use std::time::Duration;
use std::{fs, io, str, thread};
/// Network environment configuration loaded from `shared/env.json`.
#[derive(Deserialize)]
pub struct Environment {
    pub network_addr: Ipv4Addr,
    pub subnet_mask: Ipv4Addr,
    pub default_gateway: Ipv4Addr,
    // The JSON field is named `dhcp_svr_identifier`.
    #[serde(rename = "dhcp_svr_identifier")]
    pub dhcp_svr_addr: Ipv4Addr,
    pub dns_svr_addr: Ipv4Addr,
    // Lease duration — presumably seconds (DHCP convention); confirm against
    // how callers use it.
    pub lease_time: u32,
}

impl Environment {
    /// Loads the environment from `shared/env.json`.
    ///
    /// Fails if the file cannot be opened or does not match the expected
    /// schema.
    pub fn new() -> anyhow::Result<Self> {
        let file = fs::File::open("shared/env.json")?;
        serde_json::from_reader(file).context("Invalid env file format")
    }
}
/// Encodes `i` as its 4-byte big-endian representation.
///
/// Uses the standard library's `u32::to_be_bytes` instead of going through
/// `byteorder`'s fallible writer — the conversion itself cannot fail, but the
/// `Result` return type is kept for interface compatibility with callers.
pub fn big_endian_from(i: u32) -> Result<Vec<u8>, io::Error> {
    Ok(i.to_be_bytes().to_vec())
}
/// Probes `target` with an ICMP echo request to decide whether the address
/// is free.
///
/// Returns `Ok(())` when no echo reply arrives within 200 ms (address assumed
/// unused) and an error when a reply is received (address already in use).
pub fn is_ip_addr_available(target: Ipv4Addr) -> anyhow::Result<()> {
    let icmp_buf = new_default_icmp_buf();
    let icmp_packet = EchoRequestPacket::new(&icmp_buf).unwrap();
    let (mut transport_snd, mut transport_recv) =
        transport::transport_channel(1024, TransportChannelType::Layer4(Ipv4(Icmp)))?;
    transport_snd.send_to(icmp_packet, IpAddr::V4(target))?;
    let (sender, receiver) = mpsc::channel();
    // The packet iterator blocks in `next()`, so wait for a reply on a
    // separate thread and race it against the timeout below.
    // NOTE(review): if no reply ever arrives, this thread stays blocked on
    // `iter.next()` indefinitely — confirm that leaking it is acceptable.
    thread::spawn(move || {
        let mut iter = icmp_packet_iter(&mut transport_recv);
        let (packet, _) = iter.next().unwrap();
        if packet.get_icmp_type() == IcmpTypes::EchoReply {
            match sender.send(true) {
                // Send fails when the main thread has already timed out and
                // dropped the receiver.
                Err(_) => info!("ICMP timeout"),
                Ok(_) => return,
            }
        }
    });
    if receiver.recv_timeout(Duration::from_millis(200)).is_ok() {
        let message = format!("IP address already in use: {}", target);
        warn!("{}", message);
        Err(anyhow!(message))
    } else {
        debug!("Not received reply within timeout");
        Ok(())
    }
}
/// Builds a minimal 8-byte ICMP echo-request packet (header only, no payload)
/// with its checksum field filled in.
fn new_default_icmp_buf() -> [u8; 8] {
    let mut buf = [0u8; 8];
    let mut icmp_packet = MutableEchoRequestPacket::new(&mut buf).unwrap();
    icmp_packet.set_icmp_type(IcmpTypes::EchoRequest);
    // NOTE(review): pnet's `checksum` takes the 16-bit word index to skip
    // (normally the checksum field itself); 16 looks out of range for an
    // 8-byte (4-word) packet — verify against pnet's `util::checksum` docs.
    let checksum = checksum(icmp_packet.to_immutable().packet(), 16);
    icmp_packet.set_checksum(checksum);
    buf
}
/// Interprets a 4-byte slice as an `Ipv4Addr`.
///
/// Returns `None` for a slice of any other length.
pub fn ipv4_addr_from(buf: &[u8]) -> Option<Ipv4Addr> {
    match *buf {
        [a, b, c, d] => Some(Ipv4Addr::new(a, b, c, d)),
        _ => None,
    }
}
|
use {
super::{ChunkRenderMesher, RenderMeshedChunk},
rough::{
amethyst::{
assets::{AssetStorage, Handle, Loader},
core::{math::Point3, Transform},
ecs::prelude::*,
renderer::{mtl::Material, visibility::BoundingSphere, Mesh},
},
log,
terrain::{
ChunkConsumer, ChunkHandle, ChunkMap, ChunkProducer, ChunkVec, LoadAreas, LoadRadius,
CHUNK_BOUNDING_SPHERE_RADIUS,
},
thread::{KeepGoing, WorkerThreads},
},
std::{
collections::{HashMap, VecDeque},
io,
ops::Deref,
sync::{Arc, Mutex, RwLock},
thread,
time::Duration,
},
};
/// Component marking an entity whose position drives chunk loading; chunks
/// within `view_radius` of it are kept loaded/meshed.
#[derive(Clone)]
pub struct WorldViewer {
    pub view_radius: i32,
}

impl Default for WorldViewer {
    fn default() -> Self {
        WorldViewer { view_radius: 3 }
    }
}

impl Component for WorldViewer {
    // HashMapStorage — presumably chosen because only a handful of entities
    // are viewers; confirm against usage.
    type Storage = HashMapStorage<Self>;
}

impl LoadRadius for WorldViewer {
    /// The radius used by `LoadAreas` to decide which chunks are active.
    fn value(&self) -> i32 {
        self.view_radius
    }
}
/// Resource holding the shared material handle applied to every rendered
/// chunk entity.
pub struct ChunkMaterial(pub Handle<Material>);

impl Deref for ChunkMaterial {
    type Target = Handle<Material>;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
/// Bundle of ECS resources the render mesher needs on the main thread each
/// frame (consumed by `ChunkRenderMesherWorker::update_meshes`).
pub struct ChunkRenderMesherData<'a> {
    pub entities: Entities<'a>,
    pub chunk_material: Read<'a, ChunkMaterial>,
    pub mesh_storage: Read<'a, AssetStorage<Mesh>>,
    pub asset_loader: ReadExpect<'a, Loader>,
    pub world_viewers: ReadStorage<'a, WorldViewer>,
    pub transforms: ReadStorage<'a, Transform>,
    // Deferred component insert/remove, applied after the dispatch.
    pub updater: Read<'a, LazyUpdate>,
}
/// Remembers which revision of a chunk was last meshed so that stale meshes
/// can be detected and rebuilt.
struct MeshedChunkEntry {
    handle: ChunkHandle,
    meshed_revision: usize,
}

impl MeshedChunkEntry {
    /// Records `handle` as freshly meshed at its current revision.
    fn new(handle: &ChunkHandle) -> Self {
        let meshed_revision = handle.revision();
        Self {
            handle: handle.clone(),
            meshed_revision,
        }
    }

    /// True when the chunk was modified after it was last meshed.
    fn changed(&self) -> bool {
        self.meshed_revision != self.handle.revision()
    }

    /// Marks the chunk's current revision as the one that was meshed.
    fn update_revision(&mut self) {
        self.meshed_revision = self.handle.revision();
    }
}
/// State shared between the main thread and the worker threads (reached
/// through `Deref` on `ChunkRenderMesherWorker`).
pub struct ChunkRenderMesherWorkerData {
    // Chunks that currently have a render mesh, keyed by chunk coordinates.
    meshed_chunks: RwLock<HashMap<ChunkVec, MeshedChunkEntry>>,
    // Chunks whose mesh components should be stripped from their entities.
    remove_queue: Mutex<VecDeque<ChunkVec>>,
    // Finished meshes waiting to be attached to entities on the main thread.
    output_queue: Mutex<VecDeque<RenderMeshedChunk>>,
    // Flag telling the removal worker to sweep out-of-area meshes.
    remove_meshes: Mutex<bool>,
}

/// Clonable handle to the meshing workers; clones share `data` via `Arc`.
#[derive(Clone)]
pub struct ChunkRenderMesherWorker {
    chunk_map: ChunkMap,
    chunk_consumer: ChunkConsumer,
    load_areas: LoadAreas<WorldViewer>,
    keep_going: KeepGoing,
    worker_threads: WorkerThreads,
    data: Arc<ChunkRenderMesherWorkerData>,
}
impl ChunkRenderMesherWorker {
    // Name of the chunk-producer queue this worker consumes from.
    const CHUNK_CONSUMER_QUEUE: &'static str = "chunk_render_mesher";

    /// Spawns one mesh-removal worker thread plus `num_workers` (clamped to
    /// at least 1) meshing worker threads, all sharing this worker's state.
    pub fn new(
        chunk_map: &ChunkMap,
        num_workers: i32,
        chunk_producer: &ChunkProducer,
        load_areas: LoadAreas<WorldViewer>,
    ) -> io::Result<Self> {
        let num_workers = num_workers.max(1);
        log::info!("Starting chunk render mesher with {} workers", num_workers);
        let (keep_going, worker_threads) = WorkerThreads::new();
        let worker = Self {
            chunk_map: chunk_map.clone(),
            chunk_consumer: chunk_producer.consume(Self::CHUNK_CONSUMER_QUEUE),
            keep_going,
            worker_threads: worker_threads.clone(),
            load_areas,
            data: Arc::new(ChunkRenderMesherWorkerData {
                meshed_chunks: RwLock::new(HashMap::new()),
                remove_queue: Mutex::new(VecDeque::new()),
                output_queue: Mutex::new(VecDeque::new()),
                // Starts `true` so stale meshes are swept on the first pass.
                remove_meshes: Mutex::new(true),
            }),
        };
        {
            let worker = worker.clone();
            worker_threads.spawn("chunk-render-mesher-remove-worker".to_string(), move || {
                worker.remove_meshes_worker()
            })?;
        }
        for n in 0..num_workers {
            let worker = worker.clone();
            worker_threads.spawn(format!("chunk-render-mesher-worker-{}", n), move || {
                worker.mesher_worker()
            })?;
        }
        Ok(worker)
    }

    /// Signals all worker threads to stop and joins them, logging any join
    /// failure instead of propagating it.
    pub fn stop(&mut self) {
        self.keep_going.stop();
        if let Err(e) = self.worker_threads.stop() {
            log::warn!("{}", e);
        }
    }

    /// Main-thread entry point: attaches at most one finished mesh to its
    /// chunk entity and strips components for at most one queued removal.
    pub fn update_meshes(&mut self, data: ChunkRenderMesherData) {
        let ChunkRenderMesherData {
            entities,
            chunk_material,
            mesh_storage,
            asset_loader,
            world_viewers,
            transforms,
            updater,
        } = data;
        // When any viewer moved, flag the removal worker to sweep stale meshes.
        if self
            .load_areas
            .changed(&entities, &world_viewers, &transforms)
        {
            *self.remove_meshes.lock().unwrap() = true;
        }
        if let Some(meshed_chunk) = self.next_meshed_chunk() {
            let vec = meshed_chunk.vec();
            if let Some(entity) = self.chunk_map.get_entity(vec) {
                updater.insert(entity, chunk_material.clone());
                updater.insert(
                    entity,
                    asset_loader.load_from_data(meshed_chunk.mesh_data, (), &mesh_storage),
                );
                updater.insert(
                    entity,
                    BoundingSphere {
                        center: Point3::new(0.0, 0.0, 0.0),
                        radius: CHUNK_BOUNDING_SPHERE_RADIUS,
                    },
                );
            }
        }
        // `try_lock` avoids stalling the frame when a worker holds the lock.
        if let Ok(Some(vec)) = self.remove_queue.try_lock().map(|mut q| q.pop_front()) {
            if let Some(entry) = self.meshed_chunks.write().unwrap().remove(&vec) {
                if let Some(entity) = entry.handle.entity() {
                    log::debug!("Removing render mesh at {}", vec);
                    updater.remove::<Handle<Mesh>>(entity);
                    updater.remove::<Handle<Material>>(entity);
                    updater.remove::<BoundingSphere>(entity);
                }
            }
        }
    }

    /// Pops the next finished mesh; a mesh whose chunk is no longer active is
    /// diverted to the removal queue instead of being returned.
    fn next_meshed_chunk(&self) -> Option<RenderMeshedChunk> {
        if let Ok(Some(meshed_chunk)) = self.output_queue.try_lock().map(|mut q| q.pop_front()) {
            if self.is_active(meshed_chunk.vec()) {
                return Some(meshed_chunk);
            }
            self.remove_queue
                .lock()
                .unwrap()
                .push_back(meshed_chunk.vec());
        }
        None
    }

    /// Queues every meshed chunk that is no longer inside an active load
    /// area for removal.
    fn remove_meshes(&self) {
        let mut remove_meshes = vec![];
        for (vec, _) in self.meshed_chunks.read().unwrap().iter() {
            if !self.is_active(*vec) {
                remove_meshes.push(*vec);
            }
        }
        // The read lock above is released before the queue lock is taken.
        self.remove_queue
            .lock()
            .unwrap()
            .extend(remove_meshes.iter());
    }

    /// Whether `vec` lies within any viewer's load area.
    fn is_active(&self, vec: ChunkVec) -> bool {
        self.load_areas.is_active(vec)
    }

    /// Meshes one chunk (runs on a worker thread) and records the revision
    /// that was meshed before publishing the result to the output queue.
    fn build_mesh(&self, handle: ChunkHandle) {
        if !(self.needs_mesh(&handle) && self.is_active(handle.vec())) {
            return;
        }
        if let Some(meshed_chunk) =
            ChunkRenderMesher::new_unchecked(&self.chunk_map, handle.vec()).build_mesh()
        {
            {
                let mut meshed_chunks = self.meshed_chunks.write().unwrap();
                if !meshed_chunks.contains_key(&handle.vec()) {
                    meshed_chunks.insert(handle.vec(), MeshedChunkEntry::new(&handle));
                }
                if let Some(entry) = meshed_chunks.get_mut(&handle.vec()) {
                    entry.update_revision();
                }
            }
            self.output_queue.lock().unwrap().push_back(meshed_chunk);
        }
    }

    /// A chunk needs meshing when it was never meshed, or when its revision
    /// changed since it was last meshed.
    fn needs_mesh(&self, handle: &ChunkHandle) -> bool {
        if !self.is_meshed(handle) {
            return true;
        }
        if let Some(entry) = self.meshed_chunks.read().unwrap().get(&handle.vec()) {
            return entry.changed();
        }
        false
    }

    #[inline(always)]
    fn should_stop(&self) -> bool {
        self.keep_going.should_stop()
    }

    /// Worker-thread loop: pulls chunk handles and meshes the ones that are
    /// meshable; non-meshable chunks are pushed back for a later retry.
    ///
    /// NOTE(review): already-meshed chunks are skipped here even when their
    /// revision changed — `needs_mesh` (used by `build_mesh`) would allow
    /// re-meshing; confirm changed chunks are re-queued through another path.
    fn mesher_worker(&self) {
        loop {
            if self.should_stop() {
                return;
            }
            if let Some(handle) = self.chunk_consumer.pop_front() {
                if !self.is_active(handle.vec()) || self.is_meshed(&handle) {
                    continue;
                } else if ChunkRenderMesher::is_meshable(&self.chunk_map, handle.vec()) {
                    self.build_mesh(handle);
                    continue;
                } else {
                    // Not meshable yet; retry this chunk later.
                    self.chunk_consumer.push_front(handle);
                }
            }
            thread::sleep(Duration::from_millis(50));
        }
    }

    /// Whether a mesh has been recorded for this chunk.
    fn is_meshed(&self, handle: &ChunkHandle) -> bool {
        self.meshed_chunks
            .read()
            .unwrap()
            .contains_key(&handle.vec())
    }

    /// Worker-thread loop: when the `remove_meshes` flag is set, sweeps
    /// out-of-area meshes into the removal queue.
    fn remove_meshes_worker(&self) {
        loop {
            if self.should_stop() {
                return;
            }
            // Read-and-clear the flag without blocking; contention is treated
            // as "flag not set" and re-checked on the next iteration.
            let remove_meshes = {
                if let Ok(mut remove_meshes) = self.remove_meshes.try_lock() {
                    let flag = *remove_meshes;
                    if flag {
                        *remove_meshes = false;
                    }
                    flag
                } else {
                    false
                }
            };
            if remove_meshes {
                self.remove_meshes();
            }
            thread::sleep(Duration::from_millis(50));
        }
    }
}
// Exposes the shared worker state (queues, meshed-chunk map) directly on the
// worker handle.
impl Deref for ChunkRenderMesherWorker {
    type Target = ChunkRenderMesherWorkerData;

    #[inline(always)]
    fn deref(&self) -> &Self::Target {
        &self.data
    }
}
|
use std::sync::atomic::{AtomicU64, Ordering};
use std::{collections::HashMap, time::Instant};
use anyhow::{anyhow, ensure, Result};
use async_fuse::FileType;
use config::Contents;
use menmos_client::{Client, Meta, Query, Type};
use tokio::sync::Mutex;
use crate::{cached_client::CachedClient, concurrent_map::ConcurrentMap};
use crate::{config, write_buffer::WriteBuffer};
use super::virtualdir::VirtualDirectory;
use super::{Error, Result as FSResult};
/// FUSE filesystem backed by a menmos cluster.
pub struct MenmosFS {
    pub(crate) client: CachedClient,
    // Bidirectional blob-id <-> inode mappings.
    pub(crate) blobid_to_inode: ConcurrentMap<String, u64>,
    pub(crate) inode_to_blobid: ConcurrentMap<u64, String>,
    // (parent inode, entry name) -> blob id, filled while listing directories.
    pub(crate) name_to_blobid: ConcurrentMap<(u64, String), String>,
    pub(crate) inode_to_last_refresh: ConcurrentMap<u64, Instant>,
    // Virtual (query-backed) directories and their inode bookkeeping.
    pub(crate) virtual_directories_inodes: ConcurrentMap<u64, VirtualDirectory>,
    pub(crate) virtual_directories: ConcurrentMap<(u64, String), u64>,
    // Pending writes per inode, flushed via `flush_buffer`.
    pub(crate) write_buffers: Mutex<HashMap<u64, WriteBuffer>>,
    // Next inode to hand out. Starts at 3: inode 1 is the mount root (see
    // `new`); 2 is presumably reserved — confirm.
    inode_counter: AtomicU64,
}
impl MenmosFS {
    /// Builds the filesystem for `mount`: connects the menmos client (via a
    /// profile or explicit host credentials) and registers inode 1 as the
    /// mount root.
    pub async fn new(mount: config::Mount) -> Result<Self> {
        let client = match mount.client {
            config::ClientConfig::Profile { profile } => Client::new_with_profile(profile).await?,
            config::ClientConfig::Host {
                host,
                username,
                password,
            } => Client::new(host, username, password).await?,
        };
        let client = CachedClient::new(client);
        let fs = Self {
            client,
            blobid_to_inode: Default::default(),
            inode_to_blobid: Default::default(),
            name_to_blobid: Default::default(),
            inode_counter: AtomicU64::new(3),
            inode_to_last_refresh: ConcurrentMap::new(),
            virtual_directories_inodes: ConcurrentMap::new(),
            virtual_directories: Default::default(),
            write_buffers: Default::default(),
        };
        // Initialize the filesystem roots.
        fs.virtual_directories_inodes
            .insert(
                1,
                VirtualDirectory::Mount {
                    contents: mount.contents,
                },
            )
            .await;
        Ok(fs)
    }

    /// Flushes the write buffer for an inode to the menmos cluster.
    ///
    /// Returns an IO error if the write fails.
    pub(crate) async fn flush_buffer(&self, ino: u64, buffer: WriteBuffer) -> FSResult<()> {
        let blob_id = self
            .inode_to_blobid
            .get(&ino)
            .await
            .ok_or(Error::NotFound)?;
        self.client
            .write(&blob_id, buffer.offset, buffer.data.freeze())
            .await
            .map_err(|e| {
                // The underlying error is logged; the caller only sees IOError.
                log::error!("write error: {}", e);
                Error::IOError
            })?;
        Ok(())
    }

    /// Gets the inode corresponding to a blob ID.
    ///
    /// If the blob ID wasn't seen before, a new inode is generated and returned.
    pub(crate) async fn get_inode(&self, blob_id: &str) -> u64 {
        if let Some(inode) = self.blobid_to_inode.get(&String::from(blob_id)).await {
            inode
        } else {
            let inode = self.inode_counter.fetch_add(1, Ordering::SeqCst);
            self.blobid_to_inode
                .insert(blob_id.to_string(), inode)
                .await;
            inode
        }
    }

    /// Gets a virtual directory by its parent inode and its name.
    ///
    /// If this directory wasn't seen before, a new inode is generated and returned.
    pub(crate) async fn get_virtual_inode(&self, parent_inode: u64, name: &str) -> u64 {
        if let Some(inode) = self
            .virtual_directories
            .get(&(parent_inode, name.to_string()))
            .await
        {
            inode
        } else {
            let inode = self.inode_counter.fetch_add(1, Ordering::SeqCst);
            self.virtual_directories
                .insert((parent_inode, name.to_string()), inode)
                .await;
            inode
        }
    }

    /// Get the metadata for a given inode.
    ///
    /// Returns None if the blob id corresponding on the server doesn't exist,
    /// returns an error if there is no blob ID corresponding to the provided inode.
    pub(crate) async fn get_meta_by_inode(&self, inode: u64) -> Result<Option<Meta>> {
        if let Some(blob_id) = self.inode_to_blobid.get(&inode).await {
            Ok(self.client.get_meta(&blob_id).await?)
        } else {
            Err(anyhow!("unknown inode"))
        }
    }

    /// Lists entries for a physical (backed by a blob on the menmos cluster) directory.
    pub(crate) async fn list_entries(
        &self,
        query: Query,
        parent_inode: u64,
    ) -> Result<Vec<(u64, FileType, String)>> {
        // TODO: Actually use paging here.
        let results = self.client.query(query.with_size(5000)).await?;
        // All directories have "." and ".."
        let mut entries = vec![
            (parent_inode, FileType::Directory, ".".to_string()),
            (parent_inode, FileType::Directory, "..".to_string()),
        ];
        entries.reserve(results.count);
        for hit in results.hits.into_iter() {
            let inode = self.get_inode(&hit.id).await;
            let file_type = if hit.meta.blob_type == menmos_client::Type::Directory {
                FileType::Directory
            } else {
                FileType::RegularFile
            };
            let blob_id = hit.id;
            let name = hit.meta.name.clone();
            // Remember the name -> blob mapping for later lookups.
            self.name_to_blobid
                .insert((parent_inode, name.clone()), blob_id)
                .await;
            entries.push((inode, file_type, name))
        }
        Ok(entries)
    }

    /// Lists entries for a virtual (created on the client by a query) directory.
    pub(crate) async fn list_virtual_entries(
        &self,
        virtual_directory: VirtualDirectory,
        parent_inode: u64,
    ) -> Result<Vec<(u64, FileType, String)>> {
        match virtual_directory {
            VirtualDirectory::InMemory(v) => {
                // These are all other virtual directories.
                let mut entries = vec![
                    (parent_inode, FileType::Directory, ".".to_string()),
                    (parent_inode, FileType::Directory, "..".to_string()),
                ];
                entries.reserve(v.len());
                for dir_name in v.into_iter() {
                    let inode = self.get_virtual_inode(parent_inode, &dir_name).await;
                    entries.push((inode, FileType::Directory, dir_name));
                }
                Ok(entries)
            }
            VirtualDirectory::Query { query } => self.list_entries(query, parent_inode).await,
            VirtualDirectory::Mount { contents } => match contents {
                // A single-blob mount: the directory shows that one item.
                Contents::Root { root } => {
                    let mut entries = vec![
                        (parent_inode, FileType::Directory, ".".to_string()),
                        (parent_inode, FileType::Directory, "..".to_string()),
                    ];
                    let item_inode = self.get_inode(&root).await;
                    let meta = self
                        .client
                        .get_meta(&root)
                        .await?
                        .ok_or_else(|| anyhow!("mount meta does not exist"))?;
                    self.name_to_blobid
                        .insert((parent_inode, meta.name.clone()), root)
                        .await;
                    let t = if meta.blob_type == Type::File {
                        FileType::RegularFile
                    } else {
                        FileType::Directory
                    };
                    entries.push((item_inode, t, meta.name));
                    Ok(entries)
                }
                Contents::Virtual(mounts) => {
                    let mut entries = vec![
                        (parent_inode, FileType::Directory, ".".to_string()),
                        (parent_inode, FileType::Directory, "..".to_string()),
                    ];
                    // Create a virtual directory for each of our mounts.
                    for (k, contents) in mounts.into_iter() {
                        let mount_inode = self.get_virtual_inode(parent_inode, &k).await;
                        self.virtual_directories_inodes
                            .insert(mount_inode, VirtualDirectory::Mount { contents })
                            .await;
                        entries.push((mount_inode, FileType::Directory, k));
                    }
                    Ok(entries)
                }
                Contents::Query {
                    expression,
                    group_by_meta_keys,
                    group_by_tags,
                } => {
                    let root_query = Query::default().with_expression(expression)?;
                    let should_group = group_by_tags || !group_by_meta_keys.is_empty();
                    if should_group {
                        let mut entries = vec![
                            (parent_inode, FileType::Directory, ".".to_string()),
                            (parent_inode, FileType::Directory, "..".to_string()),
                        ];
                        self.populate_virtual_directories(root_query, parent_inode)
                            .await?;
                        // TODO: Will need to use a more unique inode name since we can nest.
                        let tags_inode = self.get_virtual_inode(parent_inode, "tags").await;
                        entries.push((tags_inode, FileType::Directory, "tags".to_string()));
                        for group_key in group_by_meta_keys.iter() {
                            let kv_inode = self.get_virtual_inode(parent_inode, group_key).await;
                            entries.push((kv_inode, FileType::Directory, group_key.clone()));
                        }
                        Ok(entries)
                    } else {
                        // Display the results as a flat list.
                        self.list_entries(root_query, parent_inode).await
                    }
                }
            },
        }
    }

    /// Populates the metadata maps for virtual directories.
    ///
    /// This creates the virtual subdirectories for `query` mounts, creating directories for each tag and k/v pair.
    pub(crate) async fn populate_virtual_directories(
        &self,
        mut query: Query,
        parent_inode: u64,
    ) -> Result<()> {
        // Request facet counts so tags and k/v pairs can be enumerated.
        query.facets = true;
        let results = self.client.query(query.clone()).await?;
        ensure!(results.facets.is_some(), "missing facets");
        let facets = results.facets.unwrap();
        // Build "tags" virtual directory.
        let tags: Vec<String> = facets.tags.into_iter().map(|(tag, _count)| tag).collect();
        let kv: HashMap<String, Vec<String>> = facets
            .meta
            .into_iter()
            .map(|(key, meta_counts)| {
                (
                    key,
                    meta_counts.into_iter().map(|(key, _count)| key).collect(),
                )
            })
            .collect();
        // Register the inode for our tag directory.
        let tags_inode = self.get_virtual_inode(parent_inode, "tags").await;
        self.virtual_directories_inodes
            .insert(tags_inode, VirtualDirectory::InMemory(tags.clone()))
            .await;
        // Register a virtual directory for every tag.
        for tag in tags.into_iter() {
            let tag_inode = self.get_virtual_inode(tags_inode, &tag).await;
            self.virtual_directories_inodes
                .insert(
                    tag_inode,
                    VirtualDirectory::Query {
                        query: query.clone().and_tag(tag.clone()),
                    },
                )
                .await;
        }
        // Build k/v virtual directories
        for (key, values) in kv.into_iter() {
            let key_inode = self.get_virtual_inode(parent_inode, &key).await;
            self.virtual_directories_inodes
                .insert(key_inode, VirtualDirectory::InMemory(values.clone()))
                .await;
            for value in values.into_iter() {
                let value_inode = self.get_virtual_inode(key_inode, &value).await;
                self.virtual_directories_inodes
                    .insert(
                        value_inode,
                        VirtualDirectory::Query {
                            query: query.clone().and_meta(key.clone(), value.clone()),
                        },
                    )
                    .await;
            }
        }
        Ok(())
    }
}
|
use flate2::read::ZlibDecoder;
use flate2::write::ZlibEncoder;
use flate2::Compression;
use sha1::{Digest, Sha1};
use std::fs::{create_dir_all, read, File};
use std::io::Cursor;
use std::io::Read;
use std::path::PathBuf;
use std::time::SystemTime;
// hack to import both Writes
// https://stackoverflow.com/questions/59187608/can-i-use-write-and-file-write-all-in-same-fn
use std::fmt::Write as _;
use std::io::Write as _;
// Git object kinds supported by this mini implementation.
// NOTE(review): idiomatic Rust would name these Blob/Tree/Commit, but renaming
// the variants would break existing matches, so they are kept as-is.
enum BlobType {
    BLOB,
    TREE,
    COMMIT,
}
// A git object together with its derived data.
struct Blob {
    blob_type: BlobType,
    // Raw 20-byte SHA-1 of the framed (header + payload) bytes.
    sha_val: [u8; 20],
    // NOTE(review): `new_blob_from_data` stores *compressed* bytes here, while
    // `new_blob_from_blob_file` stores *decompressed* bytes — callers must know
    // which constructor produced the value. Confirm this asymmetry is intended.
    content: Vec<u8>,
}
impl Blob {
    /// Build an object from raw data: prepend the git header "<type> <len>\0",
    /// SHA-1 the framed bytes, and zlib-compress them for on-disk storage.
    pub fn new_blob_from_data(data: Vec<u8>, blob_type: BlobType) -> Self {
        // One format string instead of three near-duplicates; `{}` formats the
        // length directly, so the redundant `.to_string()` calls are gone.
        let kind = match blob_type {
            BlobType::BLOB => "blob",
            BlobType::TREE => "tree",
            BlobType::COMMIT => "commit",
        };
        let mut with_header = format!("{} {}\x00", kind, data.len()).into_bytes();
        with_header.extend(data);
        // hash header + payload with sha1
        let mut hasher = Sha1::new();
        hasher.update(&with_header);
        let sha_val: [u8; 20] = hasher.finalize().into();
        // compress header + payload with zlib
        let mut z = ZlibEncoder::new(Vec::new(), Compression::default());
        z.write_all(&with_header).unwrap();
        let content = z.finish().unwrap();
        Blob {
            blob_type,
            sha_val,
            content,
        }
    }
    /// Read a file from disk and wrap its bytes as a new object.
    pub fn new_blob_from_file(path: &PathBuf, blob_type: BlobType) -> Self {
        let bytes = read(path).unwrap();
        Blob::new_blob_from_data(bytes, blob_type)
    }
    /// Load an existing object from `.git/objects` by its hex sha.
    /// NOTE: here `content` holds the *decompressed* bytes (header included),
    /// unlike `new_blob_from_data`, which stores compressed bytes.
    pub fn new_blob_from_blob_file(blob_sha: &String, blob_type: BlobType) -> Self {
        let path = get_path(blob_sha);
        let bytes = read(path).unwrap();
        let mut z = ZlibDecoder::new(&bytes[..]);
        let mut content = Vec::new();
        z.read_to_end(&mut content).expect("cannot read blob");
        let mut hasher = Sha1::new();
        hasher.update(&content);
        let sha_val: [u8; 20] = hasher.finalize().into();
        Blob {
            blob_type,
            sha_val,
            content,
        }
    }
    /// Render the 20-byte sha as a 40-character lowercase hex string.
    fn get_sha_string(&self) -> String {
        let mut sha_str = String::with_capacity(self.sha_val.len() * 2);
        for byte in &self.sha_val {
            write!(sha_str, "{:02x}", byte).unwrap();
        }
        sha_str
    }
    /// Create the object's shard directory and write `content` to disk.
    pub fn write_blob(&self) {
        let blob_path = get_path(&self.get_sha_string());
        create_dir_all(blob_path.parent().unwrap()).unwrap();
        let mut file = File::create(blob_path).unwrap();
        file.write_all(&self.content).unwrap();
        file.flush().unwrap();
    }
}
// get storage path for blob
fn get_path(sha: &str) -> PathBuf {
let dir = &sha[0..2];
let file = &sha[2..];
let path: PathBuf = [".git", "objects", dir, file].iter().collect();
path
}
// Load a blob object from .git/objects and return its payload as a UTF-8 string.
pub fn read_blob(blob_sha: &String) -> String {
    let blob = Blob::new_blob_from_blob_file(blob_sha, BlobType::BLOB);
    let data = String::from_utf8(blob.content).unwrap();
    // strip the "blob <len>\0" header before returning the payload
    let i = data.find('\x00').unwrap();
    data[i + 1..].to_owned()
}
// Create a blob from a file, write it to .git/objects, and return its hex sha1.
pub fn hash_object(file_path: &str) -> String {
    let path = PathBuf::from(file_path);
    let blob = Blob::new_blob_from_file(&path, BlobType::BLOB);
    blob.write_blob();
    blob.get_sha_string()
}
// List the entry names of a tree object, one per line.
pub fn read_tree_object(tree_sha: &String) -> String {
    let blob = Blob::new_blob_from_blob_file(tree_sha, BlobType::TREE);
    let data = blob.content;
    let mut names: String = String::new();
    // skip the "tree <len>\0" header (meta data)
    let mut cur_index = data.iter().position(|u| *u == '\x00' as u8).unwrap() + 1;
    // Each entry is "<mode> <name>\0<20-byte raw sha>"; iterate over entries.
    while let Some(next_index) = data[cur_index..].iter().position(|u| *u == '\x00' as u8) {
        let file_str = std::str::from_utf8(&data[cur_index..cur_index + next_index]).unwrap();
        let name = file_str.split(' ').last().unwrap();
        names.push_str(name);
        names.push('\n');
        // Advance past the NUL terminator (1 byte) plus the raw sha (20 bytes).
        cur_index = cur_index + next_index + 21; // skip sha
        if cur_index >= data.len() {
            break;
        }
    }
    names
}
// Recursively write tree objects for a directory and return the root tree's sha.
pub fn create_tree_object(dir_path: &str) -> String {
    let path = PathBuf::from(dir_path);
    let blob = write_tree_object(&path).unwrap(); // dir should not be empty
    blob.get_sha_string()
}
// One entry of a tree object: a file or subdirectory plus its object sha.
struct TreeObject {
    is_file: bool,
    name: String,
    sha_val: [u8; 20],
}
// Recursively write blob/tree objects for everything under `path`.
// Returns None for an empty directory (nothing is recorded for it).
fn write_tree_object(path: &PathBuf) -> Option<Blob> {
    let mut contents = Vec::<TreeObject>::new();
    // iterate over directory and write blobs for files and directories recursively
    for entry in path.read_dir().unwrap() {
        let dir_entry = entry.unwrap();
        let value = dir_entry.file_type().unwrap();
        let name = dir_entry.file_name().to_str().unwrap().to_string();
        let path = dir_entry.path();
        // Skip hidden entries (including .git itself).
        if name.starts_with(".") {
            continue;
        }
        if value.is_file() {
            let blob = Blob::new_blob_from_file(&path, BlobType::BLOB);
            let sha_val = blob.sha_val;
            blob.write_blob();
            contents.push(TreeObject {
                is_file: true,
                name,
                sha_val,
            });
        } else if value.is_dir() {
            // Empty subdirectories yield None and are omitted from the tree.
            if let Some(blob) = write_tree_object(&path) {
                let sha_val = blob.sha_val;
                blob.write_blob();
                contents.push(TreeObject {
                    is_file: false,
                    name,
                    sha_val,
                });
            }
        }
    }
    if contents.is_empty() {
        None
    } else {
        // Entries are sorted by name before serialization.
        contents.sort_by(|o1, o2| o1.name.cmp(&o2.name));
        let mut blob_content: Vec<u8> = Vec::new();
        for content in contents {
            let line = if content.is_file {
                format!("100644 {}\x00", content.name)
            } else {
                // git writes 40000 as access mode
                // this is different from 040000 which is displayed on
                // running `cat-file`
                format!("40000 {}\x00", content.name)
            };
            // Append the raw 20-byte sha after the NUL-terminated mode+name.
            let mut line = line.into_bytes();
            line.extend(content.sha_val.iter());
            blob_content.extend(line);
        }
        let blob = Blob::new_blob_from_data(blob_content, BlobType::TREE);
        blob.write_blob();
        Some(blob)
    }
}
/// Build a commit object pointing at `tree_sha` (with optional parent), write
/// it to .git/objects, and return its hex sha.
///
/// Fix: the header keyword was misspelled "commiter"; git requires "committer",
/// so the previous output was not a valid commit object.
pub fn create_commit(tree_sha: &String, parent_sha: &String, message: &String) -> String {
    let time = SystemTime::now()
        .duration_since(SystemTime::UNIX_EPOCH)
        .unwrap()
        .as_secs();
    // Hard-coded timezone offset; author/committer identity is a placeholder.
    let offset = String::from("+0530");
    // Root commits have no "parent" line.
    let parent_line = if parent_sha.is_empty() {
        String::new()
    } else {
        format!("parent {}\n", parent_sha)
    };
    let content = format!(
        "tree {}\n{}author Alias Anon <a@a.com> {} {}\ncommitter Alias Anon <a@a.com> {} {}\n\n{}\n",
        tree_sha, parent_line, time, offset, time, offset, message
    );
    let blob = Blob::new_blob_from_data(content.into_bytes(), BlobType::COMMIT);
    blob.write_blob();
    blob.get_sha_string()
}
|
#[doc = r"Value read from the register"]
// Read-proxy: a plain snapshot of the register's 32 bits.
pub struct R {
    bits: u32,
}
#[doc = r"Value to write to the register"]
// Write-proxy: bits accumulated before being committed to the register.
pub struct W {
    bits: u32,
}
// Auto-generated (svd2rust-style) read/modify/write plumbing for SSFSTAT2.
impl super::SSFSTAT2 {
    #[doc = r"Modifies the contents of the register"]
    #[inline(always)]
    pub fn modify<F>(&self, f: F)
    where
        for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,
    {
        // Read-modify-write: W is seeded with the current value so fields the
        // closure does not touch are preserved.
        let bits = self.register.get();
        self.register.set(f(&R { bits }, &mut W { bits }).bits);
    }
    #[doc = r"Reads the contents of the register"]
    #[inline(always)]
    pub fn read(&self) -> R {
        R {
            bits: self.register.get(),
        }
    }
    #[doc = r"Writes to the register"]
    #[inline(always)]
    pub fn write<F>(&self, f: F)
    where
        F: FnOnce(&mut W) -> &mut W,
    {
        // Unlike modify(), W starts from the reset value, not the current one.
        self.register.set(
            f(&mut W {
                bits: Self::reset_value(),
            })
            .bits,
        );
    }
    #[doc = r"Reset value of the register"]
    #[inline(always)]
    pub const fn reset_value() -> u32 {
        0
    }
    #[doc = r"Writes the reset value to the register"]
    #[inline(always)]
    pub fn reset(&self) {
        self.register.set(Self::reset_value())
    }
}
#[doc = r"Value of the field"]
// Reader for the 4-bit TPTR (FIFO tail pointer) field.
pub struct ADC_SSFSTAT2_TPTRR {
    bits: u8,
}
impl ADC_SSFSTAT2_TPTRR {
    #[doc = r"Value of the field as raw bits"]
    #[inline(always)]
    pub fn bits(&self) -> u8 {
        self.bits
    }
}
#[doc = r"Proxy"]
// Writer for TPTR: bits 0..=3 of W.
pub struct _ADC_SSFSTAT2_TPTRW<'a> {
    w: &'a mut W,
}
impl<'a> _ADC_SSFSTAT2_TPTRW<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // Clear the 4-bit field, then OR in the masked value.
        self.w.bits &= !(15 << 0);
        self.w.bits |= ((value as u32) & 15) << 0;
        self.w
    }
}
#[doc = r"Value of the field"]
// Reader for the 4-bit HPTR (FIFO head pointer) field.
pub struct ADC_SSFSTAT2_HPTRR {
    bits: u8,
}
impl ADC_SSFSTAT2_HPTRR {
    #[doc = r"Value of the field as raw bits"]
    #[inline(always)]
    pub fn bits(&self) -> u8 {
        self.bits
    }
}
#[doc = r"Proxy"]
// Writer for HPTR: bits 4..=7 of W.
pub struct _ADC_SSFSTAT2_HPTRW<'a> {
    w: &'a mut W,
}
impl<'a> _ADC_SSFSTAT2_HPTRW<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // Clear the 4-bit field, then OR in the masked value.
        self.w.bits &= !(15 << 4);
        self.w.bits |= ((value as u32) & 15) << 4;
        self.w
    }
}
#[doc = r"Value of the field"]
// Reader for the single-bit EMPTY flag (bit 8).
pub struct ADC_SSFSTAT2_EMPTYR {
    bits: bool,
}
impl ADC_SSFSTAT2_EMPTYR {
    #[doc = r"Value of the field as raw bits"]
    #[inline(always)]
    pub fn bit(&self) -> bool {
        self.bits
    }
    #[doc = r"Returns `true` if the bit is clear (0)"]
    #[inline(always)]
    pub fn bit_is_clear(&self) -> bool {
        !self.bit()
    }
    #[doc = r"Returns `true` if the bit is set (1)"]
    #[inline(always)]
    pub fn bit_is_set(&self) -> bool {
        self.bit()
    }
}
#[doc = r"Proxy"]
// Writer for EMPTY: bit 8 of W.
pub struct _ADC_SSFSTAT2_EMPTYW<'a> {
    w: &'a mut W,
}
impl<'a> _ADC_SSFSTAT2_EMPTYW<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear bit 8, then OR in the new value.
        self.w.bits &= !(1 << 8);
        self.w.bits |= ((value as u32) & 1) << 8;
        self.w
    }
}
#[doc = r"Value of the field"]
// Reader for the single-bit FULL flag (bit 12).
pub struct ADC_SSFSTAT2_FULLR {
    bits: bool,
}
impl ADC_SSFSTAT2_FULLR {
    #[doc = r"Value of the field as raw bits"]
    #[inline(always)]
    pub fn bit(&self) -> bool {
        self.bits
    }
    #[doc = r"Returns `true` if the bit is clear (0)"]
    #[inline(always)]
    pub fn bit_is_clear(&self) -> bool {
        !self.bit()
    }
    #[doc = r"Returns `true` if the bit is set (1)"]
    #[inline(always)]
    pub fn bit_is_set(&self) -> bool {
        self.bit()
    }
}
#[doc = r"Proxy"]
// Writer for FULL: bit 12 of W.
pub struct _ADC_SSFSTAT2_FULLW<'a> {
    w: &'a mut W,
}
impl<'a> _ADC_SSFSTAT2_FULLW<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear bit 12, then OR in the new value.
        self.w.bits &= !(1 << 12);
        self.w.bits |= ((value as u32) & 1) << 12;
        self.w
    }
}
// Field accessors on the read snapshot; each extracts and shifts its bits.
impl R {
    #[doc = r"Value of the register as raw bits"]
    #[inline(always)]
    pub fn bits(&self) -> u32 {
        self.bits
    }
    #[doc = "Bits 0:3 - FIFO Tail Pointer"]
    #[inline(always)]
    pub fn adc_ssfstat2_tptr(&self) -> ADC_SSFSTAT2_TPTRR {
        let bits = ((self.bits >> 0) & 15) as u8;
        ADC_SSFSTAT2_TPTRR { bits }
    }
    #[doc = "Bits 4:7 - FIFO Head Pointer"]
    #[inline(always)]
    pub fn adc_ssfstat2_hptr(&self) -> ADC_SSFSTAT2_HPTRR {
        let bits = ((self.bits >> 4) & 15) as u8;
        ADC_SSFSTAT2_HPTRR { bits }
    }
    #[doc = "Bit 8 - FIFO Empty"]
    #[inline(always)]
    pub fn adc_ssfstat2_empty(&self) -> ADC_SSFSTAT2_EMPTYR {
        let bits = ((self.bits >> 8) & 1) != 0;
        ADC_SSFSTAT2_EMPTYR { bits }
    }
    #[doc = "Bit 12 - FIFO Full"]
    #[inline(always)]
    pub fn adc_ssfstat2_full(&self) -> ADC_SSFSTAT2_FULLR {
        let bits = ((self.bits >> 12) & 1) != 0;
        ADC_SSFSTAT2_FULLR { bits }
    }
}
// Field writer factories on the write proxy; each returns a per-field proxy
// that mutates this W in place.
impl W {
    #[doc = r"Writes raw bits to the register"]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
    #[doc = "Bits 0:3 - FIFO Tail Pointer"]
    #[inline(always)]
    pub fn adc_ssfstat2_tptr(&mut self) -> _ADC_SSFSTAT2_TPTRW {
        _ADC_SSFSTAT2_TPTRW { w: self }
    }
    #[doc = "Bits 4:7 - FIFO Head Pointer"]
    #[inline(always)]
    pub fn adc_ssfstat2_hptr(&mut self) -> _ADC_SSFSTAT2_HPTRW {
        _ADC_SSFSTAT2_HPTRW { w: self }
    }
    #[doc = "Bit 8 - FIFO Empty"]
    #[inline(always)]
    pub fn adc_ssfstat2_empty(&mut self) -> _ADC_SSFSTAT2_EMPTYW {
        _ADC_SSFSTAT2_EMPTYW { w: self }
    }
    #[doc = "Bit 12 - FIFO Full"]
    #[inline(always)]
    pub fn adc_ssfstat2_full(&mut self) -> _ADC_SSFSTAT2_FULLW {
        _ADC_SSFSTAT2_FULLW { w: self }
    }
}
|
use lazy_static::*;
use num_derive::{FromPrimitive, ToPrimitive};
use std::sync::Mutex;
// Logging verbosity; the numeric gap between 1 and 5 leaves room for
// intermediate levels without renumbering.
#[derive(Copy, Clone, Hash, Debug, Eq, PartialEq, FromPrimitive, ToPrimitive)]
pub enum VerbosityLevel {
    Errors = 0,
    Warnings = 1,
    Infos = 5,
}
// Process-wide verbosity, guarded by a mutex; defaults to Warnings.
lazy_static! {
    static ref VERBOSITY_LEVEL: Mutex<VerbosityLevel> = Mutex::new(VerbosityLevel::Warnings);
}
/// Replace the process-wide verbosity level.
pub fn set_verbosity_level(level: VerbosityLevel) {
    let mut current = VERBOSITY_LEVEL.lock().unwrap();
    *current = level;
}
/// Read the current process-wide verbosity level.
pub fn verbosity_level() -> VerbosityLevel {
    let guard = VERBOSITY_LEVEL.lock().unwrap();
    *guard
}
/// A notice for devs. These are good for things that are helpful to devs, such
/// as warning that Workers were disabled, which is important to devs but not
/// end users.
pub fn info(msg: &str) {
    let line = format!("Info: {}", msg);
    println!("{}", line);
}
/// Non-fatal warnings.
pub fn warn(msg: &str) {
    let line = format!("Warning: {}", msg);
    println!("{}", line);
}
/// Checks if ch is one of the following characters: SPACE, TAB, CR or LF.
pub fn is_space(ch: char) -> bool {
    // `matches!` on char literals is clearer than comparing against
    // `0x20.into()` etc., and compiles to the same checks.
    matches!(ch, ' ' | '\t' | '\r' | '\n')
}
// wasm-bindgen shims exposing the logging API to JavaScript; compiled only for
// wasm32 targets. Each function delegates to the plain-Rust version above.
#[cfg(target_arch = "wasm32")]
mod wasm {
    use super::*;
    use num_traits::{FromPrimitive, ToPrimitive};
    use wasm_bindgen::prelude::*;
    #[wasm_bindgen(js_name = setVerbosityLevel)]
    pub fn set_verbosity_level(level: u8) {
        // Panics if `level` is not a valid VerbosityLevel discriminant.
        super::set_verbosity_level(VerbosityLevel::from_u8(level).unwrap())
    }
    #[wasm_bindgen(js_name = getVerbosityLevel)]
    pub fn verbosity_level() -> u8 {
        super::verbosity_level().to_u8().unwrap()
    }
    #[wasm_bindgen]
    pub fn warn(msg: &str) {
        super::warn(msg)
    }
    #[wasm_bindgen]
    pub fn info(msg: &str) {
        super::info(msg)
    }
    #[wasm_bindgen(js_name = isSpace)]
    pub fn is_space(ch: u8) -> bool {
        super::is_space(ch.into())
    }
}
|
/*
8 core power9 @ 3.8Ghz
real 4m4.275s
user 129m54.588s
sys 0m7.761s
*/
#![feature(core_intrinsics)]
mod vec3;
use vec3::Vector3;
use rayon::prelude::*;
use std::{
sync::{Arc, RwLock},
intrinsics::{fmul_fast, fdiv_fast}
};
// Gravitational constant, SI units (m^3 kg^-1 s^-2).
const G: f32 = 6.673e-11;
// A point mass with position and velocity.
#[derive(Debug, Clone, Copy)]
struct Body {
    position: Vector3,
    velocity: Vector3,
    mass: f32,
}
impl Body {
    /// Advance the position by one Euler step: position += velocity * dt.
    fn update_position(&mut self, duration: f32) {
        let displacement = self.velocity * duration;
        self.position += displacement;
    }
}
// Accumulate into `dv` the velocity change induced on b0 by b1 over `duration`.
// Uses nightly fast-math intrinsics (`core_intrinsics` feature at file top).
// NOTE(review): `fg` is a force, not an acceleration — the update never divides
// by b0.mass, and `r` points from b1 toward b0, so the sign pushes b0 away.
// Confirm both are intended before relying on the physics.
fn update_one_dv_step(b0: &Body, b1: &Body, dv: &mut Vector3, duration: f32) {
    unsafe {
        // compute gravity: G * m0 * m1 / r^2 (divides by zero if positions coincide)
        let r = b0.position - b1.position;
        let rsquared = Vector3::dotp(&r, &r);
        let fg = fdiv_fast(fmul_fast(fmul_fast(G, b0.mass), b1.mass), rsquared);
        // compute the normal vector pointing from i to j
        let normal = fdiv_fast(1., f32::sqrt(rsquared));
        // update the velocity for this step
        *dv += r * normal * fg * duration;
    }
}
// Shared, lockable collection of bodies; cloning shares the same underlying data.
#[derive(Debug, Clone)]
struct Universe(Arc<RwLock<Vec<Body>>>);
// Deref to the inner Arc so `.read()` / `.write()` can be called directly.
impl std::ops::Deref for Universe {
    type Target = Arc<RwLock<Vec<Body>>>;
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
impl Universe {
    // Number of bodies (takes the read lock briefly).
    fn len(&self) -> usize {
        self.read().unwrap().len()
    }
    // Compute every body's velocity delta in parallel (rayon), then apply them
    // under the write lock. `dvs` is a caller-owned scratch buffer reused
    // across steps to avoid reallocation.
    fn update_velocities(&self, dvs: &mut Vec<Vector3>, duration: f32) {
        let len = self.len();
        dvs.clear();
        dvs.par_extend((0..len).into_par_iter().map(|i| {
            // Each worker takes its own read guard; positions are stable here
            // because the write lock is only taken after par_extend completes.
            let v = self.read().unwrap();
            let mut dv = Vector3::new(0., 0., 0.);
            // Sum contributions from every other body, skipping j == i.
            for j in 0..i {
                update_one_dv_step(&v[i], &v[j], &mut dv, duration);
            }
            for j in i+1..len {
                update_one_dv_step(&v[i], &v[j], &mut dv, duration);
            }
            dv
        }));
        let mut v = self.write().unwrap();
        for i in 0..len {
            v[i].velocity += dvs[i];
        }
    }
    // Euler-integrate all positions using the (already updated) velocities.
    fn update_positions(&self, duration: f32) {
        let mut v = self.write().unwrap();
        for b in v.iter_mut() {
            b.update_position(duration);
        }
    }
    // One simulation step: velocities first, then positions.
    fn step(&self, dvs: &mut Vec<Vector3>, duration: f32) {
        self.update_velocities(dvs, duration);
        self.update_positions(duration);
    }
}
// 8000 bodies for 10 minutes of simulated time (6000 steps of 0.1 s).
const BODIES: usize = 8000;
// Integration step, seconds.
const STEP: f32 = 0.1;
const STEPS: usize = 6000;
fn main() {
    use rand::prelude::*;
    let mut dvs = Vec::with_capacity(BODIES);
    // Deterministic RNG (fixed all-zero seed) so runs are reproducible.
    let mut r = {
        let mut rng = StdRng::from_seed([0; 32]);
        move || -> f32 { rng.gen() }
    };
    // Random bodies within a ~1e9 m cube, modest velocities, ~1e22 kg masses.
    let universe = Universe(Arc::new(RwLock::new((0..BODIES).map(|_| {
        Body {
            position: Vector3::new(r() * 1e9, r() * 1e9, r() * 1e9),
            velocity: Vector3::new(r() * 5e2, r() * 5e2, r() * 5e2),
            mass: r() * 1e22
        }
    }).collect::<Vec<Body>>())));
    for _ in 0..STEPS {
        universe.step(&mut dvs, STEP)
    }
}
// NOTE(review): 1_000_000_000 steps makes this test extremely long-running —
// confirm whether a much smaller iteration count was intended for CI.
#[test]
fn two_body_test() {
    let mut dvs = Vec::with_capacity(2);
    let step = 0.1;
    let universe = Universe(Arc::new(RwLock::new(vec![
        // the moon
        Body {
            position: Vector3::new(0., 0., 0.),
            velocity: Vector3::new(0., 0., 0.),
            mass: 7.34e22
        },
        // 1000kg meteor
        Body {
            position: Vector3::new(1_750_000., 0., 0.),
            velocity: Vector3::new(0., 1673., 0.),
            mass: 1000.
        }
    ])));
    for _ in 0..1_000_000_000 {
        universe.step(&mut dvs, step);
        let v = universe.read().unwrap();
        let d = Vector3::distance(&v[0].position, &v[1].position);
        // The meteor must neither crash into the moon (lunar radius)...
        assert!(d >= 1_737_100.);
        // ...nor escape to an absurd distance.
        assert!(d <= 1_000_000_000_000.);
    }
}
|
// Copyright 2021 Datafuse Labs.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Test metasrv SchemaApi by writing to one node and then reading from another,
//! on a restarted cluster.
use std::time::Duration;
use common_base::base::tokio;
use common_base::base::Stoppable;
use common_meta_client::ClientHandle;
use common_meta_client::MetaGrpcClient;
use common_meta_kvapi::kvapi::KVApi;
use common_meta_kvapi::kvapi::UpsertKVReq;
use common_meta_types::MatchSeq;
use common_meta_types::Operation;
use databend_meta::init_meta_ut;
use tracing::info;
use crate::tests::service::start_metasrv_cluster;
use crate::tests::service::MetaSrvTestContext;
use crate::tests::start_metasrv_with_context;
/// - Start a cluster of 3.
/// - Test upsert kv and read on different nodes.
/// - Stop and restart the cluster.
/// - Test upsert kv and read on different nodes.
#[async_entry::test(worker_threads = 3, init = "init_meta_ut!()", tracing_span = "debug")]
async fn test_kv_api_restart_cluster_write_read() -> anyhow::Result<()> {
    // Keys are namespaced per node config so runs do not collide.
    fn make_key(tc: &MetaSrvTestContext, k: impl std::fmt::Display) -> String {
        let x = &tc.config.raft_config;
        format!("t-restart-cluster-{}-{}-{}", x.config_id, x.id, k)
    }
    // Upsert a key through each node's own client, then read it back.
    async fn test_write_read_on_every_node(
        tcs: &[MetaSrvTestContext],
        key_suffix: &str,
    ) -> anyhow::Result<()> {
        info!("--- test write on every node: {}", key_suffix);
        for tc in tcs.iter() {
            let client = tc.grpc_client().await?;
            let k = make_key(tc, key_suffix);
            let res = client
                .upsert_kv(UpsertKVReq {
                    key: k.clone(),
                    seq: MatchSeq::GE(0),
                    value: Operation::Update(k.clone().into_bytes()),
                    value_meta: None,
                })
                .await?;
            info!("--- upsert res: {:?}", res);
            let res = client.get_kv(&k).await?;
            let res = res.unwrap();
            assert_eq!(k.into_bytes(), res.data);
        }
        Ok(())
    }
    let tcs = start_metasrv_cluster(&[0, 1, 2]).await?;
    info!("--- test write on a fresh cluster");
    test_write_read_on_every_node(&tcs, "1st").await?;
    info!("--- shutdown the cluster");
    let stopped_tcs = {
        let mut stopped_tcs = vec![];
        for mut tc in tcs {
            // TODO(xp): remove this field, or split MetaSrvTestContext into two struct:
            //           one for metasrv and one for meta_node
            assert!(tc.meta_node.is_none());
            let mut srv = tc.grpc_srv.take().unwrap();
            srv.stop(None).await?;
            stopped_tcs.push(tc);
        }
        stopped_tcs
    };
    info!("--- restart the cluster");
    let tcs = {
        let mut tcs = vec![];
        for mut tc in stopped_tcs {
            start_metasrv_with_context(&mut tc).await?;
            tcs.push(tc);
        }
        for tc in &tcs {
            info!("--- wait until a leader is observed");
            // Every tcs[i] contains one meta node in this context.
            let g = tc.grpc_srv.as_ref().unwrap();
            let meta_node = g.get_meta_node();
            let metrics = meta_node
                .raft
                .wait(timeout())
                .metrics(|m| m.current_leader.is_some(), "a leader is observed")
                .await?;
            info!("got leader, metrics: {:?}", metrics);
        }
        tcs
    };
    info!("--- test write on a restarted cluster");
    test_write_read_on_every_node(&tcs, "2nd").await?;
    Ok(())
}
/// - Start a cluster of 3.
/// - Test upsert kv and read on different nodes.
/// - Stop and restart the cluster.
/// - Test read kv using same grpc client.
#[async_entry::test(worker_threads = 3, init = "init_meta_ut!()", tracing_span = "debug")]
async fn test_kv_api_restart_cluster_token_expired() -> anyhow::Result<()> {
    // Keys are namespaced per node config so runs do not collide.
    fn make_key(tc: &MetaSrvTestContext, k: impl std::fmt::Display) -> String {
        let x = &tc.config.raft_config;
        format!("t-restart-cluster-{}-{}-{}", x.config_id, x.id, k)
    }
    // Writes through the long-lived `client` for the first node and per-node
    // clients for the rest; reads always go through the long-lived client.
    async fn test_write_read_on_every_node(
        tcs: &[MetaSrvTestContext],
        client: &ClientHandle,
        key_suffix: &str,
    ) -> anyhow::Result<()> {
        info!("--- test write on every node: {}", key_suffix);
        for (i, tc) in tcs.iter().enumerate() {
            let k = make_key(tc, key_suffix);
            if i == 0 {
                let res = client
                    .upsert_kv(UpsertKVReq {
                        key: k.clone(),
                        seq: MatchSeq::GE(0),
                        value: Operation::Update(k.clone().into_bytes()),
                        value_meta: None,
                    })
                    .await?;
                info!("--- upsert res: {:?}", res);
            } else {
                let client = tc.grpc_client().await.unwrap();
                let res = client
                    .upsert_kv(UpsertKVReq {
                        key: k.clone(),
                        seq: MatchSeq::GE(0),
                        value: Operation::Update(k.clone().into_bytes()),
                        value_meta: None,
                    })
                    .await?;
                info!("--- upsert res: {:?}", res);
            }
            let res = client.get_kv(&k).await?;
            let res = res.unwrap();
            assert_eq!(k.into_bytes(), res.data);
        }
        Ok(())
    }
    let tcs = start_metasrv_cluster(&[0, 1, 2]).await?;
    // Client with a 10s token lifetime, pointed at the first node only.
    let client = MetaGrpcClient::try_create(
        vec![tcs[0].config.grpc_api_address.clone()],
        "root",
        "xxx",
        None,
        Some(Duration::from_secs(10)),
        None,
    )?;
    info!("--- test write on a fresh cluster");
    let key_suffix = "1st";
    test_write_read_on_every_node(&tcs, &client, key_suffix).await?;
    info!("--- shutdown the cluster");
    let stopped_tcs = {
        let mut stopped_tcs = vec![];
        for mut tc in tcs {
            assert!(tc.meta_node.is_none());
            let mut srv = tc.grpc_srv.take().unwrap();
            srv.stop(None).await?;
            stopped_tcs.push(tc);
        }
        stopped_tcs
    };
    info!("--- restart the cluster");
    let tcs = {
        let mut tcs = vec![];
        for mut tc in stopped_tcs {
            start_metasrv_with_context(&mut tc).await?;
            tcs.push(tc);
        }
        for tc in &tcs {
            info!("--- wait until a leader is observed");
            // Every tcs[i] contains one meta node in this context.
            let g = tc.grpc_srv.as_ref().unwrap();
            let meta_node = g.get_meta_node();
            let metrics = meta_node
                .raft
                .wait(timeout())
                .metrics(|m| m.current_leader.is_some(), "a leader is observed")
                .await?;
            info!("got leader, metrics: {:?}", metrics);
        }
        tcs
    };
    // The old client (created before the restart) must still work.
    info!("--- read use old client");
    let tc = &tcs[0];
    let k = make_key(tc, key_suffix);
    let res = client.get_kv(&k).await?;
    let res = res.unwrap();
    assert_eq!(k.into_bytes(), res.data);
    Ok(())
}
// Election timeout is 8~12 sec.
// A raft node waits for an interval of election timeout before starting an
// election, so wait well beyond that for a leader to appear.
fn timeout() -> Option<Duration> {
    // 30 seconds; `from_secs` states the unit more directly than
    // `from_millis(30_000)`.
    Some(Duration::from_secs(30))
}
|
use super::*;
#[test]
fn without_boolean_right_errors_badarg() {
    // Property test: for any non-boolean right operand, the operation with a
    // boolean left operand must raise badarg.
    run!(
        |arc_process| strategy::term::is_not_boolean(arc_process.clone()),
        |right_boolean| {
            prop_assert_is_not_boolean!(result(false.into(), right_boolean), right_boolean);
            Ok(())
        },
    );
}
// `with_false_right_returns_false` in integration tests
// `with_true_right_returns_true` in integration tests
|
use krnl::power::die;
use core::fmt;
// Required language item for stack unwinding; intentionally a no-op here.
#[lang = "eh_personality"]
#[no_mangle]
#[allow(private_no_mangle_fns)]
pub extern "C" fn eh_personality() {}
// Kernel panic handler (old `panic_fmt` lang item): print the location and halt.
// NOTE(review): the formatted panic message itself (`_: fmt::Arguments`) is
// discarded — only file/line/column are printed.
#[lang = "panic_fmt"]
#[no_mangle]
#[allow(private_no_mangle_fns)]
pub extern "C" fn panic_fmt(
    _: fmt::Arguments,
    file: &'static str,
    line: u32,
    column: u32,
) -> () {
    // TODO: May deadlock here if CONSOLE lock is held by other guys.
    printf!("panic at {} L{}:{}.\n", file, line, column);
    unsafe {
        die();
    }
}
|
use std::str::FromStr;
use std::sync::Arc;
use anyhow::Result;
use hyper::header;
use hyper::StatusCode;
use hyper::{Body, Request, Response, Uri};
use routerify::{ext::RequestExt, RouterBuilder};
use zenith_utils::auth::JwtAuth;
use zenith_utils::http::endpoint::attach_openapi_ui;
use zenith_utils::http::endpoint::auth_middleware;
use zenith_utils::http::endpoint::check_permission;
use zenith_utils::http::error::ApiError;
use zenith_utils::http::{
endpoint,
error::HttpErrorBody,
json::{json_request, json_response},
};
use super::models::BranchCreateRequest;
use super::models::TenantCreateRequest;
use crate::branches::BranchInfo;
use crate::{branches, tenant_mgr, PageServerConf, ZTenantId};
// Shared router state: static server config, optional JWT auth, and the
// routes that are served without authentication.
#[derive(Debug)]
struct State {
    conf: &'static PageServerConf,
    auth: Option<Arc<JwtAuth>>,
    // Routes reachable without a token (status, docs, openapi spec).
    allowlist_routes: Vec<Uri>,
}
impl State {
    /// Build router state; the allowlist names routes that skip auth.
    fn new(conf: &'static PageServerConf, auth: Option<Arc<JwtAuth>>) -> Self {
        let mut allowlist_routes = Vec::with_capacity(3);
        for route in ["/v1/status", "/v1/doc", "/swagger.yml"].iter() {
            allowlist_routes.push(route.parse().unwrap());
        }
        Self {
            conf,
            auth,
            allowlist_routes,
        }
    }
}
// Fetch the shared State stored as router data by make_router; panics if the
// router was built without it.
#[inline(always)]
fn get_state(request: &Request<Body>) -> &State {
    request
        .data::<Arc<State>>()
        .expect("unknown state type")
        .as_ref()
}
// Convenience accessor for the static server config inside handlers.
#[inline(always)]
fn get_config(request: &Request<Body>) -> &'static PageServerConf {
    get_state(request).conf
}
/// Extract a required path parameter, or a BadRequest error naming the
/// missing parameter.
fn get_request_param<'a>(
    request: &'a Request<Body>,
    param_name: &str,
) -> Result<&'a str, ApiError> {
    match request.param(param_name) {
        Some(arg) => Ok(arg),
        // The `return` keyword inside a match arm was redundant: the match is
        // the function's tail expression.
        None => Err(ApiError::BadRequest(format!(
            "no {} specified in path param",
            param_name
        ))),
    }
}
/// Extract and parse a path parameter as `T`.
///
/// Fix: the error message previously said "failed to parse tenant id"
/// regardless of which parameter failed; it now names the actual parameter.
fn parse_request_param<T: FromStr>(
    request: &Request<Body>,
    param_name: &str,
) -> Result<T, ApiError> {
    get_request_param(request, param_name)?
        .parse()
        .map_err(|_| ApiError::BadRequest(format!("failed to parse {}", param_name)))
}
// healthcheck handler: always answers 200 with an empty JSON object.
async fn status_handler(_: Request<Body>) -> Result<Response<Body>, ApiError> {
    Ok(Response::builder()
        .status(StatusCode::OK)
        .header(header::CONTENT_TYPE, "application/json")
        .body(Body::from("{}"))
        .map_err(ApiError::from_err)?)
}
// POST /v1/branch: create a branch for the tenant named in the request body.
async fn branch_create_handler(mut request: Request<Body>) -> Result<Response<Body>, ApiError> {
    let request_data: BranchCreateRequest = json_request(&mut request).await?;
    check_permission(&request, Some(request_data.tenant_id))?;
    // The branch creation does blocking work, so it runs off the async executor.
    let response_data = tokio::task::spawn_blocking(move || {
        branches::create_branch(
            get_config(&request),
            &request_data.name,
            &request_data.start_point,
            &request_data.tenant_id,
        )
    })
    .await
    .map_err(ApiError::from_err)??;
    Ok(json_response(StatusCode::CREATED, response_data)?)
}
// GET /v1/branch/:tenant_id: list the tenant's branches.
async fn branch_list_handler(request: Request<Body>) -> Result<Response<Body>, ApiError> {
    let tenantid: ZTenantId = parse_request_param(&request, "tenant_id")?;
    check_permission(&request, Some(tenantid))?;
    // Blocking branch enumeration runs off the async executor.
    let response_data = tokio::task::spawn_blocking(move || {
        crate::branches::get_branches(get_config(&request), &tenantid)
    })
    .await
    .map_err(ApiError::from_err)??;
    Ok(json_response(StatusCode::OK, response_data)?)
}
// GET /v1/branch/:tenant_id/:branch_name: details of one branch.
// TODO add to swagger
// NOTE(review): unlike the other tenant-scoped handlers, this one does not call
// check_permission — confirm whether that is intentional.
async fn branch_detail_handler(request: Request<Body>) -> Result<Response<Body>, ApiError> {
    let tenantid: ZTenantId = parse_request_param(&request, "tenant_id")?;
    let branch_name: &str = get_request_param(&request, "branch_name")?;
    let conf = get_state(&request).conf;
    let path = conf.branch_path(branch_name, &tenantid);
    let response_data = tokio::task::spawn_blocking(move || {
        let repo = tenant_mgr::get_repository_for_tenant(tenantid)?;
        BranchInfo::from_path(path, conf, &tenantid, &repo)
    })
    .await
    .map_err(ApiError::from_err)??;
    Ok(json_response(StatusCode::OK, response_data)?)
}
// GET /v1/tenant: list all tenants (management-level permission required).
async fn tenant_list_handler(request: Request<Body>) -> Result<Response<Body>, ApiError> {
    // check for management permission
    check_permission(&request, None)?;
    let response_data =
        tokio::task::spawn_blocking(move || crate::branches::get_tenants(get_config(&request)))
            .await
            .map_err(ApiError::from_err)??;
    Ok(json_response(StatusCode::OK, response_data)?)
}
// POST /v1/tenant: create a repository for a new tenant
// (management-level permission required).
async fn tenant_create_handler(mut request: Request<Body>) -> Result<Response<Body>, ApiError> {
    // check for management permission
    check_permission(&request, None)?;
    let request_data: TenantCreateRequest = json_request(&mut request).await?;
    let response_data = tokio::task::spawn_blocking(move || {
        tenant_mgr::create_repository_for_tenant(get_config(&request), request_data.tenant_id)
    })
    .await
    .map_err(ApiError::from_err)??;
    Ok(json_response(StatusCode::CREATED, response_data)?)
}
// Fallback handler: any unmatched route gets a JSON 404 body.
async fn handler_404(_: Request<Body>) -> Result<Response<Body>, ApiError> {
    json_response(
        StatusCode::NOT_FOUND,
        HttpErrorBody::from_msg("page not found".to_owned()),
    )
}
// Assemble the management API router: swagger UI, optional JWT auth middleware
// (skipping allowlisted routes), shared State, and all route handlers.
pub fn make_router(
    conf: &'static PageServerConf,
    auth: Option<Arc<JwtAuth>>,
) -> RouterBuilder<hyper::Body, ApiError> {
    let spec = include_bytes!("openapi_spec.yml");
    let mut router = attach_openapi_ui(endpoint::make_router(), spec, "/swagger.yml", "/v1/doc");
    if auth.is_some() {
        router = router.middleware(auth_middleware(|request| {
            let state = get_state(request);
            // Returning None disables auth for that request.
            if state.allowlist_routes.contains(request.uri()) {
                None
            } else {
                state.auth.as_deref()
            }
        }))
    }
    router
        .data(Arc::new(State::new(conf, auth)))
        .get("/v1/status", status_handler)
        .get("/v1/branch/:tenant_id", branch_list_handler)
        .get("/v1/branch/:tenant_id/:branch_name", branch_detail_handler)
        .post("/v1/branch", branch_create_handler)
        .get("/v1/tenant", tenant_list_handler)
        .post("/v1/tenant", tenant_create_handler)
        .any(handler_404)
}
|
/// Example of using Option in Rust, since there is no null/nil.
/// Option serves as a value that either holds something or is None ("empty").
///
struct Person {
    fname: &'static str,
    lname: &'static str,
    // Middle name is optional.
    mname: Option<&'static str>,
}
fn main() {
    // Take the value out of an Option with "if let"
    // (could also use "unwrap" or "unwrap_or").
    if let Some(name) = get_name(1) {
        println!("Nama Anda: {}", name);
    }
    if let None = get_name(0) {
        println!("Nama tidak ketemu!.");
    }
    // Person without a middle name.
    let person1 = Person {
        fname: "Agus",
        lname: "Susilo",
        mname: None,
    };
    println!("{}", get_fullname(person1));
    // Person with a middle name.
    let person2 = Person {
        fname: "Agus",
        lname: "Susilo",
        mname: Some("Berbah"),
    };
    println!("{}", get_fullname(person2));
}
// get_name returns the name for a positive id, or None otherwise.
fn get_name(id: i32) -> Option<&'static str> {
    if id > 0 {
        Some("Agus Susilo")
    } else {
        None
    }
}
// get_fullname formats the name whether or not a middle name is present.
fn get_fullname(p: Person) -> String {
    if let Some(middle) = p.mname {
        format!("{} {} {}", p.fname, middle, p.lname)
    } else {
        format!("{} {}", p.fname, p.lname)
    }
}
|
// Copyright 2023 Datafuse Labs.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use common_exception::ErrorCode;
use common_exception::Result;
use serde::Deserialize;
use serde::Serialize;
use crate::principal::FileFormatOptions;
use crate::principal::UserIdentity;
// A named, user-created file format definition and the identity that created it.
#[derive(Serialize, Deserialize, Clone, Debug, Eq, PartialEq, Default)]
#[serde(default)]
pub struct UserDefinedFileFormat {
    pub name: String,
    pub file_format_options: FileFormatOptions,
    pub creator: UserIdentity,
}
impl UserDefinedFileFormat {
    /// Construct a named user-defined file format owned by `creator`.
    pub fn new(name: &str, file_format_options: FileFormatOptions, creator: UserIdentity) -> Self {
        let name = name.to_string();
        Self {
            name,
            file_format_options,
            creator,
        }
    }
}
impl TryFrom<Vec<u8>> for UserDefinedFileFormat {
    type Error = ErrorCode;
    /// Deserialize a file format definition from its JSON byte representation.
    fn try_from(value: Vec<u8>) -> Result<Self> {
        // `map_err` replaces the match that only re-wrapped the error; the
        // local is also renamed — it is a *de*serialization error.
        serde_json::from_slice(&value).map_err(|deserialize_error| {
            ErrorCode::IllegalFileFormat(format!(
                "Cannot deserialize user defined file format from bytes. cause {}",
                deserialize_error
            ))
        })
    }
}
|
// Copyright 2022 Datafuse Labs.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::collections::HashMap;
use std::sync::Arc;
use common_ast::ast::ExplainKind;
use common_catalog::table_context::TableContext;
use common_exception::ErrorCode;
use common_exception::Result;
use super::cost::CostContext;
use super::format::display_memo;
use super::Memo;
use crate::optimizer::cascades::CascadesOptimizer;
use crate::optimizer::distributed::optimize_distributed_query;
use crate::optimizer::runtime_filter::try_add_runtime_filter_nodes;
use crate::optimizer::util::contains_local_table_scan;
use crate::optimizer::HeuristicOptimizer;
use crate::optimizer::SExpr;
use crate::plans::CopyPlan;
use crate::plans::Plan;
use crate::BindContext;
use crate::IndexType;
use crate::MetadataRef;
/// Settings that control how aggressively the optimizer rewrites a plan.
#[derive(Debug, Clone, Default)]
pub struct OptimizerConfig {
    /// When true, the optimizer may rewrite the plan for distributed execution.
    pub enable_distributed_optimization: bool,
}
/// Carries optimizer-wide settings through an optimization run.
#[derive(Debug)]
pub struct OptimizerContext {
    pub config: OptimizerConfig,
}
impl OptimizerContext {
pub fn new(config: OptimizerConfig) -> Self {
Self { config }
}
}
/// Top-level optimizer entry point: dispatches on the plan kind and runs every
/// embedded query expression through the optimization pipeline.
///
/// Non-query statements fall through unchanged.
pub fn optimize(
    ctx: Arc<dyn TableContext>,
    opt_ctx: Arc<OptimizerContext>,
    plan: Plan,
) -> Result<Plan> {
    match plan {
        Plan::Query {
            s_expr,
            bind_context,
            metadata,
            rewrite_kind,
            formatted_ast,
            ignore_result,
        } => Ok(Plan::Query {
            s_expr: Box::new(optimize_query(
                ctx,
                opt_ctx,
                metadata.clone(),
                bind_context.clone(),
                *s_expr,
            )?),
            bind_context,
            metadata,
            rewrite_kind,
            formatted_ast,
            ignore_result,
        }),
        Plan::Explain { kind, plan } => match kind {
            // Purely syntactic EXPLAIN variants don't need an optimized plan.
            ExplainKind::Raw | ExplainKind::Ast(_) | ExplainKind::Syntax(_) => {
                Ok(Plan::Explain { kind, plan })
            }
            // EXPLAIN MEMO renders the cascades memo: run the full
            // optimization and capture the memo + cost map instead of the plan.
            ExplainKind::Memo(_) => {
                if let box Plan::Query {
                    ref s_expr,
                    ref metadata,
                    ref bind_context,
                    ..
                } = plan
                {
                    let (memo, cost_map) = get_optimized_memo(
                        ctx,
                        *s_expr.clone(),
                        metadata.clone(),
                        bind_context.clone(),
                    )?;
                    Ok(Plan::Explain {
                        kind: ExplainKind::Memo(display_memo(&memo, &cost_map)?),
                        plan,
                    })
                } else {
                    Err(ErrorCode::BadArguments(
                        "Cannot use EXPLAIN MEMO with a non-query statement",
                    ))
                }
            }
            // Every other EXPLAIN kind shows the optimized plan itself.
            _ => Ok(Plan::Explain {
                kind,
                plan: Box::new(optimize(ctx, opt_ctx, *plan)?),
            }),
        },
        Plan::ExplainAnalyze { plan } => Ok(Plan::ExplainAnalyze {
            plan: Box::new(optimize(ctx, opt_ctx, *plan)?),
        }),
        Plan::Copy(v) => {
            Ok(Plan::Copy(Box::new(match *v {
                CopyPlan::IntoStage {
                    stage,
                    path,
                    validation_mode,
                    from,
                } => {
                    CopyPlan::IntoStage {
                        stage,
                        path,
                        validation_mode,
                        // Make sure the subquery has been optimized.
                        from: Box::new(optimize(ctx, opt_ctx, *from)?),
                    }
                }
                // COPY INTO <table> carries no embedded query to optimize here.
                into_table => into_table,
            })))
        }
        // Passthrough statements
        _ => Ok(plan),
    }
}
/// Optimizes a single query expression: heuristic rewrites first, then
/// cost-based (cascades) optimization, then an optional distributed rewrite.
pub fn optimize_query(
    ctx: Arc<dyn TableContext>,
    opt_ctx: Arc<OptimizerContext>,
    metadata: MetadataRef,
    bind_context: Box<BindContext>,
    s_expr: SExpr,
) -> Result<SExpr> {
    let contains_local_table_scan = contains_local_table_scan(&s_expr, &metadata);
    let mut heuristic = HeuristicOptimizer::new(ctx.clone(), bind_context, metadata.clone());
    let mut result = heuristic.optimize(s_expr)?;
    let mut cascades = CascadesOptimizer::create(ctx.clone(), metadata)?;
    result = cascades.optimize(result)?;
    // So far, we don't have ability to execute distributed query
    // with reading data from local tables (e.g. system tables).
    let enable_distributed_query =
        opt_ctx.config.enable_distributed_optimization && !contains_local_table_scan;
    // Add runtime filter related nodes after cbo
    // Because cbo may change join order and we don't want to
    // break optimizer due to new added nodes by runtime filter.
    // Currently, we only support standalone.
    if !enable_distributed_query && ctx.get_settings().get_runtime_filter()? {
        result = try_add_runtime_filter_nodes(&result)?;
    }
    if enable_distributed_query {
        result = optimize_distributed_query(ctx.clone(), &result)?;
    }
    Ok(result)
}
// TODO(leiysky): reuse the optimization logic with `optimize_query`
/// Runs the same heuristic + cascades pipeline as [`optimize_query`], but
/// returns the cascades memo and per-group cost map (used by `EXPLAIN MEMO`)
/// instead of the optimized expression.
fn get_optimized_memo(
    ctx: Arc<dyn TableContext>,
    s_expr: SExpr,
    metadata: MetadataRef,
    bind_context: Box<BindContext>,
) -> Result<(Memo, HashMap<IndexType, CostContext>)> {
    let mut heuristic = HeuristicOptimizer::new(ctx.clone(), bind_context, metadata.clone());
    let result = heuristic.optimize(s_expr)?;
    let mut cascades = CascadesOptimizer::create(ctx, metadata)?;
    cascades.optimize(result)?;
    Ok((cascades.memo, cascades.best_cost_map))
}
|
//! Managing undo state
use std::collections::VecDeque;

/// Default bound on how many states the undo stack retains.
const DEFAULT_UNDO_STACK_SIZE: usize = 128;

/// A stack of states that can be undone and redone.
#[derive(Debug)]
pub(crate) struct UndoState<T> {
    max_undo_count: usize,
    stack: VecDeque<T>,
    /// The index in `stack` of the current document.
    live_index: usize,
}

impl<T> UndoState<T> {
    /// Creates an undo stack holding `init_state`, bounded by the default size.
    pub(crate) fn new(init_state: T) -> Self {
        Self::new_sized(DEFAULT_UNDO_STACK_SIZE, init_state)
    }

    /// Creates an undo stack holding `init_state`, retaining at most
    /// `max_undo_count` states.
    fn new_sized(max_undo_count: usize, init_state: T) -> Self {
        UndoState {
            max_undo_count,
            stack: VecDeque::from(vec![init_state]),
            live_index: 0,
        }
    }

    /// Steps back one state and returns the newly-live state, or `None`
    /// when there is nothing left to undo.
    pub(crate) fn undo(&mut self) -> Option<&T> {
        let prev = self.live_index.checked_sub(1)?;
        self.live_index = prev;
        self.stack.get(prev)
    }

    /// Steps forward one state and returns the newly-live state, or `None`
    /// when there is nothing to redo.
    pub(crate) fn redo(&mut self) -> Option<&T> {
        let next = self.live_index + 1;
        if next >= self.stack.len() {
            return None;
        }
        self.live_index = next;
        self.stack.get(next)
    }

    /// Pushes a new state, discarding any redo history past the live state
    /// and evicting the oldest entry once the retention bound is exceeded.
    pub(crate) fn add_undo_group(&mut self, item: T) {
        // Anything after the live state is now unreachable redo history.
        self.stack.truncate(self.live_index + 1);
        self.stack.push_back(item);
        self.live_index += 1;
        // Enforce the retention bound by dropping the oldest state.
        if self.stack.len() > self.max_undo_count {
            self.stack.pop_front();
            self.live_index -= 1;
        }
    }

    /// Modify the state for the currently active undo group.
    /// This might be done if an edit occurs that combines with the previous undo,
    /// or if we want to save selection state.
    pub(crate) fn update_current_undo(&mut self, mut f: impl FnMut(&mut T)) {
        let live = self
            .stack
            .get_mut(self.live_index)
            .expect("live_index always points inside the stack");
        f(live);
    }
}
|
use std::fmt::Write as FormatWrite;
use std::io;
use std::io::Stderr;
use std::io::Write;
use std::thread;
use std::thread::JoinHandle;
use libc;
use termion;
use termion::event::Key;
use termion::input::TermRead;
use termion::raw::IntoRawMode;
use termion::raw::RawTerminal;
use backend::*;
use output_log::*;
/// A raw-mode terminal backend that renders progress/status lines on stderr
/// and forwards Ctrl-C / Ctrl-Z key presses to the process as signals.
pub struct RawConsole<'a> {
    // Invoked when writing the rendered buffer to stderr fails.
    error_handler: Box<Fn(io::Error) + Send>,
    // Held only to keep stderr in raw mode for the console's lifetime.
    _output: RawTerminal<Stderr>,
    // Background thread translating key presses into signals.
    _input_thread: JoinHandle<()>,
    // Frames cycled through for the spinner/tick display.
    status_tick_sequence: &'a [String],
    // Terminal width in columns (falls back to 80 when unknown).
    columns: u16,
    // Number of status lines currently drawn; rewritten on each update.
    status_lines: u16,
}
impl<'a> RawConsole<'a> {
    /// Puts stderr into raw mode and spawns the key-handling input thread.
    /// Returns `None` if raw mode can't be entered; the terminal width falls
    /// back to 80 columns when it can't be queried.
    pub fn new(
        error_handler: Box<Fn(io::Error) + Send>,
        status_tick_sequence: &'a [String],
    ) -> Option<RawConsole<'a>> {
        // setup output
        let output = match io::stderr().into_raw_mode() {
            Ok(terminal) => terminal,
            Err(_) => return None,
        };
        let columns = match termion::terminal_size() {
            Ok((columns, _rows)) => columns,
            Err(_) => 80,
        };
        // setup input
        let input_thread = thread::spawn(|| Self::input_thread());
        Some(RawConsole {
            error_handler: error_handler,
            _output: output,
            columns: columns,
            status_lines: 0,
            _input_thread: input_thread,
            status_tick_sequence: status_tick_sequence,
        })
    }

    /// Blocks on stdin and re-raises Ctrl-C as SIGINT and Ctrl-Z as SIGSTOP
    /// (raw mode normally suppresses the terminal's own signal generation).
    fn input_thread() {
        let stdin = io::stdin();
        for key_result in stdin.keys() {
            if let Ok(key) = key_result {
                match key {
                    Key::Ctrl('c') => {
                        unsafe {
                            libc::kill(libc::getpid(), libc::SIGINT);
                        }
                    },
                    Key::Ctrl('z') => {
                        unsafe {
                            libc::kill(libc::getpid(), libc::SIGSTOP);
                        }
                    },
                    _ => {
                        // ignore
                    },
                }
            }
        }
    }

    /// Writes a plain message line, truncated to the terminal width, and
    /// clears anything left on screen after it.
    ///
    /// NOTE(review): `&message[0..columns]` slices by byte index and will
    /// panic if the cut lands inside a multi-byte UTF-8 character — confirm
    /// messages are ASCII.
    fn write_message(&self, target: &mut FormatWrite, message: &str) {
        write!(
            target,
            "{}{}\r\n",
            if message.len() <= self.columns as usize {
                &message
            } else {
                &message[0..self.columns as usize]
            },
            termion::clear::AfterCursor,
        )
        .unwrap();
    }

    /// Writes a `message ... status` line, truncating the message so the
    /// status suffix still fits in the terminal width.
    ///
    /// NOTE(review): `self.columns as usize - status.len() - 5` can underflow
    /// (panicking in debug builds) on very narrow terminals or long statuses;
    /// same byte-boundary caveat as `write_message` applies to the slice.
    fn write_running(&self, target: &mut FormatWrite, message: &str, status: Option<&str>) {
        if let Some(status) = status {
            write!(
                target,
                "{} ... {}{}\r\n",
                if message.len() <= self.columns as usize - status.len() - 5 {
                    &message
                } else {
                    &message[0..self.columns as usize - status.len() - 5]
                },
                status,
                termion::clear::AfterCursor,
            )
            .unwrap();
        } else {
            write!(
                target,
                "{} ...{}\r\n",
                if message.len() <= self.columns as usize - 4 {
                    &message
                } else {
                    &message[0..self.columns as usize - 4]
                },
                termion::clear::AfterCursor,
            )
            .unwrap();
        }
    }
}
impl<'a> Backend for RawConsole<'a> {
    /// Redraws the status area: moves the cursor back to the top of the
    /// previously drawn block, re-renders every log entry into an in-memory
    /// buffer, blanks any now-unused lines, then writes the buffer to stderr
    /// in a single write.
    fn update(&mut self, logs: &[OutputLogInternal]) {
        let mut buffer = String::new();
        // move up to the start
        if self.status_lines > 0 {
            write!(
                buffer,
                "\r{}",
                termion::cursor::Up(self.status_lines),
            )
            .unwrap();
        }
        // output logs
        let old_status_lines = self.status_lines;
        self.status_lines = 0;
        for log in logs {
            if log.state() == OutputLogState::Removed {
                continue;
            }
            // Count lines belonging to the live status block: every line from
            // the first Running entry onward is redrawn on the next update.
            if log.state() == OutputLogState::Running
                || self.status_lines > 0 {
                self.status_lines += 1;
            }
            if log.state() == OutputLogState::Running {
                if log.denominator() > 0 {
                    // Progress total is known: render a percentage.
                    let percent_string =
                        format!("{}%", log.numerator() * 100 / log.denominator());
                    self.write_running(&mut buffer, log.message(), Some(&percent_string));
                } else if log.tick() > 0 {
                    // No known total: render the next spinner frame.
                    let tick_string = &self.status_tick_sequence
                        [(log.tick() as usize - 1) % self.status_tick_sequence.len()];
                    self.write_running(&mut buffer, log.message(), Some(&tick_string));
                } else {
                    self.write_running(&mut buffer, log.message(), None);
                }
            } else if log.state() == OutputLogState::Complete {
                self.write_running(&mut buffer, log.message(), Some("done"));
            } else if log.state() == OutputLogState::Incomplete {
                self.write_running(&mut buffer, log.message(), Some("abort"));
            } else if log.state() == OutputLogState::Message {
                self.write_message(&mut buffer, log.message());
            } else {
                unreachable!();
            }
        }
        // If the status block shrank, blank the leftover lines, then move the
        // cursor back up to the end of the new block.
        if self.status_lines < old_status_lines {
            for _index in 0..(old_status_lines - self.status_lines) {
                write!(
                    buffer,
                    "{}\n",
                    termion::clear::CurrentLine,
                )
                .unwrap();
            }
            write!(
                buffer,
                "{}",
                termion::cursor::Up(old_status_lines - self.status_lines),
            )
            .unwrap();
        }
        write!(
            io::stderr(),
            "{}",
            buffer,
        )
        .unwrap_or_else(|error| (self.error_handler)(error));
    }

    /// This backend does not require synchronous updates.
    fn synchronous(&self) -> bool {
        false
    }
}
// ex: noet ts=4 filetype=rust
|
// Only used by non-unit zero-sized types
//
// Expands to a hand-rolled `StableAbi`/`__GetStaticEquivalent_` implementation
// for a zero-sized marker type `$name`, mirroring what `abi_stable`'s derive
// macro would generate. The first arm wraps the expansion in an anonymous
// `const _: () = { ... }` block so the helper items defined by `@inner` do
// not leak into the surrounding namespace.
macro_rules! monomorphic_marker_type {
    ($name:ident, $field:ty) => {
        #[allow(non_upper_case_globals)]
        const _: () = {
            monomorphic_marker_type! {@inner $name, $field}
        };
    };
    (@inner $name:ident, $field:ty) => {
        // Source-location metadata baked into the type layout.
        const _item_info_const_: abi_stable::type_layout::ItemInfo = abi_stable::make_item_info!();
        const _SHARED_VARS_STRINGS_: ::abi_stable::std_types::RStr<'static> =
            abi_stable::std_types::RStr::from_str("_marker;");
        use ::abi_stable::derive_macro_reexports::{self as __sabi_re, renamed::*};
        pub struct _static_(extern "C" fn());
        unsafe impl __GetStaticEquivalent_ for $name {
            type StaticEquivalent = _static_;
        }
        // Monomorphic (generics-independent) part of the type layout.
        #[doc(hidden)]
        pub(super) const _MONO_LAYOUT_: &'static __sabi_re::MonoTypeLayout =
            &__sabi_re::MonoTypeLayout::from_derive(__sabi_re::_private_MonoTypeLayoutDerive {
                name: abi_stable::std_types::RStr::from_str(stringify!($name)),
                item_info: _item_info_const_,
                // NOTE(review): the packed field-layout constant below was
                // emitted by the derive macro; confirm it against the
                // `abi_stable` version in use before editing.
                data: __sabi_re::MonoTLData::derive_struct(__CompTLFields::new(
                    abi_stable::std_types::RSlice::from_slice(&[562949953880064u64]),
                    None,
                )),
                generics: abi_stable ::
                tl_genparams !
                (; __StartLen :: new(0u16, 0u16) ; __StartLen ::
                new(0u16, 0u16)),
                mod_refl_mode: __ModReflMode::Opaque,
                repr_attr: __ReprAttr::C,
                phantom_fields: abi_stable::std_types::RSlice::from_slice(&[]),
                shared_vars: abi_stable::type_layout::MonoSharedVars::new(
                    _SHARED_VARS_STRINGS_,
                    abi_stable::std_types::RSlice::from_slice(&[]),
                ),
            });
        impl $name {
            const __SABI_SHARED_VARS: &'static __sabi_re::SharedVars =
                &abi_stable::type_layout::SharedVars::new(
                    _MONO_LAYOUT_.shared_vars_static(),
                    abi_stable::_sabi_type_layouts!($field,),
                    __sabi_re::RSlice::from_slice(&[]),
                );
        }
        unsafe impl __sabi_re::StableAbi for $name {
            type IsNonZeroType = __sabi_re::False;
            const LAYOUT: &'static __sabi_re::TypeLayout = {
                // Compile-time assertion that `$name` really is zero-sized.
                zst_assert! {Self}
                &__sabi_re::TypeLayout::from_derive::<Self>(__sabi_re::_private_TypeLayoutDerive {
                    shared_vars: Self::__SABI_SHARED_VARS,
                    mono: _MONO_LAYOUT_,
                    abi_consts: Self::ABI_CONSTS,
                    data: __sabi_re::GenericTLData::Struct,
                    tag: None,
                    extra_checks: None,
                })
            };
        }
    };
}
|
//! Serial communication
use crate::clock::Clocks;
use crate::pac;
use embedded_time::rate::{Baud, Extensions};
/// Serial error
///
/// Errors that UART reception can report. Marked `#[non_exhaustive]` so new
/// variants can be added without breaking downstream matches.
#[derive(Debug)]
#[non_exhaustive]
pub enum Error {
    /// Framing error
    Framing,
    /// Noise error
    Noise,
    /// RX buffer overrun
    Overrun,
    /// Parity check error
    Parity,
}
/// Serial configuration
#[derive(Copy, Clone, Debug, PartialEq)]
pub struct Config {
    /// Target baud rate.
    pub baudrate: Baud,
    /// Bit order on the wire (LSB- or MSB-first).
    pub order: Order,
    /// Parity mode.
    pub parity: Parity,
    /// Number of stop bits.
    pub stopbits: StopBits,
    /// Data bits per word.
    pub wordlength: WordLength,
}
impl Config {
/// Sets the target baudrate
pub fn baudrate(mut self, baudrate: impl Into<Baud>) -> Self {
self.baudrate = baudrate.into();
self
}
/// Sets parity to no parity check
pub fn parity_none(mut self) -> Self {
self.parity = Parity::ParityNone;
self
}
/// Sets parity check to even
pub fn parity_even(mut self) -> Self {
self.parity = Parity::ParityEven;
self
}
/// Sets parity check to odd
pub fn parity_odd(mut self) -> Self {
self.parity = Parity::ParityOdd;
self
}
/// Sets the target stopbits
pub fn stopbits(mut self, stopbits: StopBits) -> Self {
self.stopbits = stopbits;
self
}
}
impl Default for Config {
    /// 115200 Bd, LSB-first, no parity, one stop bit, 8 data bits ("8N1").
    fn default() -> Config {
        Config {
            baudrate: 115_200_u32.Bd(),
            order: Order::LsbFirst,
            parity: Parity::ParityNone,
            stopbits: StopBits::STOP1,
            wordlength: WordLength::Eight,
        }
    }
}
/// Order of the bits transmitted and received on the wire
///
/// Applied to both directions via the bit-inverse register in [`Serial::uart0`].
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
pub enum Order {
    /// Each byte is sent out LSB-first
    LsbFirst,
    /// Each byte is sent out MSB-first
    MsbFirst,
}
/// Parity check
///
/// Selects whether a parity bit is generated/checked and its polarity.
#[derive(Copy, Clone, Debug, PartialEq)]
pub enum Parity {
    /// No parity check
    ParityNone,
    /// Even parity bit
    ParityEven,
    /// Odd parity bit
    ParityOdd,
}
/// Stop bits
///
/// Each setting (including the fractional ones) maps to a distinct hardware
/// code in [`Serial::uart0`].
#[derive(Copy, Clone, Debug, PartialEq)]
pub enum StopBits {
    /// 1 stop bit
    STOP1,
    /// 0.5 stop bits
    STOP0P5,
    /// 2 stop bits
    STOP2,
    /// 1.5 stop bits
    STOP1P5,
}
/// Word length
///
/// Number of data bits in each transmitted/received word.
#[derive(Copy, Clone, Debug, PartialEq)]
pub enum WordLength {
    /// 5 data bits
    Five,
    /// 6 data bits
    Six,
    /// 7 data bits
    Seven,
    /// 8 data bits
    Eight,
}
/// Interrupt event
///
/// Identifiers for the UART interrupt sources a caller can refer to.
pub enum Event {
    /// UART RX FIFO error interrupt
    RxFifoError,
    /// UART TX FIFO error interrupt
    TxFifoError,
    /// UART RX parity check error interrupt
    RxParityError,
    /// UART RX Time-out interrupt
    RxTimeout,
    /// UART RX FIFO ready (rx_fifo_cnt > rx_fifo_th) interrupt
    RxFifoReady,
    /// UART TX FIFO ready (tx_fifo_cnt > tx_fifo_th) interrupt
    TxFifoReady,
    /// UART RX transfer end interrupt
    RxTransferEnd,
    /// UART TX transfer end interrupt
    TxTransferEnd,
}
/// Serial abstraction
///
/// Owns the UART peripheral and its pin set; created via [`Serial::uart0`]
/// and dismantled with [`Serial::free`].
pub struct Serial<UART, PINS> {
    // Owned UART peripheral register block.
    uart: UART,
    // Pin tuple implementing `Pins<UART>`.
    pins: PINS,
}
impl<PINS> Serial<pac::UART, PINS>
where
    PINS: Pins<pac::UART>,
{
    // todo: there is UART0 and UART1
    /// Configures and enables UART0 using `config` and the pin set `PINS`.
    ///
    /// Derives the bit-period divisor from the UART clock and requested
    /// baudrate, then programs bit order, word length, stop bits, parity and
    /// the TX/RX/CTS enables according to which lines `PINS` provides.
    ///
    /// # Panics
    /// Panics with "impossible baudrate" if the divisor falls outside 1..=65535.
    pub fn uart0(uart: pac::UART, config: Config, pins: PINS, clocks: Clocks) -> Self {
        // Initialize clocks and baudrate
        let uart_clk = clocks.uart_clk();
        let baud = config.baudrate.0;
        let divisor = {
            let ans = uart_clk.0 / baud;
            if !(1..=65535).contains(&ans) {
                panic!("impossible baudrate");
            }
            ans as u16
        };
        // The same (divisor - 1) bit period drives both RX and TX.
        uart.uart_bit_prd.write(|w| unsafe {
            w.cr_urx_bit_prd()
                .bits(divisor - 1)
                .cr_utx_bit_prd()
                .bits(divisor - 1)
        });
        // Bit inverse configuration; MsbFirst => 1, LsbFirst => 0
        let order_cfg = match config.order {
            Order::LsbFirst => false,
            Order::MsbFirst => true,
        };
        uart.data_config
            .write(|w| w.cr_uart_bit_inv().bit(order_cfg));
        // UART TX config
        // The register encodes the data-bit count as (bits - 1).
        let data_bits_cfg = match config.wordlength {
            WordLength::Five => 4,
            WordLength::Six => 5,
            WordLength::Seven => 6,
            WordLength::Eight => 7,
        };
        let stop_bits_cfg = match config.stopbits {
            StopBits::STOP0P5 => 0,
            StopBits::STOP1 => 1,
            StopBits::STOP1P5 => 2,
            StopBits::STOP2 => 3,
        };
        // (parity enable, parity select) pair for both directions.
        let (parity_enable, parity_type) = match config.parity {
            Parity::ParityNone => (false, false),
            Parity::ParityEven => (true, false), // even => 0
            Parity::ParityOdd => (true, true),   // odd => 1
        };
        uart.utx_config.write(|w| unsafe {
            w.cr_utx_prt_en()
                .bit(parity_enable)
                .cr_utx_prt_sel()
                .bit(parity_type)
                .cr_utx_bit_cnt_d()
                .bits(data_bits_cfg)
                .cr_utx_bit_cnt_p()
                .bits(stop_bits_cfg)
                .cr_utx_frm_en()
                .set_bit() // [!] freerun on // todo
                .cr_utx_cts_en()
                .bit(PINS::HAS_CTS)
                .cr_utx_en()
                .bit(PINS::HAS_TX)
        });
        // UART RX config
        uart.urx_config.write(|w| unsafe {
            w.cr_urx_prt_en()
                .bit(parity_enable)
                .cr_urx_prt_sel()
                .bit(parity_type)
                .cr_urx_bit_cnt_d()
                .bits(data_bits_cfg)
                .cr_urx_deg_en()
                .clear_bit() // no rx input de-glitch // todo
                .cr_urx_rts_sw_mode()
                .clear_bit() // no RTS // todo
                .cr_urx_en()
                .bit(PINS::HAS_RX)
        });
        Serial { uart, pins }
    }

    /// Releases the UART peripheral and pins for reuse elsewhere.
    pub fn free(self) -> (pac::UART, PINS) {
        // todo!
        (self.uart, self.pins)
    }
}
impl<PINS> embedded_hal::serial::Write<u8> for Serial<pac::UART, PINS> {
    type Error = Error;

    /// Pushes one byte into the TX FIFO.
    ///
    /// NOTE(review): no free-space check is done before writing, so this
    /// never returns `WouldBlock`; confirm whether the FIFO can overflow
    /// when callers write faster than the line drains.
    fn try_write(&mut self, word: u8) -> nb::Result<(), Self::Error> {
        self.uart
            .uart_fifo_wdata
            .write(|w| unsafe { w.bits(word as u32) });
        Ok(())
    }

    /// Reports completion based on the `tx_fifo_cnt` field.
    ///
    /// NOTE(review): returning `WouldBlock` when `tx_fifo_cnt < 1` treats
    /// the counter as available FIFO space rather than pending bytes —
    /// verify the field's meaning against the BL602 reference manual.
    fn try_flush(&mut self) -> nb::Result<(), Self::Error> {
        if self.uart.uart_fifo_config_1.read().tx_fifo_cnt().bits() < 1 {
            Err(nb::Error::WouldBlock)
        } else {
            Ok(())
        }
    }
}
impl<PINS> embedded_hal::serial::Read<u8> for Serial<pac::UART, PINS> {
    type Error = Error;

    /// Pops one byte from the RX FIFO.
    ///
    /// NOTE(review): the RX FIFO count is never checked, so this never
    /// returns `WouldBlock` and may return stale/undefined data when the
    /// FIFO is empty — confirm against the hardware behavior.
    fn try_read(&mut self) -> nb::Result<u8, Self::Error> {
        let ans = self.uart.uart_fifo_rdata.read().bits();
        Ok((ans & 0xff) as u8)
    }
}
/// Serial transmit pins - DO NOT IMPLEMENT THIS TRAIT
///
/// These marker traits are `unsafe` because an incorrect implementation would
/// let a `Serial` be constructed from pins that are not actually routed to the
/// UART signals; they are implemented only via the `impl_uart_pin!` macro.
pub unsafe trait TxPin<UART> {}
/// Serial receive pins - DO NOT IMPLEMENT THIS TRAIT
pub unsafe trait RxPin<UART> {}
/// Serial rts pins - DO NOT IMPLEMENT THIS TRAIT
pub unsafe trait RtsPin<UART> {}
/// Serial cts pins - DO NOT IMPLEMENT THIS TRAIT
pub unsafe trait CtsPin<UART> {}
// For each (signal, mux) pair, marks any GPIO pin routed to that UART signal
// as a valid TX/RX/RTS/CTS pin for UART0. UART1 variants are still todo.
macro_rules! impl_uart_pin {
    ($(($UartSigi: ident, $UartMuxi: ident),)+) => {
        use crate::gpio::*;
        $(
            unsafe impl<PIN: UartPin<$UartSigi>> TxPin<pac::UART> for (PIN, $UartMuxi<Uart0Tx>) {}
            unsafe impl<PIN: UartPin<$UartSigi>> RxPin<pac::UART> for (PIN, $UartMuxi<Uart0Rx>) {}
            unsafe impl<PIN: UartPin<$UartSigi>> RtsPin<pac::UART> for (PIN, $UartMuxi<Uart0Rts>) {}
            unsafe impl<PIN: UartPin<$UartSigi>> CtsPin<pac::UART> for (PIN, $UartMuxi<Uart0Cts>) {}
            // unsafe impl<PIN: UartPin, SIG: UartSig<Uart1Tx>> TxPin<pac::UART> for (PIN, SIG) {}
            // unsafe impl<PIN: UartPin, SIG: UartSig<Uart1Rx>> RxPin<pac::UART> for (PIN, SIG) {}
            // unsafe impl<PIN: UartPin, SIG: UartSig<Uart1Rts>> RtsPin<pac::UART> for (PIN, SIG) {}
            // unsafe impl<PIN: UartPin, SIG: UartSig<Uart1Cts>> CtsPin<pac::UART> for (PIN, SIG) {}
        )+
    };
}
// Instantiate the pin-marker impls for all eight UART signal/mux slots.
impl_uart_pin!(
    (UartSig0, UartMux0),
    (UartSig1, UartMux1),
    (UartSig2, UartMux2),
    (UartSig3, UartMux3),
    (UartSig4, UartMux4),
    (UartSig5, UartMux5),
    (UartSig6, UartMux6),
    (UartSig7, UartMux7),
);
/// Serial pins - DO NOT IMPLEMENT THIS TRAIT
///
/// Implemented for pin tuples below; the flags tell [`Serial::uart0`] which
/// directions and flow-control lines to enable.
pub unsafe trait Pins<UART> {
    const HAS_TX: bool;
    const HAS_RX: bool;
    const HAS_RTS: bool;
    const HAS_CTS: bool;
}
/// A (TX, RX) pair: data lines only, no hardware flow control.
unsafe impl<UART, TX, RX> Pins<UART> for (TX, RX)
where
    TX: TxPin<UART>,
    RX: RxPin<UART>,
{
    const HAS_TX: bool = true;
    const HAS_RX: bool = true;
    const HAS_RTS: bool = false;
    const HAS_CTS: bool = false;
}
unsafe impl<UART, TX, RX, RTS, CTS> Pins<UART> for (TX, RX, RTS, CTS)
where
TX: TxPin<UART>,
RX: RxPin<UART>,
RTS: RxPin<UART>,
CTS: RxPin<UART>,
{
const HAS_TX: bool = true;
const HAS_RX: bool = true;
const HAS_RTS: bool = true;
const HAS_CTS: bool = true;
}
|
use anyhow::anyhow;
use anyhow::{Context, Result};
use clap::{App, AppSettings, Arg, ArgMatches, SubCommand};
use control_plane::compute::ComputeControlPlane;
use control_plane::local_env;
use control_plane::storage::PageServerNode;
use pageserver::defaults::{DEFAULT_HTTP_LISTEN_PORT, DEFAULT_PG_LISTEN_PORT};
use std::collections::HashMap;
use std::process::exit;
use std::str::FromStr;
use zenith_utils::auth::{encode_from_key_path, Claims, Scope};
use zenith_utils::postgres_backend::AuthType;
use zenith_utils::zid::{ZTenantId, ZTimelineId};
use pageserver::branches::BranchInfo;
///
/// Branches tree element used as a value in the HashMap.
///
/// Built by `print_branches_tree` and consumed recursively by `print_branch`
/// to render the branch hierarchy.
struct BranchTreeEl {
    /// `BranchInfo` received from the `pageserver` via the `branch_list` libpq API call.
    pub info: BranchInfo,
    /// Holds all direct children of this branch referenced using `timeline_id`.
    pub children: Vec<String>,
}
// Main entry point for the 'zenith' CLI utility
//
// This utility helps to manage zenith installation. That includes following:
// * Management of local postgres installations running on top of the
// pageserver.
// * Providing CLI api to the pageserver
// * TODO: export/import to/from usual postgres
fn main() -> Result<()> {
    // Reusable argument definitions shared by several subcommands.
    let timeline_arg = Arg::with_name("timeline")
        .short("n")
        .index(1)
        .help("Timeline name")
        .required(true);
    let tenantid_arg = Arg::with_name("tenantid")
        .long("tenantid")
        .help("Tenant id. Represented as a hexadecimal string 32 symbols length")
        .takes_value(true)
        .required(false);
    let port_arg = Arg::with_name("port")
        .long("port")
        .required(false)
        .value_name("port");
    let matches = App::new("Zenith CLI")
        .setting(AppSettings::ArgRequiredElseHelp)
        .subcommand(
            SubCommand::with_name("init")
                .about("Initialize a new Zenith repository")
                .arg(
                    Arg::with_name("pageserver-pg-port")
                        .long("pageserver-pg-port")
                        .required(false)
                        .value_name("pageserver-pg-port"),
                )
                .arg(
                    Arg::with_name("pageserver-http-port")
                        .long("pageserver-http-port")
                        .required(false)
                        .value_name("pageserver-http-port"),
                )
                .arg(
                    Arg::with_name("enable-auth")
                        .long("enable-auth")
                        .takes_value(false)
                        .help("Enable authentication using ZenithJWT")
                ),
        )
        .subcommand(
            SubCommand::with_name("branch")
                .about("Create a new branch")
                .arg(Arg::with_name("branchname").required(false).index(1))
                .arg(Arg::with_name("start-point").required(false).index(2))
                .arg(tenantid_arg.clone()),
        ).subcommand(
            SubCommand::with_name("tenant")
                .setting(AppSettings::ArgRequiredElseHelp)
                .about("Manage tenants")
                .subcommand(SubCommand::with_name("list"))
                .subcommand(SubCommand::with_name("create").arg(Arg::with_name("tenantid").required(false).index(1)))
        )
        .subcommand(SubCommand::with_name("status"))
        .subcommand(SubCommand::with_name("start").about("Start local pageserver"))
        .subcommand(SubCommand::with_name("stop").about("Stop local pageserver"))
        .subcommand(SubCommand::with_name("restart").about("Restart local pageserver"))
        .subcommand(
            SubCommand::with_name("pg")
                .setting(AppSettings::ArgRequiredElseHelp)
                .about("Manage postgres instances")
                .subcommand(SubCommand::with_name("list").arg(tenantid_arg.clone()))
                .subcommand(SubCommand::with_name("create")
                    .about("Create a postgres compute node")
                    .arg(timeline_arg.clone()).arg(tenantid_arg.clone()).arg(port_arg.clone())
                    .arg(
                        Arg::with_name("config-only")
                            .help("Don't do basebackup, create compute node with only config files")
                            .long("config-only")
                            .required(false)
                    ))
                .subcommand(SubCommand::with_name("start")
                    .about("Start a postgres compute node.\n This command actually creates new node from scratch, but preserves existing config files")
                    .arg(
                        timeline_arg.clone()
                    ).arg(
                        tenantid_arg.clone()
                    ).arg(port_arg.clone()))
                .subcommand(
                    SubCommand::with_name("stop")
                        .arg(timeline_arg.clone())
                        .arg(tenantid_arg.clone())
                        .arg(
                            Arg::with_name("destroy")
                                .help("Also delete data directory (now optional, should be default in future)")
                                .long("destroy")
                                .required(false)
                        )
                )
        )
        .get_matches();
    // Create config file
    // `init` is handled before the common config load below because it is
    // the command that creates the config file in the first place.
    if let ("init", Some(init_match)) = matches.subcommand() {
        let tenantid = ZTenantId::generate();
        let pageserver_pg_port = match init_match.value_of("pageserver-pg-port") {
            Some(v) => v.parse()?,
            None => DEFAULT_PG_LISTEN_PORT,
        };
        let pageserver_http_port = match init_match.value_of("pageserver-http-port") {
            Some(v) => v.parse()?,
            None => DEFAULT_HTTP_LISTEN_PORT,
        };
        let auth_type = if init_match.is_present("enable-auth") {
            AuthType::ZenithJWT
        } else {
            AuthType::Trust
        };
        local_env::init(
            pageserver_pg_port,
            pageserver_http_port,
            tenantid,
            auth_type,
        )
        .with_context(|| "Failed to create config file")?;
    }
    // all other commands would need config
    let env = match local_env::load_config() {
        Ok(conf) => conf,
        Err(e) => {
            eprintln!("Error loading config: {}", e);
            exit(1);
        }
    };
    // Dispatch to the subcommand handlers; each failure prints and exits 1.
    match matches.subcommand() {
        ("init", Some(init_match)) => {
            // Second half of `init`: initialize the pageserver's repository.
            let pageserver = PageServerNode::from_env(&env);
            if let Err(e) = pageserver.init(
                Some(&env.tenantid.to_string()),
                init_match.is_present("enable-auth"),
            ) {
                eprintln!("pageserver init failed: {}", e);
                exit(1);
            }
        }
        ("tenant", Some(args)) => {
            if let Err(e) = handle_tenant(args, &env) {
                eprintln!("tenant command failed: {}", e);
                exit(1);
            }
        }
        ("branch", Some(sub_args)) => {
            if let Err(e) = handle_branch(sub_args, &env) {
                eprintln!("branch command failed: {}", e);
                exit(1);
            }
        }
        ("start", Some(_sub_m)) => {
            let pageserver = PageServerNode::from_env(&env);
            if let Err(e) = pageserver.start() {
                eprintln!("pageserver start failed: {}", e);
                exit(1);
            }
        }
        ("stop", Some(_sub_m)) => {
            let pageserver = PageServerNode::from_env(&env);
            if let Err(e) = pageserver.stop() {
                eprintln!("pageserver stop failed: {}", e);
                exit(1);
            }
        }
        ("restart", Some(_sub_m)) => {
            // Restart is simply stop-then-start.
            let pageserver = PageServerNode::from_env(&env);
            if let Err(e) = pageserver.stop() {
                eprintln!("pageserver stop failed: {}", e);
                exit(1);
            }
            if let Err(e) = pageserver.start() {
                eprintln!("pageserver start failed: {}", e);
                exit(1);
            }
        }
        ("status", Some(_sub_m)) => {}
        ("pg", Some(pg_match)) => {
            if let Err(e) = handle_pg(pg_match, &env) {
                eprintln!("pg operation failed: {:?}", e);
                exit(1);
            }
        }
        _ => {}
    };
    Ok(())
}
///
/// Prints branches list as a tree-like structure.
///
fn print_branches_tree(branches: Vec<BranchInfo>) -> Result<()> {
    let mut branches_hash: HashMap<String, BranchTreeEl> = HashMap::new();
    // Form a hash table of branch timeline_id -> BranchTreeEl.
    for branch in &branches {
        branches_hash.insert(
            branch.timeline_id.to_string(),
            BranchTreeEl {
                info: branch.clone(),
                children: Vec::new(),
            },
        );
    }
    // Memorize all direct children of each branch.
    for branch in &branches {
        if let Some(tid) = &branch.ancestor_id {
            branches_hash
                .get_mut(tid)
                .with_context(|| "missing branch info in the HashMap")?
                .children
                .push(branch.timeline_id.to_string());
        }
    }
    // Sort children by tid to bring some minimal order.
    // (`values_mut()` already yields mutable references; the extra `&mut`
    // borrow of the iterator that used to be here was redundant.)
    for branch in branches_hash.values_mut() {
        branch.children.sort();
    }
    for branch in branches_hash.values() {
        // Start with root branches (no ancestors) first.
        // Now there is 'main' branch only, but things may change.
        if branch.info.ancestor_id.is_none() {
            print_branch(0, &Vec::from([true]), branch, &branches_hash)?;
        }
    }
    Ok(())
}
///
/// Recursively prints branch info with all its children.
///
/// `is_last[d]` records whether the branch at nesting depth `d` is the last
/// child of its parent, which decides the connector glyphs drawn per level.
fn print_branch(
    nesting_level: usize,
    is_last: &[bool],
    branch: &BranchTreeEl,
    branches: &HashMap<String, BranchTreeEl>,
) -> Result<()> {
    // Draw main padding
    print!(" ");
    if nesting_level > 0 {
        let lsn = branch
            .info
            .ancestor_lsn
            .as_ref()
            .with_context(|| "missing branch info in the HashMap")?;
        let mut br_sym = "┣━";
        // Draw each nesting padding with proper style
        // depending on whether its branch ended or not.
        if nesting_level > 1 {
            for l in &is_last[1..is_last.len() - 1] {
                if *l {
                    print!(" ");
                } else {
                    print!("┃ ");
                }
            }
        }
        // We are the last in this sub-branch
        if *is_last.last().unwrap() {
            br_sym = "┗━";
        }
        print!("{} @{}: ", br_sym, lsn);
    }
    // Finally print a branch name with new line
    println!("{}", branch.info.name);
    let len = branch.children.len();
    let mut is_last_new = Vec::from(is_last);
    is_last_new.push(false);
    // Use `enumerate` instead of the manual counter the original maintained
    // by hand alongside the loop.
    for (i, child) in branch.children.iter().enumerate() {
        // Mark that the last padding is the end of the branch
        if i + 1 == len {
            if let Some(last) = is_last_new.last_mut() {
                *last = true;
            }
        }
        print_branch(
            nesting_level + 1,
            &is_last_new,
            branches
                .get(child)
                .with_context(|| "missing branch info in the HashMap")?,
            branches,
        )?;
    }
    Ok(())
}
/// Returns a map of timeline IDs to branch_name@lsn strings.
/// Connects to the pageserver to query this information.
fn get_branch_infos(
    env: &local_env::LocalEnv,
    tenantid: &ZTenantId,
) -> Result<HashMap<ZTimelineId, BranchInfo>> {
    let page_server = PageServerNode::from_env(env);
    let infos: HashMap<ZTimelineId, BranchInfo> = page_server
        .branch_list(tenantid)?
        .into_iter()
        .map(|info| (info.timeline_id, info))
        .collect();
    Ok(infos)
}
/// Handles the `tenant` subcommands: `list` and `create`.
fn handle_tenant(tenant_match: &ArgMatches, env: &local_env::LocalEnv) -> Result<()> {
    let pageserver = PageServerNode::from_env(env);
    match tenant_match.subcommand() {
        ("list", Some(_)) => {
            pageserver
                .tenant_list()?
                .into_iter()
                .for_each(|tenant| println!("{}", tenant));
        }
        ("create", Some(create_match)) => {
            // Use the id given on the command line, or mint a fresh one.
            let tenantid = create_match
                .value_of("tenantid")
                .map(ZTenantId::from_str)
                .transpose()?
                .unwrap_or_else(ZTenantId::generate);
            println!("using tenant id {}", tenantid);
            pageserver.tenant_create(tenantid)?;
            println!("tenant successfully created on the pageserver");
        }
        _ => {}
    }
    Ok(())
}
/// Handles the `branch` subcommand: creates a branch when a name is given,
/// otherwise prints the branch tree for the tenant.
fn handle_branch(branch_match: &ArgMatches, env: &local_env::LocalEnv) -> Result<()> {
    let pageserver = PageServerNode::from_env(env);
    match branch_match.value_of("branchname") {
        Some(branchname) => {
            let startpoint_str = branch_match
                .value_of("start-point")
                .ok_or_else(|| anyhow!("Missing start-point"))?;
            let tenantid: ZTenantId = branch_match
                .value_of("tenantid")
                .map_or(Ok(env.tenantid), |value| value.parse())?;
            let branch = pageserver.branch_create(branchname, startpoint_str, &tenantid)?;
            println!(
                "Created branch '{}' at {:?} for tenant: {}",
                branch.name, branch.latest_valid_lsn, tenantid,
            );
        }
        None => {
            let tenantid: ZTenantId = branch_match
                .value_of("tenantid")
                .map_or(Ok(env.tenantid), |value| value.parse())?;
            // No arguments, list branches for tenant
            let branches = pageserver.branch_list(&tenantid)?;
            print_branches_tree(branches)?;
        }
    }
    Ok(())
}
/// Dispatches the `pg` subcommands (list/create/start/stop) against the
/// compute control plane loaded from `env`.
fn handle_pg(pg_match: &ArgMatches, env: &local_env::LocalEnv) -> Result<()> {
    let mut cplane = ComputeControlPlane::load(env.clone())?;
    match pg_match.subcommand() {
        ("list", Some(list_match)) => {
            let tenantid: ZTenantId = list_match
                .value_of("tenantid")
                .map_or(Ok(env.tenantid), |value| value.parse())?;
            // Branch info only decorates the listing; degrade gracefully if
            // the pageserver can't be queried.
            let branch_infos = get_branch_infos(env, &tenantid).unwrap_or_else(|e| {
                eprintln!("Failed to load branch info: {}", e);
                HashMap::new()
            });
            println!("BRANCH\tADDRESS\t\tLSN\t\tSTATUS");
            for ((_, timeline_name), node) in cplane
                .nodes
                .iter()
                .filter(|((node_tenantid, _), _)| node_tenantid == &tenantid)
            {
                println!(
                    "{}\t{}\t{}\t{}",
                    timeline_name,
                    node.address,
                    branch_infos
                        .get(&node.timelineid)
                        .map(|bi| bi.latest_valid_lsn.to_string())
                        .unwrap_or_else(|| "?".to_string()),
                    node.status(),
                );
            }
        }
        ("create", Some(create_match)) => {
            let tenantid: ZTenantId = create_match
                .value_of("tenantid")
                .map_or(Ok(env.tenantid), |value| value.parse())?;
            let timeline_name = create_match.value_of("timeline").unwrap_or("main");
            let port: Option<u16> = match create_match.value_of("port") {
                Some(p) => Some(p.parse()?),
                None => None,
            };
            cplane.new_node(tenantid, timeline_name, port)?;
        }
        ("start", Some(start_match)) => {
            let tenantid: ZTenantId = start_match
                .value_of("tenantid")
                .map_or(Ok(env.tenantid), |value| value.parse())?;
            let timeline_name = start_match.value_of("timeline").unwrap_or("main");
            let port: Option<u16> = match start_match.value_of("port") {
                Some(p) => Some(p.parse()?),
                None => None,
            };
            let node = cplane.nodes.get(&(tenantid, timeline_name.to_owned()));
            // Mint a tenant-scoped JWT only when the env requires auth.
            let auth_token = if matches!(env.auth_type, AuthType::ZenithJWT) {
                let claims = Claims::new(Some(tenantid), Scope::Tenant);
                Some(encode_from_key_path(&claims, &env.private_key_path)?)
            } else {
                None
            };
            println!(
                "Starting {} postgres on timeline {}...",
                if node.is_some() { "existing" } else { "new" },
                timeline_name
            );
            if let Some(node) = node {
                node.start(&auth_token)?;
            } else {
                // when used with custom port this results in non obvious behaviour
                // port is remembered from first start command, i e
                // start --port X
                // stop
                // start <-- will also use port X even without explicit port argument
                let node = cplane.new_node(tenantid, timeline_name, port)?;
                node.start(&auth_token)?;
            }
        }
        ("stop", Some(stop_match)) => {
            let timeline_name = stop_match.value_of("timeline").unwrap_or("main");
            let destroy = stop_match.is_present("destroy");
            let tenantid: ZTenantId = stop_match
                .value_of("tenantid")
                .map_or(Ok(env.tenantid), |value| value.parse())?;
            let node = cplane
                .nodes
                .get(&(tenantid, timeline_name.to_owned()))
                .ok_or_else(|| anyhow!("postgres {} is not found", timeline_name))?;
            node.stop(destroy)?;
        }
        _ => {}
    }
    Ok(())
}
|
// Channel submodules; see each file for the implementation details.
mod sync_channel;
mod watch;
|
use std::sync::Arc;
use super::Fact;
/// A boolean combination of [`Fact`]s over inputs of type `In`.
///
/// Built with [`Filter::fact`], [`Filter::and`] and [`Filter::or`], and
/// flattened into disjunctive normal form by [`Filter::into_table`].
pub enum Filter<In> {
    /// A single leaf predicate.
    Fact(Box<dyn Fact<In>>),
    /// Conjunction: every sub-filter must hold.
    And(Vec<Self>),
    /// Disjunction: at least one sub-filter must hold.
    Or(Vec<Self>),
}
impl<In> Filter<In> {
    /// The trivially-true filter: a conjunction of no conditions.
    pub fn empty() -> Self {
        Filter::And(Vec::new())
    }

    /// Wraps a single fact as a leaf filter.
    pub fn fact<F: Fact<In> + 'static>(fact: F) -> Self {
        Self::Fact(Box::new(fact))
    }

    /// Returns the conjunction of `self` and `other`.
    pub fn and(self, other: Self) -> Self {
        Self::And(vec![self, other])
    }

    /// Returns the disjunction of `self` and `other`.
    pub fn or(self, other: Self) -> Self {
        Self::Or(vec![self, other])
    }

    /// Flattens this filter into disjunctive normal form: each inner
    /// `Vec` is one alternative whose facts must all hold.
    pub fn into_table(self) -> Vec<Vec<Arc<dyn Fact<In>>>> {
        into_table(vec![vec![]], self)
    }
}
/// Recursively expands `filter` into disjunctive normal form.
///
/// `acc` holds the partial rows built so far; each row is a conjunction of
/// facts. A `Fact` leaf is appended to every row, `And` threads the
/// accumulator through each sub-filter in order, and `Or` forks the
/// accumulator once per alternative and concatenates the results.
fn into_table<In>(
    mut acc: Vec<Vec<Arc<dyn Fact<In>>>>,
    filter: Filter<In>,
) -> Vec<Vec<Arc<dyn Fact<In>>>> {
    match filter {
        Filter::Fact(fact) => {
            // One shared allocation of the fact across every row.
            let fact: Arc<dyn Fact<In>> = Arc::from(fact);
            for row in &mut acc {
                row.push(Arc::clone(&fact));
            }
            acc
        }
        Filter::And(filters) => filters
            .into_iter()
            .fold(acc, |acc, filter| into_table(acc, filter)),
        Filter::Or(filters) => {
            let last = filters.len().checked_sub(1);
            let mut out = Vec::new();
            for (i, filter) in filters.into_iter().enumerate() {
                // Clone the accumulator for every alternative except the
                // final one, which can consume it directly.
                let base = if Some(i) == last {
                    std::mem::take(&mut acc)
                } else {
                    acc.clone()
                };
                out.extend(into_table(base, filter));
            }
            out
        }
    }
}
|
/// Expands to a `fmt` method that renders a header as `name:value`.
/// Shared by the `Display` and `Debug` impls generated by `header!`.
macro_rules! header_display {
    ( ) => {
        fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::result::Result<(), std::fmt::Error> {
            write!(f, "{}:{}", self.header_name(), self.value)
        }
    };
}
/// Generates a `<Header>Value` struct for one STOMP header.
///
/// * `$header` — identifier used (via `paste!`) to build the type name.
/// * `$name` — the wire-format header name.
/// * `$types` (optional) — a typed value; when absent the value is a
///   borrowed `&'static str`.
/// * `$default` (optional) — default value; `EMPTY` is used when absent.
macro_rules! header {
    ( $header:ident, $name:expr $(,$types:ty $(, $default:expr )?)? ) => {
        paste! {
            #[derive(Eq, PartialEq, Clone)]
            pub struct [<$header Value>] {
                // Either the typed value ($types) or a borrowed str slice.
                value: or_else_type!($($types)?,&'static str),
            }
            impl Default for [<$header Value>] {
                fn default() -> Self {
                    [<$header Value>] {
                        value: or_else!($($($default)?)?,EMPTY),
                    }
                }
            }
            impl [<$header Value>] {
                /// Wire-format name of this header.
                pub const NAME: &'static str = $name;
                pub(crate) fn new(value: or_else_type!($($types)?,&'static str)) -> Self {
                    [<$header Value>] { value }
                }
                /// Only callable for typed headers; the untyped expansion
                /// panics because it should never be invoked.
                pub(crate) fn from_owned(_value: or_else_type!($($types)?,String)) -> Self {
                    choose_from_presence!($($types)? {
                        Self::new(_value)
                    }, {
                        panic!("Macro error, should never be called");
                    })
                }
                // NOTE(review): paste! does not expand `[<...>]` inside string
                // literals, so the error message below is emitted verbatim as
                // "[<Error Parsing $header Value>]" — likely unintended; confirm.
                pub(crate) fn from_str<'a>(input: &'a str) -> Result<[<$header Value>], StompParseError> {
                    // SAFETY(review): the untyped branch transmutes the input's
                    // lifetime to 'static; this is sound only if callers
                    // guarantee the backing string outlives all uses of the
                    // header value — TODO confirm against the frame lifecycle.
                    choose_from_presence!($($types)? ($($types)?::from_str(input).map([<$header Value>]::new)
                        .map_err(|_| StompParseError::new("[<Error Parsing $header Value>]"))), (Ok([<$header Value>]::new(
                        unsafe { std::mem::transmute::<&'a str,&'static str>(input)}
                    ))))
                }
                /// Borrows the header value (typed, or `&str` when untyped).
                pub fn value(&self) -> & or_else_type!($($types)?,str) {
                    choose_from_presence!($($types)? {&self.value}, {&self.value})
                }
            }
            // Untyped (string) headers can be percent-decoded.
            if_not_present!($($types)? (impl DecodableValue for [<$header Value>] {
                fn decoded_value(&self) -> Result<Either<&str, String>, StompParseError> {
                    decode_str(self.value())
                }
            }
            ));
            impl HeaderValue for [<$header Value>] {
                type OwnedValue = or_else_type!($($types)?,String);
                type Value=or_else_type!($($types)?,&'static str);
                // OWNED is true exactly when the header carries a typed value.
                const OWNED: bool = choose_from_presence!($($types)? true, false);
                fn header_name(&self) -> &str {
                    [<$header Value>]::NAME
                }
            }
            impl Into<or_else_type!($($types)?,&str)> for [<$header Value>] {
                fn into(self) -> or_else_type!($($types)?,&'static str) {
                    self.value
                }
            }
            impl std::fmt::Display for [<$header Value>] {
                header_display!( );
            }
            impl std::fmt::Debug for [<$header Value>] {
                header_display!( );
            }
        }
    };
}
/// Generates the full header machinery from a list of
/// `(Identifier, "wire-name" [, Type [, default]])` tuples:
/// the `CustomValue` fallback type, the `HeaderType` discriminant enum,
/// one `<Header>Value` struct per entry (via `header!`), the `Header`
/// enum tying them together, and per-header parser functions.
macro_rules! headers {
    ( $( ($header:ident, $name:literal $(,$types:ty $(, $default:expr )?)? ) ),* ) => {
        /// A header that is not one of the statically known ones:
        /// an arbitrary name/value pair of borrowed strings.
        #[derive(Debug, Eq, PartialEq, Clone)]
        pub struct CustomValue {
            name: &'static str,
            value: &'static str
        }
        impl CustomValue {
            pub fn new(name: &'static str, value: &'static str) -> Self {
                CustomValue {
                    name,
                    value
                }
            }
            pub fn value(&self) -> &&'static str {
                &self.value
            }
            /// Percent-decodes the header name.
            pub fn decoded_name(&self) -> Result<Either<&str, String>, StompParseError> {
                decode_str(self.name)
            }
        }
        impl DecodableValue for CustomValue {
            fn decoded_value(&self) -> Result<Either<&str, String>, StompParseError> {
                decode_str(self.value())
            }
        }
        impl HeaderValue for CustomValue {
            type OwnedValue = String;
            type Value = &'static str;
            const OWNED: bool = false;
            fn header_name(&self) -> &str {
                &self.name
            }
        }
        impl std::fmt::Display for CustomValue {
            header_display!( );
        }
        /// Discriminant-only enum: one variant per known header.
        #[derive(Debug, Eq, PartialEq, Copy, Clone)]
        pub enum HeaderType {
            $(
                $header
            ),*
            // ,Custom(&'static str)
        }
        impl HeaderType {
            /// True when `name` is this header's wire-format name.
            pub fn matches(&self, name: &str) -> bool {
                match self {
                    $(
                        HeaderType::$header => name == $name,
                    )*
                    // HeaderType::Custom(header_name) => &name == header_name
                }
            }
        }
        impl TryFrom<&'static str> for HeaderType {
            type Error = StompParseError;
            fn try_from(input: &'static str) -> std::result::Result<HeaderType, StompParseError> {
                match(input) {
                    $(
                        $name => Ok(HeaderType::$header),
                    )*
                    // NOTE(review): this panics instead of returning
                    // Err(StompParseError), defeating the TryFrom contract;
                    // the commented-out Custom fallback below suggests the
                    // intended behavior — confirm and fix.
                    _ => panic!("Not a known Header")
                    // name => Ok(HeaderType::Custom(name))
                }
            }
        }
        impl std::fmt::Display for HeaderType {
            fn fmt(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::result::Result<(), std::fmt::Error> {
                match(self) {
                    $(HeaderType::$header => {
                        formatter.write_str($name)
                    })*
                }
            }
        }
        paste! {
            // Emit one `<Header>Value` struct per known header.
            $(
                header!($header, $name $(,$types $(, $default )?)? );
            )*
            /// A parsed header: one variant per known header, plus `Custom`
            /// for anything else.
            #[derive(Debug, Eq, PartialEq, Clone)]
            pub enum Header {
                $(
                    $header([<$header Value>]),
                )*
                Custom(CustomValue)
            }
            #[doc(hidden)]
            pub mod parser {
                #![allow(non_snake_case)]
                use super::*;
                /// Parses a raw header value string into a `Header`.
                pub type HeaderValueConverter = dyn Fn(&str) -> Result<Header, StompParseError>;
                /// Looks up the generated parser function for a header type.
                pub fn find_header_parser(header_type: HeaderType) -> Box<HeaderValueConverter> {
                    match header_type {
                        $(
                            HeaderType::$header => Box::new([<parse_ $header _header>]),
                        )*
                    }
                }
                $(
                    pub fn [<parse_ $header _header>]<'a>(input: &'a str) -> Result<Header, StompParseError> {
                        [<$header Value>]::from_str(input).map(Header::$header)
                    }
                )*
            }
        }
    }
}
|
//
// Sysinfo
//
// Copyright (c) 2018 Guillaume Gomez
//
use std::ffi::{OsStr, OsString};
use std::fmt::{Debug, Error, Formatter};
use std::str;
use std::path::Path;
use DiskExt;
use winapi::um::fileapi::GetDiskFreeSpaceExW;
use winapi::um::winnt::ULARGE_INTEGER;
/// Enum containing the different handled disks types.
#[derive(Debug, PartialEq, Clone, Copy)]
pub enum DiskType {
    /// HDD type (spinning platter drive).
    HDD,
    /// SSD type (solid-state drive).
    SSD,
    /// Unknown type, carrying the raw platform type id.
    Unknown(isize),
}
impl From<isize> for DiskType {
    /// Maps the raw platform disk-type id onto a `DiskType` variant;
    /// unrecognised ids are preserved in `Unknown`.
    fn from(t: isize) -> DiskType {
        if t == 0 {
            DiskType::HDD
        } else if t == 1 {
            DiskType::SSD
        } else {
            DiskType::Unknown(t)
        }
    }
}
/// Creates a `Disk` from raw Windows disk data and immediately refreshes
/// its available-space figure via `update()`.
///
/// * `name` — volume name.
/// * `mount_point` — mount point as a NUL-terminated UTF-16 string
///   (the trailing NUL is stripped for the display string).
/// * `file_system` — file-system name bytes.
/// * `type_` — detected disk type.
/// * `total_space` — total size in bytes (0 when unknown).
pub fn new_disk(name: &OsStr, mount_point: &[u16], file_system: &[u8], type_: DiskType,
                total_space: u64) -> Disk {
    // saturating_sub avoids a slice-index panic if an (unexpected) empty
    // mount-point buffer is ever passed in; otherwise it just drops the NUL.
    let display_len = mount_point.len().saturating_sub(1);
    let mut d = Disk {
        type_,
        name: name.to_owned(),
        file_system: file_system.to_vec(),
        mount_point: mount_point.to_vec(),
        s_mount_point: String::from_utf16_lossy(&mount_point[..display_len]),
        total_space,
        available_space: 0,
    };
    d.update();
    d
}
/// Struct containing a disk information.
pub struct Disk {
    /// Kind of device (HDD, SSD, or unknown raw id).
    type_: DiskType,
    /// Volume name.
    name: OsString,
    /// Raw file-system name bytes (formatted as UTF-8 by `Debug`).
    file_system: Vec<u8>,
    /// Mount point as UTF-16, NUL-terminated (see `new_disk`); kept in
    /// this form for the Windows API call in `update()`.
    mount_point: Vec<u16>,
    /// Mount point decoded for display, trailing NUL stripped.
    s_mount_point: String,
    /// Total size in bytes; 0 means the size could not be determined.
    total_space: u64,
    /// Free space in bytes, refreshed by `update()`.
    available_space: u64,
}
impl Debug for Disk {
    /// Formats a one-line summary of the disk (name, FS, type, mount
    /// point, available/total bytes).
    fn fmt(&self, fmt: &mut Formatter) -> Result<(), Error> {
        write!(fmt,
               "Disk({:?})[FS: {:?}][Type: {:?}] mounted on {:?}: {}/{} B",
               self.get_name(),
               // Lossy conversion: Debug formatting must not panic on a
               // non-UTF-8 file-system name (the previous `unwrap` could).
               // Output is identical to the old code for valid UTF-8.
               String::from_utf8_lossy(self.get_file_system()),
               self.get_type(),
               self.get_mount_point(), self.get_available_space(), self.get_total_space())
    }
}
impl DiskExt for Disk {
    /// Returns the disk type.
    fn get_type(&self) -> DiskType {
        self.type_
    }
    /// Returns the volume name.
    fn get_name(&self) -> &OsStr {
        &self.name
    }
    /// Returns the raw file-system name bytes.
    fn get_file_system(&self) -> &[u8] {
        &self.file_system
    }
    /// Returns the mount point as a `Path`.
    fn get_mount_point(&self) -> &Path {
        &Path::new(&self.s_mount_point)
    }
    /// Returns the total size in bytes (0 when unknown).
    fn get_total_space(&self) -> u64 {
        self.total_space
    }
    /// Returns the last-read free-space figure in bytes.
    fn get_available_space(&self) -> u64 {
        self.available_space
    }
    /// Refreshes `available_space` from the OS; returns `true` on success.
    fn update(&mut self) -> bool {
        // total_space == 0 marks a disk whose size could not be read;
        // skip the API call in that case and report failure.
        if self.total_space != 0 {
            // SAFETY: ULARGE_INTEGER is a plain integer union, so a zeroed
            // value is a valid initial state. `mount_point` is a
            // NUL-terminated UTF-16 buffer (built in `new_disk`), as
            // GetDiskFreeSpaceExW requires, and it outlives the call.
            unsafe {
                let mut tmp: ULARGE_INTEGER = ::std::mem::zeroed();
                // The two NULL out-params (caller-available bytes and total
                // bytes) are not needed; `tmp` receives the volume's total
                // number of free bytes. Non-zero return means success.
                if GetDiskFreeSpaceExW(self.mount_point.as_ptr(),
                                       ::std::ptr::null_mut(),
                                       ::std::ptr::null_mut(),
                                       &mut tmp) != 0 {
                    self.available_space = *tmp.QuadPart();
                    return true;
                }
            }
        }
        false
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.