text stringlengths 8 4.13M |
|---|
#[doc = r" Value read from the register"]
pub struct R {
    // Raw register bits captured by `read`/`modify` at read time.
    bits: u32,
}
#[doc = r" Value to write to the register"]
pub struct W {
    // Raw bits that will be written back by `write`/`modify`.
    bits: u32,
}
impl super::IES {
    #[doc = r" Modifies the contents of the register"]
    #[inline]
    pub fn modify<F>(&self, f: F)
    where
        for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,
    {
        // Single read of the hardware register; both the reader and the
        // writer start from the same snapshot (read-modify-write).
        let bits = self.register.get();
        let r = R { bits: bits };
        let mut w = W { bits: bits };
        f(&r, &mut w);
        // Write back whatever the closure left in `w`.
        self.register.set(w.bits);
    }
    #[doc = r" Reads the contents of the register"]
    #[inline]
    pub fn read(&self) -> R {
        R {
            bits: self.register.get(),
        }
    }
    #[doc = r" Writes to the register"]
    #[inline]
    pub fn write<F>(&self, f: F)
    where
        F: FnOnce(&mut W) -> &mut W,
    {
        // Unlike `modify`, `write` starts from the reset value, so any
        // field not set by the closure is written as its reset state.
        let mut w = W::reset_value();
        f(&mut w);
        self.register.set(w.bits);
    }
    #[doc = r" Writes the reset value to the register"]
    #[inline]
    pub fn reset(&self) {
        self.write(|w| w)
    }
}
/// Value of the OERIS field.
pub struct OERISR {
    bits: bool,
}
impl OERISR {
    /// Raw bit value of the field.
    #[inline]
    pub fn bit(&self) -> bool {
        self.bits
    }
    /// Returns `true` if the bit is clear (0).
    #[inline]
    pub fn bit_is_clear(&self) -> bool {
        !self.bits
    }
    /// Returns `true` if the bit is set (1).
    #[inline]
    pub fn bit_is_set(&self) -> bool {
        self.bits
    }
}
/// Value of the BERIS field.
pub struct BERISR {
    bits: bool,
}
impl BERISR {
    /// Raw bit value of the field.
    #[inline]
    pub fn bit(&self) -> bool {
        self.bits
    }
    /// Returns `true` if the bit is clear (0).
    #[inline]
    pub fn bit_is_clear(&self) -> bool {
        !self.bits
    }
    /// Returns `true` if the bit is set (1).
    #[inline]
    pub fn bit_is_set(&self) -> bool {
        self.bits
    }
}
/// Value of the PERIS field.
pub struct PERISR {
    bits: bool,
}
impl PERISR {
    /// Raw bit value of the field.
    #[inline]
    pub fn bit(&self) -> bool {
        self.bits
    }
    /// Returns `true` if the bit is clear (0).
    #[inline]
    pub fn bit_is_clear(&self) -> bool {
        !self.bits
    }
    /// Returns `true` if the bit is set (1).
    #[inline]
    pub fn bit_is_set(&self) -> bool {
        self.bits
    }
}
/// Value of the FERIS field.
pub struct FERISR {
    bits: bool,
}
impl FERISR {
    /// Raw bit value of the field.
    #[inline]
    pub fn bit(&self) -> bool {
        self.bits
    }
    /// Returns `true` if the bit is clear (0).
    #[inline]
    pub fn bit_is_clear(&self) -> bool {
        !self.bits
    }
    /// Returns `true` if the bit is set (1).
    #[inline]
    pub fn bit_is_set(&self) -> bool {
        self.bits
    }
}
/// Value of the RTRIS field.
pub struct RTRISR {
    bits: bool,
}
impl RTRISR {
    /// Raw bit value of the field.
    #[inline]
    pub fn bit(&self) -> bool {
        self.bits
    }
    /// Returns `true` if the bit is clear (0).
    #[inline]
    pub fn bit_is_clear(&self) -> bool {
        !self.bits
    }
    /// Returns `true` if the bit is set (1).
    #[inline]
    pub fn bit_is_set(&self) -> bool {
        self.bits
    }
}
/// Value of the TXRIS field.
pub struct TXRISR {
    bits: bool,
}
impl TXRISR {
    /// Raw bit value of the field.
    #[inline]
    pub fn bit(&self) -> bool {
        self.bits
    }
    /// Returns `true` if the bit is clear (0).
    #[inline]
    pub fn bit_is_clear(&self) -> bool {
        !self.bits
    }
    /// Returns `true` if the bit is set (1).
    #[inline]
    pub fn bit_is_set(&self) -> bool {
        self.bits
    }
}
/// Value of the RXRIS field.
pub struct RXRISR {
    bits: bool,
}
impl RXRISR {
    /// Raw bit value of the field.
    #[inline]
    pub fn bit(&self) -> bool {
        self.bits
    }
    /// Returns `true` if the bit is clear (0).
    #[inline]
    pub fn bit_is_clear(&self) -> bool {
        !self.bits
    }
    /// Returns `true` if the bit is set (1).
    #[inline]
    pub fn bit_is_set(&self) -> bool {
        self.bits
    }
}
/// Value of the DSRMRIS field.
pub struct DSRMRISR {
    bits: bool,
}
impl DSRMRISR {
    /// Raw bit value of the field.
    #[inline]
    pub fn bit(&self) -> bool {
        self.bits
    }
    /// Returns `true` if the bit is clear (0).
    #[inline]
    pub fn bit_is_clear(&self) -> bool {
        !self.bits
    }
    /// Returns `true` if the bit is set (1).
    #[inline]
    pub fn bit_is_set(&self) -> bool {
        self.bits
    }
}
/// Value of the DCDMRIS field.
pub struct DCDMRISR {
    bits: bool,
}
impl DCDMRISR {
    /// Raw bit value of the field.
    #[inline]
    pub fn bit(&self) -> bool {
        self.bits
    }
    /// Returns `true` if the bit is clear (0).
    #[inline]
    pub fn bit_is_clear(&self) -> bool {
        !self.bits
    }
    /// Returns `true` if the bit is set (1).
    #[inline]
    pub fn bit_is_set(&self) -> bool {
        self.bits
    }
}
/// Value of the CTSMRIS field.
pub struct CTSMRISR {
    bits: bool,
}
impl CTSMRISR {
    /// Raw bit value of the field.
    #[inline]
    pub fn bit(&self) -> bool {
        self.bits
    }
    /// Returns `true` if the bit is clear (0).
    #[inline]
    pub fn bit_is_clear(&self) -> bool {
        !self.bits
    }
    /// Returns `true` if the bit is set (1).
    #[inline]
    pub fn bit_is_set(&self) -> bool {
        self.bits
    }
}
/// Value of the TXCMPMRIS field.
pub struct TXCMPMRISR {
    bits: bool,
}
impl TXCMPMRISR {
    /// Raw bit value of the field.
    #[inline]
    pub fn bit(&self) -> bool {
        self.bits
    }
    /// Returns `true` if the bit is clear (0).
    #[inline]
    pub fn bit_is_clear(&self) -> bool {
        !self.bits
    }
    /// Returns `true` if the bit is set (1).
    #[inline]
    pub fn bit_is_set(&self) -> bool {
        self.bits
    }
}
/// Write proxy for the OERIS field (bit 10).
pub struct _OERISW<'a> {
    w: &'a mut W,
}
impl<'a> _OERISW<'a> {
    /// Sets the field bit.
    // `#[inline]` added for consistency with `bit` and the reader methods.
    #[inline]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit.
    #[inline]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes raw bits to the field.
    #[inline]
    pub fn bit(self, value: bool) -> &'a mut W {
        const MASK: bool = true;
        const OFFSET: u8 = 10;
        // Clear the field, then OR in the new value.
        self.w.bits &= !((MASK as u32) << OFFSET);
        self.w.bits |= ((value & MASK) as u32) << OFFSET;
        self.w
    }
}
/// Write proxy for the BERIS field (bit 9).
pub struct _BERISW<'a> {
    w: &'a mut W,
}
impl<'a> _BERISW<'a> {
    /// Sets the field bit.
    // `#[inline]` added for consistency with `bit` and the reader methods.
    #[inline]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit.
    #[inline]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes raw bits to the field.
    #[inline]
    pub fn bit(self, value: bool) -> &'a mut W {
        const MASK: bool = true;
        const OFFSET: u8 = 9;
        self.w.bits &= !((MASK as u32) << OFFSET);
        self.w.bits |= ((value & MASK) as u32) << OFFSET;
        self.w
    }
}
/// Write proxy for the PERIS field (bit 8).
pub struct _PERISW<'a> {
    w: &'a mut W,
}
impl<'a> _PERISW<'a> {
    /// Sets the field bit.
    // `#[inline]` added for consistency with `bit` and the reader methods.
    #[inline]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit.
    #[inline]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes raw bits to the field.
    #[inline]
    pub fn bit(self, value: bool) -> &'a mut W {
        const MASK: bool = true;
        const OFFSET: u8 = 8;
        self.w.bits &= !((MASK as u32) << OFFSET);
        self.w.bits |= ((value & MASK) as u32) << OFFSET;
        self.w
    }
}
/// Write proxy for the FERIS field (bit 7).
pub struct _FERISW<'a> {
    w: &'a mut W,
}
impl<'a> _FERISW<'a> {
    /// Sets the field bit.
    // `#[inline]` added for consistency with `bit` and the reader methods.
    #[inline]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit.
    #[inline]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes raw bits to the field.
    #[inline]
    pub fn bit(self, value: bool) -> &'a mut W {
        const MASK: bool = true;
        const OFFSET: u8 = 7;
        self.w.bits &= !((MASK as u32) << OFFSET);
        self.w.bits |= ((value & MASK) as u32) << OFFSET;
        self.w
    }
}
/// Write proxy for the RTRIS field (bit 6).
pub struct _RTRISW<'a> {
    w: &'a mut W,
}
impl<'a> _RTRISW<'a> {
    /// Sets the field bit.
    // `#[inline]` added for consistency with `bit` and the reader methods.
    #[inline]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit.
    #[inline]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes raw bits to the field.
    #[inline]
    pub fn bit(self, value: bool) -> &'a mut W {
        const MASK: bool = true;
        const OFFSET: u8 = 6;
        self.w.bits &= !((MASK as u32) << OFFSET);
        self.w.bits |= ((value & MASK) as u32) << OFFSET;
        self.w
    }
}
/// Write proxy for the TXRIS field (bit 5).
pub struct _TXRISW<'a> {
    w: &'a mut W,
}
impl<'a> _TXRISW<'a> {
    /// Sets the field bit.
    // `#[inline]` added for consistency with `bit` and the reader methods.
    #[inline]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit.
    #[inline]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes raw bits to the field.
    #[inline]
    pub fn bit(self, value: bool) -> &'a mut W {
        const MASK: bool = true;
        const OFFSET: u8 = 5;
        self.w.bits &= !((MASK as u32) << OFFSET);
        self.w.bits |= ((value & MASK) as u32) << OFFSET;
        self.w
    }
}
/// Write proxy for the RXRIS field (bit 4).
pub struct _RXRISW<'a> {
    w: &'a mut W,
}
impl<'a> _RXRISW<'a> {
    /// Sets the field bit.
    // `#[inline]` added for consistency with `bit` and the reader methods.
    #[inline]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit.
    #[inline]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes raw bits to the field.
    #[inline]
    pub fn bit(self, value: bool) -> &'a mut W {
        const MASK: bool = true;
        const OFFSET: u8 = 4;
        self.w.bits &= !((MASK as u32) << OFFSET);
        self.w.bits |= ((value & MASK) as u32) << OFFSET;
        self.w
    }
}
/// Write proxy for the DSRMRIS field (bit 3).
pub struct _DSRMRISW<'a> {
    w: &'a mut W,
}
impl<'a> _DSRMRISW<'a> {
    /// Sets the field bit.
    // `#[inline]` added for consistency with `bit` and the reader methods.
    #[inline]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit.
    #[inline]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes raw bits to the field.
    #[inline]
    pub fn bit(self, value: bool) -> &'a mut W {
        const MASK: bool = true;
        const OFFSET: u8 = 3;
        self.w.bits &= !((MASK as u32) << OFFSET);
        self.w.bits |= ((value & MASK) as u32) << OFFSET;
        self.w
    }
}
/// Write proxy for the DCDMRIS field (bit 2).
pub struct _DCDMRISW<'a> {
    w: &'a mut W,
}
impl<'a> _DCDMRISW<'a> {
    /// Sets the field bit.
    // `#[inline]` added for consistency with `bit` and the reader methods.
    #[inline]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit.
    #[inline]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes raw bits to the field.
    #[inline]
    pub fn bit(self, value: bool) -> &'a mut W {
        const MASK: bool = true;
        const OFFSET: u8 = 2;
        self.w.bits &= !((MASK as u32) << OFFSET);
        self.w.bits |= ((value & MASK) as u32) << OFFSET;
        self.w
    }
}
/// Write proxy for the CTSMRIS field (bit 1).
pub struct _CTSMRISW<'a> {
    w: &'a mut W,
}
impl<'a> _CTSMRISW<'a> {
    /// Sets the field bit.
    // `#[inline]` added for consistency with `bit` and the reader methods.
    #[inline]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit.
    #[inline]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes raw bits to the field.
    #[inline]
    pub fn bit(self, value: bool) -> &'a mut W {
        const MASK: bool = true;
        const OFFSET: u8 = 1;
        self.w.bits &= !((MASK as u32) << OFFSET);
        self.w.bits |= ((value & MASK) as u32) << OFFSET;
        self.w
    }
}
/// Write proxy for the TXCMPMRIS field (bit 0).
pub struct _TXCMPMRISW<'a> {
    w: &'a mut W,
}
impl<'a> _TXCMPMRISW<'a> {
    /// Sets the field bit.
    // `#[inline]` added for consistency with `bit` and the reader methods.
    #[inline]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit.
    #[inline]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes raw bits to the field.
    #[inline]
    pub fn bit(self, value: bool) -> &'a mut W {
        const MASK: bool = true;
        const OFFSET: u8 = 0;
        self.w.bits &= !((MASK as u32) << OFFSET);
        self.w.bits |= ((value & MASK) as u32) << OFFSET;
        self.w
    }
}
impl R {
    /// Value of the register as raw bits.
    #[inline]
    pub fn bits(&self) -> u32 {
        self.bits
    }
    /// Extracts the single bit at `offset` as a `bool`.
    #[inline]
    fn bit_at(&self, offset: u8) -> bool {
        (self.bits >> offset) & 1 != 0
    }
    /// Bit 10 - This bit holds the overflow interrupt status.
    #[inline]
    pub fn oeris(&self) -> OERISR {
        OERISR { bits: self.bit_at(10) }
    }
    /// Bit 9 - This bit holds the break error interrupt status.
    #[inline]
    pub fn beris(&self) -> BERISR {
        BERISR { bits: self.bit_at(9) }
    }
    /// Bit 8 - This bit holds the parity error interrupt status.
    #[inline]
    pub fn peris(&self) -> PERISR {
        PERISR { bits: self.bit_at(8) }
    }
    /// Bit 7 - This bit holds the framing error interrupt status.
    #[inline]
    pub fn feris(&self) -> FERISR {
        FERISR { bits: self.bit_at(7) }
    }
    /// Bit 6 - This bit holds the receive timeout interrupt status.
    #[inline]
    pub fn rtris(&self) -> RTRISR {
        RTRISR { bits: self.bit_at(6) }
    }
    /// Bit 5 - This bit holds the transmit interrupt status.
    #[inline]
    pub fn txris(&self) -> TXRISR {
        TXRISR { bits: self.bit_at(5) }
    }
    /// Bit 4 - This bit holds the receive interrupt status.
    #[inline]
    pub fn rxris(&self) -> RXRISR {
        RXRISR { bits: self.bit_at(4) }
    }
    /// Bit 3 - This bit holds the modem DSR interrupt status.
    #[inline]
    pub fn dsrmris(&self) -> DSRMRISR {
        DSRMRISR { bits: self.bit_at(3) }
    }
    /// Bit 2 - This bit holds the modem DCD interrupt status.
    #[inline]
    pub fn dcdmris(&self) -> DCDMRISR {
        DCDMRISR { bits: self.bit_at(2) }
    }
    /// Bit 1 - This bit holds the modem CTS interrupt status.
    #[inline]
    pub fn ctsmris(&self) -> CTSMRISR {
        CTSMRISR { bits: self.bit_at(1) }
    }
    /// Bit 0 - This bit holds the modem TXCMP interrupt status.
    #[inline]
    pub fn txcmpmris(&self) -> TXCMPMRISR {
        TXCMPMRISR { bits: self.bit_at(0) }
    }
}
impl W {
    /// Reset value of the register.
    #[inline]
    pub fn reset_value() -> W {
        W { bits: 0 }
    }
    /// Writes raw bits to the register.
    #[inline]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
    /// Bit 10 - This bit holds the overflow interrupt status.
    #[inline]
    pub fn oeris(&mut self) -> _OERISW {
        _OERISW { w: self }
    }
    /// Bit 9 - This bit holds the break error interrupt status.
    #[inline]
    pub fn beris(&mut self) -> _BERISW {
        _BERISW { w: self }
    }
    /// Bit 8 - This bit holds the parity error interrupt status.
    #[inline]
    pub fn peris(&mut self) -> _PERISW {
        _PERISW { w: self }
    }
    /// Bit 7 - This bit holds the framing error interrupt status.
    #[inline]
    pub fn feris(&mut self) -> _FERISW {
        _FERISW { w: self }
    }
    /// Bit 6 - This bit holds the receive timeout interrupt status.
    #[inline]
    pub fn rtris(&mut self) -> _RTRISW {
        _RTRISW { w: self }
    }
    /// Bit 5 - This bit holds the transmit interrupt status.
    #[inline]
    pub fn txris(&mut self) -> _TXRISW {
        _TXRISW { w: self }
    }
    /// Bit 4 - This bit holds the receive interrupt status.
    #[inline]
    pub fn rxris(&mut self) -> _RXRISW {
        _RXRISW { w: self }
    }
    /// Bit 3 - This bit holds the modem DSR interrupt status.
    #[inline]
    pub fn dsrmris(&mut self) -> _DSRMRISW {
        _DSRMRISW { w: self }
    }
    /// Bit 2 - This bit holds the modem DCD interrupt status.
    #[inline]
    pub fn dcdmris(&mut self) -> _DCDMRISW {
        _DCDMRISW { w: self }
    }
    /// Bit 1 - This bit holds the modem CTS interrupt status.
    #[inline]
    pub fn ctsmris(&mut self) -> _CTSMRISW {
        _CTSMRISW { w: self }
    }
    /// Bit 0 - This bit holds the modem TXCMP interrupt status.
    #[inline]
    pub fn txcmpmris(&mut self) -> _TXCMPMRISW {
        _TXCMPMRISW { w: self }
    }
}
|
//! Definition of linear combinations.
use curve25519_dalek::scalar::Scalar;
use std::iter::FromIterator;
use std::ops::{Add, Mul, Neg, Sub};
/// Represents a variable in a constraint system.
#[derive(Copy, Clone, Debug, PartialEq)]
pub enum Variable {
    /// Represents an external input specified by a commitment.
    Committed(usize),
    /// Represents the left input of a multiplication gate.
    MultiplierLeft(usize),
    /// Represents the right input of a multiplication gate.
    MultiplierRight(usize),
    /// Represents the output of a multiplication gate.
    MultiplierOutput(usize),
    /// Represents the constant 1.
    // Unit tuple variant; used as the carrier for constant terms in
    // `LinearCombination` (see the `From<S: Into<Scalar>>` impl).
    One(),
}
impl From<Variable> for LinearCombination {
    /// Lifts a single variable into a one-term combination with coefficient 1.
    fn from(v: Variable) -> LinearCombination {
        let terms = vec![(v, Scalar::one())];
        LinearCombination { terms }
    }
}
impl<S: Into<Scalar>> From<S> for LinearCombination {
    /// Lifts a scalar constant into a one-term combination on `Variable::One()`.
    fn from(s: S) -> LinearCombination {
        let terms = vec![(Variable::One(), s.into())];
        LinearCombination { terms }
    }
}
// Arithmetic on variables produces linear combinations
impl Neg for Variable {
type Output = LinearCombination;
fn neg(self) -> Self::Output {
-LinearCombination::from(self)
}
}
impl<L: Into<LinearCombination>> Add<L> for Variable {
    type Output = LinearCombination;
    /// Adds any combination-convertible value to this variable.
    fn add(self, other: L) -> Self::Output {
        let lhs: LinearCombination = self.into();
        lhs + other.into()
    }
}
impl<L: Into<LinearCombination>> Sub<L> for Variable {
    type Output = LinearCombination;
    /// Subtracts any combination-convertible value from this variable.
    fn sub(self, other: L) -> Self::Output {
        let lhs: LinearCombination = self.into();
        lhs - other.into()
    }
}
impl<S: Into<Scalar>> Mul<S> for Variable {
    type Output = LinearCombination;
    /// Scales this variable by a scalar coefficient.
    fn mul(self, other: S) -> Self::Output {
        let coeff: Scalar = other.into();
        LinearCombination {
            terms: vec![(self, coeff)],
        }
    }
}
// Arithmetic on scalars with variables produces linear combinations
impl Add<Variable> for Scalar {
    type Output = LinearCombination;
    /// Builds `self * 1 + other * 1` as a two-term combination.
    fn add(self, other: Variable) -> Self::Output {
        let terms = vec![(Variable::One(), self), (other, Scalar::one())];
        LinearCombination { terms }
    }
}
impl Sub<Variable> for Scalar {
    type Output = LinearCombination;
    /// Builds `self * 1 + other * (-1)` as a two-term combination.
    fn sub(self, other: Variable) -> Self::Output {
        let terms = vec![(Variable::One(), self), (other, -Scalar::one())];
        LinearCombination { terms }
    }
}
impl Mul<Variable> for Scalar {
    type Output = LinearCombination;
    /// Scales a variable by this scalar coefficient.
    fn mul(self, other: Variable) -> Self::Output {
        let terms = vec![(other, self)];
        LinearCombination { terms }
    }
}
/// Represents a linear combination of
/// [`Variables`](::r1cs::Variable). Each term is represented by a
/// `(Variable, Scalar)` pair.
#[derive(Clone, Debug, PartialEq)]
pub struct LinearCombination {
    // NOTE(review): `Add`/`Sub` concatenate terms without combining
    // duplicates, so the same variable may appear more than once.
    pub(super) terms: Vec<(Variable, Scalar)>,
}
impl Default for LinearCombination {
fn default() -> Self {
LinearCombination { terms: Vec::new() }
}
}
impl FromIterator<(Variable, Scalar)> for LinearCombination {
    /// Collects owned `(Variable, Scalar)` terms into a combination.
    fn from_iter<T>(iter: T) -> Self
    where
        T: IntoIterator<Item = (Variable, Scalar)>,
    {
        let terms: Vec<(Variable, Scalar)> = iter.into_iter().collect();
        LinearCombination { terms }
    }
}
impl<'a> FromIterator<&'a (Variable, Scalar)> for LinearCombination {
    /// Collects borrowed terms into a combination by cloning each pair.
    fn from_iter<T>(iter: T) -> Self
    where
        T: IntoIterator<Item = &'a (Variable, Scalar)>,
    {
        let terms: Vec<(Variable, Scalar)> = iter.into_iter().cloned().collect();
        LinearCombination { terms }
    }
}
// Arithmetic on linear combinations
impl<L: Into<LinearCombination>> Add<L> for LinearCombination {
    type Output = Self;
    /// Appends the right-hand side's terms; duplicates are kept, not merged.
    fn add(mut self, rhs: L) -> Self::Output {
        // Move the rhs terms instead of iterating by reference and cloning,
        // and return `self` directly rather than rebuilding the struct.
        self.terms.extend(rhs.into().terms);
        self
    }
}
impl<L: Into<LinearCombination>> Sub<L> for LinearCombination {
    type Output = Self;
    /// Appends the right-hand side's terms with negated coefficients.
    fn sub(mut self, rhs: L) -> Self::Output {
        // Consume the rhs terms by value instead of borrowing and copying,
        // and return `self` directly rather than rebuilding the struct.
        self.terms
            .extend(rhs.into().terms.into_iter().map(|(var, coeff)| (var, -coeff)));
        self
    }
}
impl Mul<LinearCombination> for Scalar {
    type Output = LinearCombination;
    /// Scales every term's coefficient by this scalar.
    fn mul(self, other: LinearCombination) -> Self::Output {
        let terms = other
            .terms
            .into_iter()
            .map(|(var, coeff)| (var, coeff * self))
            .collect();
        LinearCombination { terms }
    }
}
impl Neg for LinearCombination {
    type Output = Self;
    /// Negates every coefficient in place.
    fn neg(mut self) -> Self::Output {
        for term in self.terms.iter_mut() {
            term.1 = -term.1;
        }
        self
    }
}
impl<S: Into<Scalar>> Mul<S> for LinearCombination {
    type Output = Self;
    /// Scales every coefficient by `other`, in place.
    fn mul(mut self, other: S) -> Self::Output {
        let factor = other.into();
        for term in self.terms.iter_mut() {
            term.1 = term.1 * factor;
        }
        self
    }
}
|
extern crate mpd;
#[macro_use]
extern crate serde_derive;
extern crate serde_json;
extern crate serde;
use mpd::Client;
#[derive(Serialize, Debug)]
struct JsonSong {
    // Fields mirror the values collected tag-by-tag in `get_song`.
    file: String,
    title: String,
    album: String,
    artist: String,
    // Formatted as "M.SS" by `get_tag` ("duration" branch).
    duration: String,
}
/// Starts/resumes playback. Panics if the MPD command fails.
pub fn play(mut conn: Client) { conn.play().unwrap(); }
/// Toggles pause state. Panics if the MPD command fails.
pub fn pause(mut conn: Client) { conn.toggle_pause().unwrap(); }
/// Stops playback. Panics if the MPD command fails.
pub fn stop(mut conn: Client) { conn.stop().unwrap(); }
/// Skips to the next queued song. Panics if the MPD command fails.
pub fn next(mut conn: Client) { conn.next().unwrap(); }
/// Returns to the previous song. Panics if the MPD command fails.
pub fn prev(mut conn: Client) { conn.prev().unwrap(); }
/// Triggers a full database rescan and reports the outcome as text.
pub fn update(mut conn: Client) -> String {
    match conn.rescan() {
        Ok(_) => String::from("Updating Database"),
        Err(e) => e.to_string(),
    }
}
/// Serializes the current song's metadata to a JSON string.
///
/// NOTE(review): each `get_tag` call issues its own `currentsong`
/// request, so this performs five round-trips to MPD; fetching the
/// song once would be cheaper if this becomes hot.
///
/// Panics if serialization fails (should not happen for plain strings).
pub fn get_song(mut conn: Client) -> String {
    let song = JsonSong {
        file: get_tag(&mut conn, "file"),
        title: get_tag(&mut conn, "title"),
        album: get_tag(&mut conn, "album"),
        artist: get_tag(&mut conn, "artist"),
        duration: get_tag(&mut conn, "duration"),
    };
    serde_json::to_string(&song).unwrap()
}
pub fn get_tag(conn: &mut Client, tag: &str) -> String {
let song = conn.currentsong().unwrap();
let no_play = String::from("Nothing is playing!");
match &song {
&None => return no_play.to_owned(),
&Some(ref s) => match tag {
"file" => return s.file.to_owned(),
"album" => match &s.tags.get("Album") {
&None => return String::from("Album not found!").to_owned(),
&Some(album) => return album.to_owned(),
},
"artist" => match &s.tags.get("Artist") {
&None => return String::from("Artist not found!").to_owned(),
&Some(artist) => return artist.to_owned(),
},
"duration" => match &s.duration {
&None => return no_play.to_owned(),
&Some(ref duration) => {
return format!("{minutes}.{seconds}",
minutes = duration.num_minutes(),
seconds = format!("{:02}", (duration.num_seconds() % 60))
).to_owned();
}
},
"title" => match &s.title {
&None => return no_play.to_owned(),
&Some(ref title) => return title.to_owned()
},
_ => return String::from("🐄🦄").to_owned()
}
}
}
|
use aoc_runner_derive::{aoc, aoc_generator};
use std::collections::VecDeque;
use std::num::ParseIntError;
type Player = VecDeque<usize>;
type PlayersState = Vec<Player>;
#[aoc_generator(day22)]
fn parse_input_day1(input: &str) -> Result<PlayersState, ParseIntError> {
input
.split("\n\n")
.map(|p| {
p.lines()
.filter(|l| !l.starts_with("Player"))
.map(|l| l.parse())
.collect()
})
.collect()
}
/// Plays one round: both players draw their top card; the winner (given,
/// or decided by the higher card) takes both, their own card first.
/// Returns the index of the round's winner.
fn play_turn(players: &mut [Player], winner: Option<usize>) -> usize {
    let top0 = players[0].pop_front().unwrap();
    let top1 = players[1].pop_front().unwrap();
    // Ties go to player 1, matching `if card1 > card2 { 0 } else { 1 }`.
    let winner = winner.unwrap_or_else(|| usize::from(top0 <= top1));
    match winner {
        0 => {
            players[0].push_back(top0);
            players[0].push_back(top1);
        }
        _ => {
            players[1].push_back(top1);
            players[1].push_back(top0);
        }
    }
    winner
}
/// Plays non-recursive Combat until a deck empties; returns the last
/// round's winner (which is the game winner).
fn play_game(players: &mut [Player]) -> usize {
    let mut last_winner = 0;
    loop {
        if players.iter().any(|deck| deck.is_empty()) {
            return last_winner;
        }
        last_winner = play_turn(players, None);
    }
}
/// Scores the winning deck: bottom card counts 1x, next 2x, and so on.
fn compute_score(players: &[Player], winner: usize) -> usize {
    players[winner]
        .iter()
        .rev()
        .zip(1..)
        .map(|(card, rank)| card * rank)
        .sum()
}
#[aoc(day22, part1)]
/// Part 1: plain Combat; returns the winner's score.
fn part1(players: &[Player]) -> usize {
    let mut decks = players.to_vec();
    let winner = play_game(&mut decks);
    compute_score(&decks, winner)
}
/// Plays Recursive Combat in place, writing the game winner into `winner`.
/// `seen` holds every deck configuration already observed in this game.
fn play_recursive(players: &mut [Player], seen: &mut Vec<PlayersState>, winner: &mut usize) {
    while !players.iter().any(|p| p.is_empty()) {
        // Snapshot the state once and reuse it for both the cycle check and
        // the history push (previously `to_vec()` was called twice per turn).
        let state = players.to_vec();
        if seen.contains(&state) {
            // Repeated configuration: player 1 wins the game by rule.
            *winner = 0;
            return;
        }
        seen.push(state);
        let card1 = players[0][0];
        let card2 = players[1][0];
        if players[0].len() > card1 && players[1].len() > card2 {
            // Both players have enough cards: recurse into a sub-game played
            // with copies of the next `card` cards of each deck.
            let mut sub: Vec<Player> = vec![
                players[0].iter().skip(1).take(card1).copied().collect(),
                players[1].iter().skip(1).take(card2).copied().collect(),
            ];
            play_recursive(&mut sub, &mut Vec::new(), winner);
            play_turn(players, Some(*winner));
        } else {
            *winner = play_turn(players, None);
        }
    }
}
#[aoc(day22, part2)]
/// Part 2: Recursive Combat; returns the winner's score.
fn part2(players: &[Player]) -> usize {
    let mut decks = players.to_vec();
    let mut winner = 0;
    play_recursive(&mut decks, &mut Vec::new(), &mut winner);
    compute_score(&decks, winner)
}
|
// Demonstrates variable shadowing (re-declaring `x`/`spaces` with `let`).
fn main() {
    const MAX_POINTS: u32 = 100_000;
    let x = 5;
    println!("The value of x is: {}", x);
    // Each `let x = ...` shadows the previous binding rather than mutating it.
    let x = x + 1;
    let x = x * MAX_POINTS;
    println!("The value of x is: {}", x);
    let spaces = " ";
    // Shadowing may also change the type: &str -> usize. The final binding
    // is intentionally unused (this is demo code), so the compiler warns.
    let spaces: usize = spaces.len();
}
|
pub mod cache;
pub mod git;
pub mod repo;
pub mod util;
|
//! Mock objects, useful in testing and development.
pub mod runtime;
pub mod transport;
|
use crate::config_instruction::ConfigInstruction;
use crate::ConfigState;
use solana_sdk::hash::Hash;
use solana_sdk::pubkey::Pubkey;
use solana_sdk::signature::{Keypair, KeypairUtil};
use solana_sdk::transaction::Transaction;
// Namespace-only struct: all constructors are associated functions.
pub struct ConfigTransaction {}
impl ConfigTransaction {
    /// Create a new, empty configuration account
    pub fn new_account<T: ConfigState>(
        from_keypair: &Keypair,
        config_account_pubkey: &Pubkey,
        recent_blockhash: Hash,
        lamports: u64,
        fee: u64,
    ) -> Transaction {
        // Build the unsigned transaction from a single instruction, then set
        // the fee before signing — presumably so the fee is part of the
        // signed message; confirm against this solana_sdk version.
        let mut transaction = Transaction::new(vec![ConfigInstruction::new_account::<T>(
            &from_keypair.pubkey(),
            config_account_pubkey,
            lamports,
        )]);
        transaction.fee = fee;
        transaction.sign(&[from_keypair], recent_blockhash);
        transaction
    }
    /// Store new state in a configuration account
    pub fn new_store<T: ConfigState>(
        config_account_keypair: &Keypair,
        data: &T,
        recent_blockhash: Hash,
        fee: u64,
    ) -> Transaction {
        // Same pattern as `new_account`: construct, set fee, sign.
        let mut transaction = Transaction::new(vec![ConfigInstruction::new_store(
            &config_account_keypair.pubkey(),
            data,
        )]);
        transaction.fee = fee;
        transaction.sign(&[config_account_keypair], recent_blockhash);
        transaction
    }
}
|
use caolo_sim::{
components::UserProperties,
prelude::{UserId, View},
};
use tokio::sync::mpsc;
use tokio_stream::wrappers::ReceiverStream;
use tonic::Status;
use tracing::info;
use uuid::Uuid;
use crate::{
input::users,
protos::{cao_common, cao_users},
};
#[derive(Clone)]
pub struct UsersService {
    // Handle to the simulation world; accessed via `.read()`/`.write()`
    // guards in the trait impl below.
    world: crate::WorldContainer,
}
impl UsersService {
    // Wraps a world container in a users gRPC service.
    pub fn new(world: crate::WorldContainer) -> Self {
        Self { world }
    }
}
#[tonic::async_trait]
impl cao_users::users_server::Users for UsersService {
    type ListUsersStream = ReceiverStream<Result<cao_common::Uuid, Status>>;
    /// Streams the UUID of every user currently in the world.
    async fn list_users(
        &self,
        request: tonic::Request<cao_common::Empty>,
    ) -> Result<tonic::Response<Self::ListUsersStream>, Status> {
        let addr = request.remote_addr();
        // Collect the ids under the read lock, then release it before the
        // (potentially slow) streaming starts.
        let w = self.world.read().await;
        let users: Vec<UserId> = w.list_users().collect();
        drop(w); // free the lock
        let (tx, rx) = mpsc::channel(512);
        tokio::spawn(async move {
            for u in users {
                // Copy the raw UUID bytes into the protobuf payload.
                // (`to_vec` replaces the manual with_capacity + extend pair.)
                let data = u.0.as_bytes().to_vec();
                if tx.send(Ok(cao_common::Uuid { data })).await.is_err() {
                    info!("list users client lost {:?}", addr);
                    break;
                }
            }
        });
        Ok(tonic::Response::new(ReceiverStream::new(rx)))
    }
    /// Looks up a single user's properties by UUID.
    async fn get_user_info(
        &self,
        request: tonic::Request<cao_common::Uuid>,
    ) -> Result<tonic::Response<cao_users::UserInfo>, Status> {
        let user_id = &request.get_ref().data;
        let user_id = Uuid::from_slice(user_id).map_err(|err| {
            Status::invalid_argument(format!("Payload was not a valid UUID: {}", err))
        })?;
        let user_id = UserId(user_id);
        let properties;
        {
            // free the read guard asap
            let w = self.world.read().await;
            let props_table: View<UserId, UserProperties> = w.view();
            properties = props_table.get(user_id).cloned();
        }
        // (local binding typo `properies` fixed)
        let result = match properties {
            Some(properties) => cao_users::UserInfo {
                user_id: Some(request.into_inner()),
                level: properties.level as i32,
            },
            None => {
                return Err(Status::not_found(format!(
                    "User {} was not found",
                    user_id.0
                )))
            }
        };
        Ok(tonic::Response::new(result))
    }
    /// Registers a new user; invalid payloads map to `invalid_argument`.
    async fn register_user(
        &self,
        request: tonic::Request<cao_users::RegisterUserMsg>,
    ) -> Result<tonic::Response<cao_common::Empty>, tonic::Status> {
        let req = request.get_ref();
        let mut w = self.world.write().await;
        users::register_user(&mut w, req)
            .map(|_: ()| tonic::Response::new(cao_common::Empty {}))
            .map_err(|err| Status::invalid_argument(err.to_string()))
    }
}
|
// Copyright 2019 Steven Bosnick
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE-2.0 or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms
use std::fmt::{Debug, Display, Formatter};
use failure::*;
use goblin::error::Error as GoblinError;
/// The error type for the different stages of preloading an elf file.
#[derive(Fail, Debug)]
pub enum Error {
    /// The input bytes are not a proper ELF file.
    // `#[cause]` chains the wrapped parse error into failure's cause chain.
    #[fail(display = "The input bytes are not a proper ELF file.")]
    BadElf(#[cause] BadElfError),
    /// The input ELF file has failed a constraint validation
    #[fail(
        display = "The input ELF file does not satisfy a required constraint: {}",
        message
    )]
    InvalidElf {
        /// The error message that describes the failed constraint.
        message: String,
    },
    /// The output bytes are too small for the layout of the output ELF file.
    #[fail(display = "The output bytes are too small for the layout of the output ELF file.")]
    OutputTooSmall,
}
#[doc(hidden)]
impl From<GoblinError> for Error {
fn from(inner: GoblinError) -> Self {
Error::BadElf(BadElfError(inner))
}
}
/// A specialized Result type for elf preloading operations.
pub type Result<T> = std::result::Result<T, Error>;
/// A new type to wrap errors in parsing an ELF file.
// Display/Debug are hand-written below; Debug includes the inner goblin
// error while Display stays user-friendly.
#[derive(Fail)]
pub struct BadElfError(#[cause] GoblinError);
impl Display for BadElfError {
    /// User-facing message; the underlying cause is shown via `Debug`.
    fn fmt(&self, f: &mut Formatter) -> std::fmt::Result {
        f.write_str("Error parsing the input bytes as an ELF file.")
    }
}
impl Debug for BadElfError {
fn fmt(&self, f: &mut Formatter) -> std::fmt::Result {
write!(f, "goblin error parsing input bytes: {}", self.0)
}
}
|
use std::fs;
fn main() {
    let text = fs::read_to_string("../11_input.txt").unwrap();
    // Skip blank lines (e.g. the empty string produced by a trailing
    // newline), which previously made `parse().unwrap()` panic.
    let lines: Vec<&str> = text
        .lines()
        .filter(|line| !line.trim().is_empty())
        .collect();
    let mut matrix: Vec<Vec<i32>> = Vec::new();
    for line in lines {
        // `split_whitespace` also tolerates repeated spaces between numbers.
        matrix.push(
            line.split_whitespace()
                .map(|x| x.parse::<i32>().unwrap())
                .collect::<Vec<i32>>(),
        )
    }
    print!("{}", product_dir(&matrix, 4));
}
/// Returns the greatest product of `size` adjacent entries in `matrix`,
/// scanning horizontally, vertically, and along both diagonals.
///
/// Assumes a rectangular matrix with at least `size` rows and rows of at
/// least `size` columns (shorter inputs underflow the `usize` ranges).
fn product_dir(matrix: &[Vec<i32>], size: usize) -> i32 {
    let mut greatest_product = 0;
    // horizontal
    for i in 0..matrix.len() {
        for j in 0..=matrix[i].len() - size {
            let current = matrix[i][j..j + size].iter().product();
            if current > greatest_product {
                greatest_product = current;
            }
        }
    }
    // vertical
    for i in 0..=matrix.len() - size {
        for j in 0..matrix[i].len() {
            let mut current = 1;
            for k in 0..size {
                current *= matrix[i + k][j];
            }
            if current > greatest_product {
                greatest_product = current;
            }
        }
    }
    // ascending
    for i in 0..=matrix.len() - size {
        for j in 0..=matrix[i].len() - size {
            let mut current = 1;
            for k in 0..size {
                current *= matrix[i + k][j + k];
            }
            if current > greatest_product {
                greatest_product = current;
            }
        }
    }
    // descending
    // BUG FIX: this range previously started at `size + 1`, silently
    // skipping the diagonals anchored at rows `size - 1` and `size`.
    for i in size - 1..matrix.len() {
        for j in 0..=matrix[i].len() - size {
            let mut current = 1;
            for k in 0..size {
                current *= matrix[i - k][j + k];
            }
            if current > greatest_product {
                greatest_product = current;
            }
        }
    }
    greatest_product
}
use std::{convert::Infallible, sync::Arc};
use bonsaidb_core::schema::{view, InvalidNameError};
use nebari::AbortError;
use crate::vault;
/// Errors that can occur from interacting with storage.
#[derive(thiserror::Error, Debug)]
pub enum Error {
    /// An error occurred interacting with the storage layer, `nebari`.
    #[error("error from storage: {0}")]
    Nebari(#[from] nebari::Error),
    /// An error occurred serializing the underlying database structures.
    #[error("error while serializing internal structures: {0}")]
    InternalSerialization(#[from] bincode::Error),
    /// An error occurred serializing the contents of a `Document` or results of a `View`.
    #[error("error while serializing: {0}")]
    Serialization(#[from] serde_cbor::Error),
    /// An internal error occurred while waiting for a message.
    #[error("error while waiting for a message: {0}")]
    InternalCommunication(#[from] flume::RecvError),
    /// An error occurred while executing a view
    #[error("error from view: {0}")]
    View(#[from] view::Error),
    /// An error occurred in the secrets storage layer.
    #[error("a vault error occurred: {0}")]
    Vault(#[from] vault::Error),
    /// A core error occurred.
    #[error("a core error occurred: {0}")]
    Core(#[from] bonsaidb_core::Error),
    /// An unexpected error occurred.
    // Wrapped in `Arc` so the otherwise non-Clone `anyhow::Error` can be shared.
    #[error("an unexpected error occurred: {0}")]
    Other(#[from] Arc<anyhow::Error>),
}
impl From<Error> for bonsaidb_core::Error {
fn from(err: Error) -> Self {
match err {
Error::Core(core) => core,
other => Self::Database(other.to_string()),
}
}
}
impl From<InvalidNameError> for Error {
fn from(err: InvalidNameError) -> Self {
Self::Core(bonsaidb_core::Error::from(err))
}
}
impl From<AbortError<Infallible>> for Error {
    /// Unwraps a nebari [`AbortError`] whose "other" payload is `Infallible`.
    fn from(err: AbortError<Infallible>) -> Self {
        match err {
            AbortError::Nebari(error) => Self::Nebari(error),
            // `Infallible` has no values, so `Other` can never actually be
            // constructed; this arm exists only to satisfy exhaustiveness.
            AbortError::Other(_) => unreachable!(),
        }
    }
}
#[test]
fn test_converting_error() {
    use serde::ser::Error as _;
    // A serialization error must surface as a stringified `Database` error.
    let converted: bonsaidb_core::Error =
        Error::Serialization(serde_cbor::Error::custom("mymessage")).into();
    if let bonsaidb_core::Error::Database(message) = converted {
        assert!(message.contains("mymessage"));
    } else {
        unreachable!()
    }
}
|
use std::{env, fs::{File, OpenOptions}, io::{Error, Read, Write}, path::Path};
use serde::{Serialize, Deserialize};
use serde_json;
/// A single todo entry as serialized to/from the JSON store.
#[derive(Serialize, Deserialize)]
struct Todo {
    /// Free-form description of the task.
    text: String,
    /// Category label attached to the task.
    tag: String,
}
impl Todo {
fn new(text: String, tag: String) -> Self {
Self {
text, tag
}
}
}
/// The full todo list as stored on disk.
type Todos = Vec<Todo>;
/// Opens the file at `path` for reading and writing, creating it if it does
/// not exist, and returning `Err(why)` if there's a problem.
fn get_file(path: &Path) -> Result<File, Error> {
    // `create(true)` makes the open match the documented "create if missing"
    // behavior; without it, a first run with no todos.json fails with NotFound.
    OpenOptions::new().read(true).write(true).create(true).open(path)
}
// Read the data in a file and parse the JSON into a todo list.
// NOTE(review): reads from the file's *current* cursor position; callers must
// ensure the cursor is at the start. serde_json errors convert to io::Error
// via `From` (that is what the second `?` relies on).
fn parse_file(file: &mut File) -> Result<Todos, Error> {
    let mut buffer = String::new();
    file.read_to_string(&mut buffer)?;
    Ok(serde_json::from_str(&buffer[..])?)
}
// Save todos into the todo list file, replacing its previous contents.
// The original version wrote at the file's current cursor (end-of-file after
// a read), appending a second JSON document and corrupting the store; we now
// rewind and truncate before writing.
fn save_todos(file: &mut File, todos: &Todos) -> Result<(), Error> {
    use std::io::{Seek, SeekFrom};
    let data = serde_json::to_string(todos)?;
    file.seek(SeekFrom::Start(0))?;
    // Truncate so a shorter serialization does not leave stale bytes behind.
    file.set_len(0)?;
    file.write_all(data.as_bytes())
}
fn main() {
    let args: Vec<String> = env::args().collect();
    let path = Path::new("todos.json");
    let display = path.display();
    // Checked access: running with no subcommand prints the fallback message
    // instead of panicking on `args[1]`.
    match args.get(1).map(String::as_str) {
        Some("list") => println!("List"),
        _ => println!("Unrecognised command!"),
    }
    // Open (creating if needed) and parse the todo file; an empty or
    // unparsable file starts an empty list rather than crashing.
    let mut file = get_file(path)
        .unwrap_or_else(|err| panic!("Error opening file: {} ({})", display, err));
    let mut todo_list: Todos = parse_file(&mut file).unwrap_or_default();
    println!("Before:");
    for todo in &todo_list {
        println!("Todo: {}, Tag: {}", todo.text, todo.tag);
    }
    // `push` replaces `append(&mut vec![new_todo])`, which built a throwaway
    // Vec just to move a single element.
    let new_todo = Todo::new(String::from("Todo5"), String::from("Todo5 Tag"));
    todo_list.push(new_todo);
    save_todos(&mut file, &todo_list).unwrap();
    println!("After:");
    for todo in &todo_list {
        println!("Todo: {}, Tag: {}", todo.text, todo.tag);
    }
}
|
use serde_derive::Deserialize;
use std::collections::BTreeMap;
/// A key-ordered mapping of bookmark names to their commands, deserialized
/// directly from configuration.
#[derive(Deserialize)]
pub struct BookmarkList(pub BTreeMap<String, String>);
impl BookmarkList {
    /// Renders the list as one `name command` line per bookmark, with ANSI
    /// styling (bold name, grey command).
    // NOTE(review): this inherent `to_string` shadows `ToString::to_string`;
    // implementing `Display` instead would be more idiomatic but changes the API.
    pub fn to_string(&self) -> String {
        self.0
            .iter()
            .map(|(key, value)| format!("\x1B[1m{}\x1B[0m \x1B[38;5;249m{}\x1B[0m", key, value))
            .collect::<Vec<String>>()
            .join("\n")
    }
    /// Returns the command at `index` in key order.
    ///
    /// # Panics
    /// Panics if `index` is out of bounds (as the original indexing did).
    pub fn command_at(&self, index: usize) -> String {
        // `nth` walks straight to the requested entry instead of cloning
        // every command into a temporary Vec first.
        self.0
            .values()
            .nth(index)
            .expect("bookmark index out of bounds")
            .clone()
    }
    /// Whether a bookmark named `name` exists.
    pub fn has_item<T: AsRef<str>>(&self, name: T) -> bool {
        self.0.contains_key(name.as_ref())
    }
    /// Returns the command for `name`.
    ///
    /// # Panics
    /// Panics if the bookmark does not exist; check [`Self::has_item`] first.
    pub fn get_item<T: AsRef<str>>(&self, name: T) -> &String {
        self.0.get(name.as_ref()).unwrap()
    }
}
|
/*!
```rudra-poc
[target]
crate = "csv-sniffer"
version = "0.1.1"
[report]
issue_url = "https://github.com/jblondin/csv-sniffer/issues/1"
issue_date = 2021-01-05
rustsec_url = "https://github.com/RustSec/advisory-db/pull/666"
rustsec_id = "RUSTSEC-2021-0088"
[[bugs]]
analyzer = "UnsafeDataflow"
bug_class = "UninitExposure"
rudra_report_locations = ["src/snip.rs:7:1: 36:2"]
```
!*/
#![forbid(unsafe_code)]
fn main() {
    // Placeholder: the advisory described in the module docs above was filed
    // without a proof-of-concept, so this binary only records that fact.
    panic!("This issue was reported without PoC");
}
|
fn main() {
    // Closure with explicit parameter and return types.
    let plus_one = |x: i32| -> i32 { x + 1 };
    let a = 6;
    println!("{} + 1 = {}", a, plus_one(a));
    let mut b = 2;
    {
        // Closure capturing `b` from the enclosing scope.
        let plus_two = |x| {
            let mut z = x;
            z += b;
            z
        };
        println!("{} + 2 = {}", 3, plus_two(3));
    }
    // Capture modes:
    //   T      by value (copy)
    //   &T     by reference
    //   &mut T by mutable reference   (typo "my" fixed)
    // Borrow by mutable reference.
    println!("{}", &mut b);
    let mut c = 12;
    // Renamed from `plus_tree`: adds 3 in place through a mutable reference.
    let plus_three = |x: &mut i32| *x += 3;
    plus_three(&mut c);
    println!("c = {}", c);
}
|
extern crate alga;
#[macro_use]
extern crate alga_derive;
extern crate approx;
extern crate quickcheck;
use alga::general::{AbstractMagma, Additive, Identity, Multiplicative, TwoSidedInverse, Field};
use approx::{AbsDiffEq, RelativeEq, UlpsEq};
use quickcheck::{Arbitrary, Gen};
use num_traits::{Zero, One};
use std::ops::{Add, AddAssign, Sub, SubAssign, Neg, Mul, MulAssign, Div, DivAssign};
/// A field over `f64`: the `Alga` derive generates the algebraic trait tower
/// for `Field` over the `Additive` and `Multiplicative` operators, and
/// `#[alga_quickcheck]` generates property tests for the field laws.
#[derive(Alga, Clone, PartialEq, Debug)]
#[alga_traits(Field(Additive, Multiplicative))]
#[alga_quickcheck]
struct W(f64);
// Compile-time check that the derive actually produced a `Field` impl for `W`.
fn test_trait_impl() {
    fn is_field<T: Field>() {}
    is_field::<W>();
}
// Approximate-equality support (required by the quickcheck law tests), all
// delegating to the wrapped f64.
impl AbsDiffEq for W {
    type Epsilon = W;
    fn default_epsilon() -> W {
        // Absolute tolerance of 1e-10.
        W(0.0000000001)
    }
    fn abs_diff_eq(&self, other: &W, epsilon: W) -> bool {
        self.0.abs_diff_eq(&other.0, epsilon.0)
    }
}
impl RelativeEq for W {
    fn default_max_relative() -> W {
        // Relative tolerance of 1e-10.
        W(0.0000000001)
    }
    fn relative_eq(&self, other: &Self, epsilon: W, max_relative: W) -> bool {
        self.0.relative_eq(&other.0, epsilon.0, max_relative.0)
    }
}
impl UlpsEq for W {
    fn default_max_ulps() -> u32 {
        40
    }
    fn ulps_eq(&self, other: &Self, epsilon: W, max_ulps: u32) -> bool {
        self.0.ulps_eq(&other.0, epsilon.0, max_ulps)
    }
}
// Random generation and shrinking for quickcheck: wrap/unwrap the inner f64.
impl Arbitrary for W {
    fn arbitrary<G: Gen>(g: &mut G) -> Self {
        W(f64::arbitrary(g))
    }
    fn shrink(&self) -> Box<dyn Iterator<Item = Self>> {
        Box::new(self.0.shrink().map(W))
    }
}
// Magma: the binary operation for each operator flavor.
impl AbstractMagma<Additive> for W {
    fn operate(&self, right: &Self) -> Self {
        W(self.0 + right.0)
    }
}
impl AbstractMagma<Multiplicative> for W {
    fn operate(&self, right: &Self) -> Self {
        W(self.0 * right.0)
    }
}
// Inverses: negation for addition, reciprocal for multiplication.
impl TwoSidedInverse<Additive> for W {
    fn two_sided_inverse(&self) -> Self {
        W(-self.0)
    }
}
impl TwoSidedInverse<Multiplicative> for W {
    // NOTE: 1.0 / 0.0 yields IEEE infinity rather than an error; zero has no
    // true multiplicative inverse.
    fn two_sided_inverse(&self) -> Self {
        W(1. / self.0)
    }
}
// Identity elements: 0 for addition, 1 for multiplication.
impl Identity<Additive> for W {
    fn identity() -> Self {
        W(0.)
    }
}
impl Identity<Multiplicative> for W {
    fn identity() -> Self {
        W(1.)
    }
}
// Standard operator impls, all delegating to the wrapped f64.
impl Add<W> for W {
    type Output = W;
    fn add(self, rhs: W) -> W {
        W(self.0 + rhs.0)
    }
}
impl Sub<W> for W {
    type Output = W;
    fn sub(self, rhs: W) -> W {
        W(self.0 - rhs.0)
    }
}
impl AddAssign<W> for W {
    fn add_assign(&mut self, rhs: W) {
        self.0 += rhs.0
    }
}
impl SubAssign<W> for W {
    fn sub_assign(&mut self, rhs: W) {
        self.0 -= rhs.0
    }
}
impl Neg for W {
    type Output = W;
    fn neg(self) -> W {
        W(-self.0)
    }
}
// num-traits identities, agreeing with the alga `Identity` impls above.
impl Zero for W {
    fn zero() -> W {
        W(0.0)
    }
    fn is_zero(&self) -> bool {
        self.0.is_zero()
    }
}
impl One for W {
    fn one() -> W {
        W(1.0)
    }
}
impl Mul<W> for W {
    type Output = W;
    fn mul(self, rhs: W) -> W {
        W(self.0 * rhs.0)
    }
}
impl Div<W> for W {
    type Output = W;
    fn div(self, rhs: W) -> W {
        W(self.0 / rhs.0)
    }
}
impl MulAssign<W> for W {
    fn mul_assign(&mut self, rhs: W) {
        self.0 *= rhs.0
    }
}
impl DivAssign<W> for W {
    fn div_assign(&mut self, rhs: W) {
        self.0 /= rhs.0
    }
}
|
//! # ez-pixmap
//!
//! A naive and easy inline pixmap (xpm-like) image decoder.
//! This is non-compliant with xpm image format, however it's close enough.
//! - Doesn't support monochrome nor symbolics.
//! - Supports only 1 character per pixel.
//!
//! Main use case: Simple icon art.
//!
//! ## Usage
//! ```text
//! [dependencies]
//! ez-pixmap = "0.2"
//! ```
//!
//! ```no_run
//! extern crate ez_pixmap;
//!
//! const PXM: &[&str] = &[
//! "50 34 4 1", // <width> <height> <num of colors> <chars/pixels>
//! " c black", // <char> c <color>
//! "o c #ff9900",
//! "@ c white",
//! "# c None",
//! // pixels
//! "##################################################",
//! "### ############################## ####",
//! "### ooooo ########################### ooooo ####",
//! "### oo oo ######################### oo oo ####",
//! "### oo oo ####################### oo oo ####",
//! "### oo oo ##################### oo oo ####",
//! "### oo oo ################### oo oo ####",
//! "### oo oo oo oo ####",
//! "### oo oo ooooooooooooooo oo oo ####",
//! "### oo ooooooooooooooooooooo oo ####",
//! "### oo ooooooooooooooooooooooooooo ooo ####",
//! "#### oo ooooooo ooooooooooooo ooooooo oo #####",
//! "#### oo oooooooo ooooooooooooo oooooooo oo #####",
//! "##### oo oooooooo ooooooooooooo oooooooo oo ######",
//! "##### o ooooooooooooooooooooooooooooooo o ######",
//! "###### ooooooooooooooooooooooooooooooooooo #######",
//! "##### ooooooooo ooooooooo ooooooooo ######",
//! "##### oooooooo @@@ ooooooo @@@ oooooooo ######",
//! "##### oooooooo @@@@@ ooooooo @@@@@ oooooooo ######",
//! "##### oooooooo @@@@@ ooooooo @@@@@ oooooooo ######",
//! "##### oooooooo @@@ ooooooo @@@ oooooooo ######",
//! "##### ooooooooo ooooooooo ooooooooo ######",
//! "###### oooooooooooooo oooooooooooooo #######",
//! "###### oooooooo@@@@@@@ @@@@@@@oooooooo #######",
//! "###### ooooooo@@@@@@@@@ @@@@@@@@@ooooooo #######",
//! "####### ooooo@@@@@@@@@@@ @@@@@@@@@@@ooooo ########",
//! "######### oo@@@@@@@@@@@@ @@@@@@@@@@@@oo ##########",
//! "########## o@@@@@@ @@@@@ @@@@@ @@@@@@o ###########",
//! "########### @@@@@@@ @ @@@@@@@ ############",
//! "############ @@@@@@@@@@@@@@@@@@@@@ #############",
//! "############## @@@@@@@@@@@@@@@@@ ###############",
//! "################ @@@@@@@@@ #################",
//! "#################### #####################",
//! "##################################################",
//! ];
//!
//! fn main() -> Result<(), Box<dyn std::error::Error>> {
//! let my_image = ez_pixmap::RgbaImage::from(PXM)?;
//! assert_eq!(my_image.width(), 50);
//! assert_eq!(my_image.height(), 34);
//! assert_eq!(my_image.data().len(), 50 * 34 * 4); // since it's rgba
//! Ok(())
//! }
//! ```
//!
//! The list of supported color names can be found [here](https://github.com/MoAlyousef/ez-pixmap/blob/main/src/colors.rs).
#![warn(missing_docs)]
/// EzPixmap Error types
#[derive(Debug)]
pub enum EzPixmapError {
    /// Parse error: a numeric header field or hex color digit failed to parse.
    ParseError(std::num::ParseIntError),
    /// Internal error, classified by [`EzPixmapErrorKind`].
    Internal(EzPixmapErrorKind),
}
/// EzPixmap error kinds
#[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
pub enum EzPixmapErrorKind {
    /// Invalid EzPixmap format (bad header, palette line, or pixel count).
    InvalidFormat,
    /// Xpm feature not implemented
    NotImplemented,
}
impl std::error::Error for EzPixmapError {
    /// Only `ParseError` wraps an underlying error; `Internal` has no source.
    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
        if let EzPixmapError::ParseError(err) = self {
            Some(err)
        } else {
            None
        }
    }
}
impl std::fmt::Display for EzPixmapError {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
match *self {
EzPixmapError::ParseError(ref err) => err.fmt(f),
EzPixmapError::Internal(ref err) => write!(f, "An internal error occured {:?}", err),
}
}
}
impl From<std::num::ParseIntError> for EzPixmapError {
fn from(err: std::num::ParseIntError) -> EzPixmapError {
EzPixmapError::ParseError(err)
}
}
// Parsed first pixmap line: "<width> <height> <num of colors> <chars/pixel>".
#[derive(Default, Clone)]
struct Header {
    w: u32,
    h: u32,
    num_colors: u32,
    // Characters per pixel; only 1 is supported (enforced in `RgbaImage::from`).
    ppc: u32,
}
// One palette entry: the pixel character and its RGBA color.
#[derive(Default, Clone, Copy)]
struct ColorMap {
    c: char,
    col: (u8, u8, u8, u8),
}
/// Struct containing Rgba data
#[derive(Debug, Clone)]
pub struct RgbaImage {
    width: u32,
    height: u32,
    // Interleaved RGBA bytes, row-major: `width * height * 4` entries.
    data: Vec<u8>,
}
impl RgbaImage {
    /// Generate RGBA data from a pixmap
    ///
    /// Expects `pixmap[0]` to be the header line
    /// `"<width> <height> <num colors> <chars per pixel>"`, followed by
    /// `num_colors` palette lines (`"<char> c <color>"`), followed by the
    /// pixel rows. Returns `InvalidFormat` if chars-per-pixel is not 1 or the
    /// decoded byte count does not equal `width * height * 4`.
    pub fn from(pixmap: &[&str]) -> Result<RgbaImage, EzPixmapError> {
        let mut header = Header::default();
        let mut data = vec![];
        let mut col_vec: Vec<ColorMap> = vec![];
        for i in 0..pixmap.len() {
            if i == 0 {
                // Header line: four whitespace-separated integers.
                let line = pixmap[0];
                let vals: Vec<&str> = line.split_ascii_whitespace().collect();
                header.w = vals[0].parse()?;
                header.h = vals[1].parse()?;
                header.num_colors = vals[2].parse()?;
                header.ppc = vals[3].parse()?;
                // Only one character per pixel is supported (see module docs).
                if header.ppc != 1 {
                    return Err(EzPixmapError::Internal(EzPixmapErrorKind::InvalidFormat));
                }
                continue;
            }
            if i <= header.num_colors as usize {
                // Palette line, fixed layout: pixel char at index 0, literal
                // 'c' at index 2, color spec from index 4 onward.
                let mut col = ColorMap::default();
                let line = pixmap[i];
                let chars: Vec<char> = line.chars().collect();
                col.c = chars[0];
                if chars[2] != 'c' {
                    return Err(EzPixmapError::Internal(EzPixmapErrorKind::InvalidFormat));
                }
                let color: String = chars[4..].iter().collect();
                if color.starts_with('#') {
                    // Hex color "#RRGGBB", fully opaque.
                    // shouldn't fail
                    let color = color.strip_prefix("#").unwrap();
                    let r = u8::from_str_radix(&color[0..2], 16)?;
                    let g = u8::from_str_radix(&color[2..4], 16)?;
                    let b = u8::from_str_radix(&color[4..6], 16)?;
                    let a = 255;
                    col.col = (r, g, b, a);
                } else {
                    if color == "None" || color == "none" {
                        // "None" means fully transparent.
                        col.col = (255, 255, 255, 0);
                    } else {
                        // Named X11 color; unknown names fall back to black.
                        let rgb = *color_maps::x::X_MAP.get(color.as_str()).unwrap_or(&(0, 0, 0));
                        col.col = (rgb.0, rgb.1, rgb.2, 255);
                    }
                }
                col_vec.push(col);
                continue;
            }
            // Pixel row: emit 4 RGBA bytes per character that matches a
            // palette entry. Characters with no palette entry emit nothing,
            // which the final length check below then rejects.
            let line = pixmap[i];
            let chars: Vec<char> = line.chars().collect();
            for c in chars {
                for elem in &col_vec {
                    if c == elem.c {
                        data.push(elem.col.0);
                        data.push(elem.col.1);
                        data.push(elem.col.2);
                        data.push(elem.col.3);
                    }
                }
            }
        }
        if data.len() != (header.w * header.h * 4) as usize {
            return Err(EzPixmapError::Internal(EzPixmapErrorKind::InvalidFormat));
        }
        Ok(RgbaImage {
            data,
            width: header.w,
            height: header.h,
        })
    }
    /// Get the data of the image
    pub fn data(&self) -> &[u8] {
        &self.data
    }
    /// Get the width of the RgbaImage
    pub fn width(&self) -> u32 {
        self.width
    }
    /// Get the height of the RgbaImage
    pub fn height(&self) -> u32 {
        self.height
    }
}
|
use std::io::Read;
use std::path::Path;
use std::fs::File;
#[macro_use]
extern crate sdl2;
extern crate nes;
mod gui;
fn main() {
    // Hard-coded test ROM; printed so the operator can see what was loaded.
    let rom_path = "../test-roms/spritecans.nes";
    println!("{}", rom_path);
    let mut gui = gui::GuiObject::new();
    gui.load_rom_from_file(Path::new(rom_path)).unwrap();
    gui.run();
    println!("Hello, rottenes!");
}
|
extern crate termion;
mod event;
pub use event::*;
mod editor;
pub use editor::*;
mod complete;
pub use complete::*;
mod context;
pub use context::*;
mod buffer;
pub use buffer::*;
mod util;
#[cfg(test)]
mod test;
|
use actix_web::{HttpResponse, http::StatusCode};
/// A calendar date (no time component), formatted/parsed as `%Y-%m-%d`.
pub struct DateVal(chrono::NaiveDate);
impl DateVal {
    // Could also implement some ToString, Into<String> ? Not sure which makes sense
    /// Renders the date as `YYYY-MM-DD`.
    pub fn format(&self) -> String {
        self.0.format("%Y-%m-%d").to_string()
    }
    /// Parses `YYYY-MM-DD`; parse failures convert into the crate error type.
    // NOTE(review): this inherent `try_from` shadows `TryFrom::try_from` in
    // method position; the trait impl below simply delegates here.
    pub fn try_from(from: &str) -> crate::error::Result<Self> {
        let date = chrono::NaiveDate::parse_from_str(from, "%Y-%m-%d")?;
        Ok(DateVal(date))
    }
}
impl std::convert::TryFrom<&str> for DateVal {
    type Error = crate::error::Error;
    /// Delegates to the inherent `DateVal::try_from`.
    fn try_from(from: &str) -> std::result::Result<Self, Self::Error> {
        DateVal::try_from(from)
    }
}
/// A date with time-of-day, formatted/parsed as `%Y-%m-%d %H:%M:%S`.
pub struct DateTimeVal(chrono::NaiveDateTime);
impl DateTimeVal {
    // Could also implement some ToString, Into<String> ? Not sure which makes sense
    /// Renders the value as `YYYY-MM-DD HH:MM:SS`.
    pub fn format(&self) -> String {
        self.0.format("%Y-%m-%d %H:%M:%S").to_string()
    }
    /// Parses `YYYY-MM-DD HH:MM:SS`; failures convert into the crate error type.
    pub fn try_from(from: &str) -> crate::error::Result<Self> {
        let datetime = chrono::NaiveDateTime::parse_from_str(from, "%Y-%m-%d %H:%M:%S")?;
        Ok(DateTimeVal(datetime))
    }
}
impl std::convert::TryFrom<&str> for DateTimeVal {
    type Error = crate::error::Error;
    /// Delegates to the inherent `DateTimeVal::try_from`.
    fn try_from(from: &str) -> std::result::Result<Self, Self::Error> {
        DateTimeVal::try_from(from)
    }
}
/// Wraps pre-rendered HTML in a `200 OK` response with a UTF-8 HTML
/// content type.
pub fn html_resp(html: String) -> HttpResponse {
    let mut response = HttpResponse::build(StatusCode::OK);
    response.content_type("text/html; charset=utf-8");
    response.body(html)
}
pub fn json_resp<T>(json: T) -> HttpResponse
where
T: serde::Serialize,
{
let json = serde_json::to_string_pretty(&json).unwrap_or("Json error".into());
HttpResponse::build(StatusCode::OK)
.content_type("application/json; charset=utf-8")
.body(json)
}
|
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Machinery for hygienic macros, inspired by the MTWT[1] paper.
//!
//! [1] Matthew Flatt, Ryan Culpepper, David Darais, and Robert Bruce Findler.
//! 2012. *Macros that work together: Compile-time bindings, partial expansion,
//! and definition contexts*. J. Funct. Program. 22, 2 (March 2012), 181-216.
//! DOI=10.1017/S0956796812000093 http://dx.doi.org/10.1017/S0956796812000093
use std::cell::RefCell;
use std::collections::HashMap;
use std::fmt;
/// A SyntaxContext represents a chain of macro expansions (represented by marks).
#[derive(Clone, Copy, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable, Default)]
pub struct SyntaxContext(pub u32);
// One link in a context chain: the outermost mark plus the context it extends.
// Index 0 of the table is the null-mark/empty-context sentinel.
#[derive(Copy, Clone)]
pub struct SyntaxContextData {
    pub outer_mark: Mark,
    pub prev_ctxt: SyntaxContext,
}
/// A mark represents a unique id associated with a macro expansion.
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, Default)]
pub struct Mark(u32);
impl Mark {
    /// Allocates a fresh mark by bumping the thread-local counter.
    pub fn fresh() -> Self {
        HygieneData::with(|data| {
            let next_mark = Mark(data.next_mark.0 + 1);
            // `replace` stores the incremented counter and returns the
            // previous value, so marks are handed out sequentially.
            ::std::mem::replace(&mut data.next_mark, next_mark)
        })
    }
}
// Thread-local storage backing all `SyntaxContext`/`Mark` bookkeeping.
struct HygieneData {
    // Indexed by `SyntaxContext.0`; entry 0 is the empty context.
    syntax_contexts: Vec<SyntaxContextData>,
    // Memoized `apply_mark` results: (context, mark) -> resulting context.
    markings: HashMap<(SyntaxContext, Mark), SyntaxContext>,
    // Counter for `Mark::fresh`.
    next_mark: Mark,
}
impl HygieneData {
    fn new() -> Self {
        HygieneData {
            syntax_contexts: vec![SyntaxContextData {
                outer_mark: Mark(0), // the null mark
                prev_ctxt: SyntaxContext(0), // the empty context
            }],
            markings: HashMap::new(),
            next_mark: Mark(1),
        }
    }
    // Runs `f` with mutable access to this thread's hygiene tables.
    fn with<T, F: FnOnce(&mut HygieneData) -> T>(f: F) -> T {
        thread_local! {
            static HYGIENE_DATA: RefCell<HygieneData> = RefCell::new(HygieneData::new())
        }
        HYGIENE_DATA.with(|data| f(&mut *data.borrow_mut()))
    }
}
/// Resets this thread's hygiene tables to their initial state.
pub fn reset_hygiene_data() {
    HygieneData::with(|data| *data = HygieneData::new())
}
impl SyntaxContext {
    /// The empty context: no expansions applied.
    pub fn empty() -> Self {
        SyntaxContext(0)
    }
    /// Looks up this context's data in the thread-local table.
    pub fn data(self) -> SyntaxContextData {
        HygieneData::with(|data| data.syntax_contexts[self.0 as usize])
    }
    /// Extend a syntax context with a given mark
    pub fn apply_mark(self, mark: Mark) -> SyntaxContext {
        // Applying the same mark twice is a no-op
        let ctxt_data = self.data();
        if mark == ctxt_data.outer_mark {
            return ctxt_data.prev_ctxt;
        }
        HygieneData::with(|data| {
            let syntax_contexts = &mut data.syntax_contexts;
            // Memoize the (context, mark) pair so repeated markings reuse the
            // same context id instead of growing the table each time.
            *data.markings.entry((self, mark)).or_insert_with(|| {
                syntax_contexts.push(SyntaxContextData {
                    outer_mark: mark,
                    prev_ctxt: self,
                });
                SyntaxContext(syntax_contexts.len() as u32 - 1)
            })
        })
    }
    /// If `ident` is macro expanded, return the source ident from the macro definition
    /// and the mark of the expansion that created the macro definition.
    pub fn source(self) -> (Self /* source context */, Mark /* source macro */) {
        // Walk back one expansion step, then report that step's context/mark.
        let macro_def_ctxt = self.data().prev_ctxt.data();
        (macro_def_ctxt.prev_ctxt, macro_def_ctxt.outer_mark)
    }
}
impl fmt::Debug for SyntaxContext {
    // Compact rendering as `#N`.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "#{}", self.0)
    }
}
|
use crate::ir;
use nom::{
branch::alt,
bytes::complete::{tag, take_while, take_while1},
character::complete::{char, digit1, hex_digit1, multispace1, not_line_ending, space0, space1},
combinator::{all_consuming, map, map_res, value},
multi::{fold_many0, many0},
sequence::{preceded, terminated, tuple},
IResult,
};
use std::str::FromStr;
/// Parses a whole source text into an `ir::Program`.
///
/// The entire input must be consumed (modulo trailing whitespace and `%`
/// comments); any parse failure is collapsed into one static message.
pub fn parse_program(input: &str) -> Result<ir::Program, &'static str> {
    let result = all_consuming(terminated(instructions, whitespaces_or_comment))(input);
    match result {
        Ok((_, instructions)) => {
            let program = ir::Program::new(instructions);
            Ok(program)
        }
        Err(_) => Err("Failed to parse program!"),
    }
}
// Parses zero or more (optionally labeled) instructions, assigning each its
// sequential position as its address.
fn instructions(input: &str) -> IResult<&str, Vec<ir::Instruction>> {
    fold_many0(
        preceded(
            whitespaces_or_comment,
            alt((labeled_instruction, instruction)),
        ),
        Vec::new,
        |mut instructions: Vec<_>, mut inst| {
            // The address of an instruction is its index in the program.
            inst.set_address(instructions.len() as u64);
            instructions.push(inst);
            instructions
        },
    )(input)
}
// A comment runs from '%' to the end of the line.
fn comment(input: &str) -> IResult<&str, &str> {
    preceded(char('%'), not_line_ending)(input)
}
// Skips any mix (possibly none) of whitespace and comments.
fn whitespaces_or_comment(input: &str) -> IResult<&str, Vec<&str>> {
    many0(alt((multispace1, comment)))(input)
}
// Identifier: a leading alphabetic char or '_', then alphanumerics or '_'.
fn identifier(input: &str) -> IResult<&str, String> {
    let alpha_underscore = |c: char| c.is_alphabetic() || c == '_';
    let alphanumeric_underscore = |c: char| c.is_alphanumeric() || c == '_';
    map(
        tuple((
            take_while1(alpha_underscore),
            take_while(alphanumeric_underscore),
        )),
        |(s1, s2): (&str, &str)| s1.to_string() + s2,
    )(input)
}
fn register(input: &str) -> IResult<&str, ir::Register> {
    map(identifier, ir::Register::new)(input)
}
fn dec_num(input: &str) -> IResult<&str, u64> {
    map_res(digit1, FromStr::from_str)(input)
}
// Hex literal with mandatory "0x" prefix.
fn hex_num(input: &str) -> IResult<&str, u64> {
    let from_str = |s: &str| u64::from_str_radix(s, 16);
    map_res(preceded(tag("0x"), hex_digit1), from_str)(input)
}
// Hex must be tried first: dec_num would otherwise consume the leading "0"
// of "0x2a" and leave "x2a" unparsed.
fn numeric(input: &str) -> IResult<&str, u64> {
    alt((hex_num, dec_num))(input)
}
fn number_literal(input: &str) -> IResult<&str, ir::Expression> {
    map(numeric, ir::Expression::NumberLiteral)(input)
}
fn register_ref(input: &str) -> IResult<&str, ir::Expression> {
    map(register, ir::Expression::RegisterRef)(input)
}
// Unary '-' (negate) or '~' (bitwise not) applied to a simple expression.
fn unary_expression(input: &str) -> IResult<&str, ir::Expression> {
    let operator = alt((
        value(ir::UnaryOperator::Neg, char('-')),
        value(ir::UnaryOperator::Not, char('~')),
    ));
    map(
        tuple((operator, preceded(space0, simple_expression))),
        |(op, expr)| ir::Expression::Unary {
            op,
            expr: Box::new(expr),
        },
    )(input)
}
// Defines a parser `$func` for "<operand> <operator> <operand>" with optional
// spaces around the operator. Precedence comes from which operand parsers each
// level accepts: tighter levels are listed as operands of looser ones.
macro_rules! binary_expression {
    ($func:ident, $operator:expr, $operand:expr) => {
        fn $func(input: &str) -> IResult<&str, ir::Expression> {
            map(
                tuple((
                    $operand,
                    preceded(space0, $operator),
                    preceded(space0, $operand),
                )),
                |(lhs, op, rhs)| ir::Expression::Binary {
                    op,
                    lhs: Box::new(lhs),
                    rhs: Box::new(rhs),
                },
            )(input)
        }
    };
}
// Tightest binary level: bitwise ops. ">>>" must be tried before ">>".
binary_expression!(
    bitwise_expression,
    alt((
        value(ir::BinaryOperator::And, tag("/\\")),
        value(ir::BinaryOperator::Or, tag("\\/")),
        value(ir::BinaryOperator::Xor, char('#')),
        value(ir::BinaryOperator::Shl, tag("<<")),
        value(ir::BinaryOperator::AShr, tag(">>>")),
        value(ir::BinaryOperator::LShr, tag(">>")),
    )),
    simple_expression
);
// Mul/div binds looser than bitwise here — "1 << 2 * 3 << 4" parses as
// "(1 << 2) * (3 << 4)"; see the precedence tests at the bottom of the file.
binary_expression!(
    mul_div_expression,
    alt((
        value(ir::BinaryOperator::Mul, char('*')),
        value(ir::BinaryOperator::UDiv, char('/')),
    )),
    alt((bitwise_expression, simple_expression))
);
binary_expression!(
    add_sub_expression,
    alt((
        value(ir::BinaryOperator::Add, char('+')),
        value(ir::BinaryOperator::Sub, char('-')),
    )),
    alt((mul_div_expression, bitwise_expression, simple_expression))
);
// Loosest level: comparisons. "<=" / ">=" must be tried before "<" / ">".
binary_expression!(
    compare_expression,
    alt((
        value(ir::BinaryOperator::r#Eq, char('=')),
        value(ir::BinaryOperator::Neq, tag("\\=")),
        value(ir::BinaryOperator::SLe, tag("<=")),
        value(ir::BinaryOperator::SLt, char('<')),
        value(ir::BinaryOperator::SGe, tag(">=")),
        value(ir::BinaryOperator::SGt, char('>')),
    )),
    alt((
        add_sub_expression,
        mul_div_expression,
        bitwise_expression,
        simple_expression,
    ))
);
// Function-call syntax for operators with no infix form, e.g. "ult(a, b)".
fn binary_function(input: &str) -> IResult<&str, ir::Expression> {
    let function = alt((
        value(ir::BinaryOperator::ULe, tag("ule")),
        value(ir::BinaryOperator::ULt, tag("ult")),
        value(ir::BinaryOperator::UGe, tag("uge")),
        value(ir::BinaryOperator::UGt, tag("ugt")),
        value(ir::BinaryOperator::And, tag("and")),
        value(ir::BinaryOperator::Or, tag("or")),
        value(ir::BinaryOperator::Xor, tag("xor")),
        value(ir::BinaryOperator::URem, tag("rem")),
        value(ir::BinaryOperator::SRem, tag("srem")),
        value(ir::BinaryOperator::SMod, tag("mod")),
    ));
    map(
        tuple((
            function,
            char('('),
            preceded(space0, expression),
            preceded(space0, char(',')),
            preceded(space0, expression),
            preceded(space0, char(')')),
        )),
        |(op, _, lhs, _, rhs, _)| ir::Expression::Binary {
            op,
            lhs: Box::new(lhs),
            rhs: Box::new(rhs),
        },
    )(input)
}
// A parenthesized expression: "( <expr> )".
fn clasped_expression(input: &str) -> IResult<&str, ir::Expression> {
    map(
        tuple((
            char('('),
            preceded(space0, expression),
            preceded(space0, char(')')),
        )),
        |(_, e, _)| e,
    )(input)
}
// If-then-else expression: "ite(<cond>, <then>, <else>)".
fn conditional_expression(input: &str) -> IResult<&str, ir::Expression> {
    map(
        tuple((
            tag("ite("),
            preceded(space0, expression),
            preceded(space0, char(',')),
            preceded(space0, expression),
            preceded(space0, char(',')),
            preceded(space0, expression),
            preceded(space0, char(')')),
        )),
        |(_, cond, _, then, _, r#else, _)| ir::Expression::Conditional {
            cond: Box::new(cond),
            then: Box::new(then),
            r#else: Box::new(r#else),
        },
    )(input)
}
// Atoms usable as operands. Order matters: "ite(...)" and function forms must
// be tried before `register_ref`, which would otherwise consume the bare
// keyword as a register name.
fn simple_expression(input: &str) -> IResult<&str, ir::Expression> {
    alt((
        clasped_expression,
        conditional_expression,
        binary_function,
        unary_expression,
        register_ref,
        number_literal,
    ))(input)
}
// Full expression grammar, loosest precedence level tried first.
fn expression(input: &str) -> IResult<&str, ir::Expression> {
    alt((
        compare_expression,
        add_sub_expression,
        mul_div_expression,
        bitwise_expression,
        simple_expression,
    ))(input)
}
// A label is an identifier followed by ':'.
fn label(input: &str) -> IResult<&str, String> {
    terminated(identifier, char(':'))(input)
}
// Jump/branch target: a numeric location or a label name.
fn target(input: &str) -> IResult<&str, ir::Target> {
    alt((
        map(numeric, ir::Target::Location),
        map(identifier, ir::Target::Label),
    ))(input)
}
fn skip_instruction(input: &str) -> IResult<&str, ir::Instruction> {
    value(ir::Instruction::skip(), tag("skip"))(input)
}
// "spbarr": speculation barrier.
fn barrier_instruction(input: &str) -> IResult<&str, ir::Instruction> {
    value(ir::Instruction::barrier(), tag("spbarr"))(input)
}
fn flush_instruction(input: &str) -> IResult<&str, ir::Instruction> {
    value(ir::Instruction::flush(), tag("flush"))(input)
}
// "<reg> <- <expr>"
fn assignment_instruction(input: &str) -> IResult<&str, ir::Instruction> {
    map(
        tuple((
            register,
            preceded(space0, tag("<-")),
            preceded(space0, expression),
        )),
        |(reg, _, expr)| ir::Instruction::assign(reg, expr),
    )(input)
}
// "cmov <cond>, <reg> <- <expr>": conditional assignment.
fn conditional_assignment_instruction(input: &str) -> IResult<&str, ir::Instruction> {
    map(
        tuple((
            tag("cmov"),
            preceded(space1, expression),
            preceded(space0, char(',')),
            preceded(space0, register),
            preceded(space0, tag("<-")),
            preceded(space0, expression),
        )),
        |(_, cond, _, reg, _, expr)| ir::Instruction::assign_if(cond, reg, expr),
    )(input)
}
// "load <reg>, <address-expr>"
fn load_instruction(input: &str) -> IResult<&str, ir::Instruction> {
    map(
        tuple((
            tag("load"),
            preceded(space1, register),
            preceded(space0, char(',')),
            preceded(space0, expression),
        )),
        |(_, reg, _, addr)| ir::Instruction::load(reg, addr),
    )(input)
}
// "store <reg>, <address-expr>"
fn store_instruction(input: &str) -> IResult<&str, ir::Instruction> {
    map(
        tuple((
            tag("store"),
            preceded(space1, register),
            preceded(space0, char(',')),
            preceded(space0, expression),
        )),
        |(_, reg, _, addr)| ir::Instruction::store(reg, addr),
    )(input)
}
// "jmp <target>"
fn jump_instruction(input: &str) -> IResult<&str, ir::Instruction> {
    map(
        tuple((tag("jmp"), preceded(space1, target))),
        |(_, target)| ir::Instruction::jump(target),
    )(input)
}
// "beqz <reg>, <target>": branch to target when the register is zero.
fn branch_if_zero_instruction(input: &str) -> IResult<&str, ir::Instruction> {
    map(
        tuple((
            tag("beqz"),
            preceded(space1, register),
            preceded(space0, char(',')),
            preceded(space0, target),
        )),
        |(_, reg, _, target)| ir::Instruction::branch_if_zero(reg, target),
    )(input)
}
// Dispatch over all instruction forms. `assignment_instruction` safely fails
// over for keyword forms because it requires "<-" after the identifier.
fn instruction(input: &str) -> IResult<&str, ir::Instruction> {
    alt((
        skip_instruction,
        barrier_instruction,
        flush_instruction,
        assignment_instruction,
        conditional_assignment_instruction,
        load_instruction,
        store_instruction,
        jump_instruction,
        branch_if_zero_instruction,
    ))(input)
}
// "<label>: <instruction>" — parses the instruction and attaches the label.
fn labeled_instruction(input: &str) -> IResult<&str, ir::Instruction> {
    map(
        tuple((label, preceded(whitespaces_or_comment, instruction))),
        |(lbl, mut inst)| {
            inst.set_label(lbl);
            inst
        },
    )(input)
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn parse_identifier() {
assert_eq!(identifier("rax"), Ok(("", "rax".to_string())));
assert_eq!(identifier("rax%"), Ok(("%", "rax".to_string())));
assert_eq!(identifier("r3"), Ok(("", "r3".to_string())));
assert_eq!(identifier("r_3"), Ok(("", "r_3".to_string())));
assert_eq!(identifier("r3&"), Ok(("&", "r3".to_string())));
}
#[test]
fn parse_dec_num() {
assert_eq!(dec_num("0"), Ok(("", 0)));
assert_eq!(dec_num("3"), Ok(("", 3)));
assert_eq!(dec_num("42"), Ok(("", 42)));
}
#[test]
fn parse_hex_num() {
assert_eq!(hex_num("0x0"), Ok(("", 0)));
assert_eq!(hex_num("0xC0ffee"), Ok(("", 12_648_430)));
}
#[test]
fn parse_number_literal() {
assert_eq!(
expression("042"),
Ok(("", ir::Expression::NumberLiteral(42)))
);
assert_eq!(
expression("0x42"),
Ok(("", ir::Expression::NumberLiteral(66)))
);
}
#[test]
fn parse_register_ref() {
assert_eq!(
expression("rax"),
Ok((
"",
ir::Expression::RegisterRef(ir::Register::new("rax".to_string()))
))
);
}
macro_rules! parse_unary_expressions {
($($name:ident: $value:expr,)*) => {
$(
#[test]
fn $name() {
let (op_text, op_type) = $value;
let input = format!("{} 42", op_text);
assert_eq!(
expression(&input),
Ok((
"",
ir::Expression::Unary {
expr: Box::new(ir::Expression::NumberLiteral(42)),
op: op_type,
}
))
);
}
)*
}
}
parse_unary_expressions! {
parse_unary_expression_neg: ("-", ir::UnaryOperator::Neg),
parse_unary_expression_not: ("~", ir::UnaryOperator::Not),
}
macro_rules! parse_binary_expressions {
($($name:ident: $value:expr,)*) => {
$(
#[test]
fn $name() {
let (op_text, op_type) = $value;
let input = format!("42 {} x", op_text);
assert_eq!(
expression(&input),
Ok((
"",
ir::Expression::Binary {
lhs: Box::new(ir::Expression::NumberLiteral(42)),
rhs: Box::new(ir::Expression::RegisterRef(ir::Register::new("x".to_string()))),
op: op_type,
}
))
);
}
)*
}
}
parse_binary_expressions! {
parse_compare_expression_eq: ("=", ir::BinaryOperator::r#Eq),
parse_compare_expression_neq: ("\\=", ir::BinaryOperator::Neq),
parse_compare_expression_sle: ("<=", ir::BinaryOperator::SLe),
parse_compare_expression_slt: ("<", ir::BinaryOperator::SLt),
parse_compare_expression_sge: (">=", ir::BinaryOperator::SGe),
parse_compare_expression_sgt: (">", ir::BinaryOperator::SGt),
parse_bitwise_expression_and: ("/\\", ir::BinaryOperator::And),
parse_bitwise_expression_or: ("\\/", ir::BinaryOperator::Or),
parse_bitwise_expression_xor: ("#", ir::BinaryOperator::Xor),
parse_bitwise_expression_shl: ("<<", ir::BinaryOperator::Shl),
parse_bitwise_expression_ashr: (">>>", ir::BinaryOperator::AShr),
parse_bitwise_expression_lshr: (">>", ir::BinaryOperator::LShr),
parse_arithmetic_expression_add: ("+", ir::BinaryOperator::Add),
parse_arithmetic_expression_sub: ("-", ir::BinaryOperator::Sub),
parse_arithmetic_expression_mul: ("*", ir::BinaryOperator::Mul),
parse_arithmetic_expression_udiv: ("/", ir::BinaryOperator::UDiv),
}
macro_rules! parse_binary_functions {
($($name:ident: $value:expr,)*) => {
$(
#[test]
fn $name() {
let (op_text, op_type) = $value;
let input = format!("{}(42, x)", op_text);
assert_eq!(
expression(&input),
Ok((
"",
ir::Expression::Binary {
lhs: Box::new(ir::Expression::NumberLiteral(42)),
rhs: Box::new(ir::Expression::RegisterRef(ir::Register::new("x".to_string()))),
op: op_type,
}
))
);
}
)*
}
}
parse_binary_functions! {
parse_binary_function_and: ("and", ir::BinaryOperator::And),
parse_binary_function_or: ("or", ir::BinaryOperator::Or),
parse_binary_function_xor: ("xor", ir::BinaryOperator::Xor),
parse_binary_function_urem: ("rem", ir::BinaryOperator::URem),
parse_binary_function_srem: ("srem", ir::BinaryOperator::SRem),
parse_binary_function_smod: ("mod", ir::BinaryOperator::SMod),
parse_binary_function_ule: ("ule", ir::BinaryOperator::ULe),
parse_binary_function_ult: ("ult", ir::BinaryOperator::ULt),
parse_binary_function_uge: ("uge", ir::BinaryOperator::UGe),
parse_binary_function_ugt: ("ugt", ir::BinaryOperator::UGt),
}
#[test]
fn parse_clasped_expression() {
assert_eq!(
expression("(1)"),
Ok(("", ir::Expression::NumberLiteral(1)))
);
}
#[test]
fn parse_conditional_expression() {
assert_eq!(
expression("ite(1, 2, 3)"),
Ok((
"",
ir::Expression::Conditional {
cond: Box::new(ir::Expression::NumberLiteral(1)),
then: Box::new(ir::Expression::NumberLiteral(2)),
r#else: Box::new(ir::Expression::NumberLiteral(3)),
}
))
);
}
#[test]
fn check_operator_precedence_bitwise_mul() {
assert_eq!(
expression("1 << 2 * 3 << 4"),
Ok((
"",
ir::Expression::Binary {
lhs: Box::new(ir::Expression::Binary {
lhs: Box::new(ir::Expression::NumberLiteral(1)),
rhs: Box::new(ir::Expression::NumberLiteral(2)),
op: ir::BinaryOperator::Shl,
}),
rhs: Box::new(ir::Expression::Binary {
lhs: Box::new(ir::Expression::NumberLiteral(3)),
rhs: Box::new(ir::Expression::NumberLiteral(4)),
op: ir::BinaryOperator::Shl,
}),
op: ir::BinaryOperator::Mul,
}
))
);
}
#[test]
fn check_operator_precedence_mul_add() {
    // Multiplication binds tighter than addition:
    // "1 * 2 + 3 * 4" parses as "(1 * 2) + (3 * 4)".
    assert_eq!(
        expression("1 * 2 + 3 * 4"),
        Ok((
            "",
            ir::Expression::Binary {
                lhs: Box::new(ir::Expression::Binary {
                    lhs: Box::new(ir::Expression::NumberLiteral(1)),
                    rhs: Box::new(ir::Expression::NumberLiteral(2)),
                    op: ir::BinaryOperator::Mul,
                }),
                rhs: Box::new(ir::Expression::Binary {
                    lhs: Box::new(ir::Expression::NumberLiteral(3)),
                    rhs: Box::new(ir::Expression::NumberLiteral(4)),
                    op: ir::BinaryOperator::Mul,
                }),
                op: ir::BinaryOperator::Add,
            }
        ))
    );
}
#[test]
fn check_operator_precedence_add_compare() {
    // Addition binds tighter than comparison:
    // "1 + 2 = 3 + 4" parses as "(1 + 2) = (3 + 4)".
    assert_eq!(
        expression("1 + 2 = 3 + 4"),
        Ok((
            "",
            ir::Expression::Binary {
                lhs: Box::new(ir::Expression::Binary {
                    lhs: Box::new(ir::Expression::NumberLiteral(1)),
                    rhs: Box::new(ir::Expression::NumberLiteral(2)),
                    op: ir::BinaryOperator::Add,
                }),
                rhs: Box::new(ir::Expression::Binary {
                    lhs: Box::new(ir::Expression::NumberLiteral(3)),
                    rhs: Box::new(ir::Expression::NumberLiteral(4)),
                    op: ir::BinaryOperator::Add,
                }),
                op: ir::BinaryOperator::r#Eq,
            }
        ))
    );
}
#[test]
fn parse_skip_instruction() {
    // "skip" is the no-op instruction.
    assert_eq!(instruction("skip"), Ok(("", ir::Instruction::skip())));
}
#[test]
fn parse_barrier_instruction() {
    // "spbarr" is the mnemonic for the (speculation) barrier instruction.
    assert_eq!(instruction("spbarr"), Ok(("", ir::Instruction::barrier())));
}
#[test]
fn parse_flush_instruction() {
    assert_eq!(instruction("flush"), Ok(("", ir::Instruction::flush())));
}
#[test]
fn parse_assignment_instruction() {
    // "reg <- expr" parses into an assignment instruction.
    assert_eq!(
        instruction("x <- 42"),
        Ok((
            "",
            ir::Instruction::assign(
                ir::Register::new("x".to_string()),
                ir::Expression::NumberLiteral(42)
            )
        ))
    );
}
#[test]
fn parse_conditional_assignment_instruction() {
    // "cmov cond, reg <- expr" assigns conditionally (assign_if).
    assert_eq!(
        instruction("cmov 0, x <- 42"),
        Ok((
            "",
            ir::Instruction::assign_if(
                ir::Expression::NumberLiteral(0),
                ir::Register::new("x".to_string()),
                ir::Expression::NumberLiteral(42)
            )
        ))
    );
}
#[test]
fn parse_load_instruction() {
    // "load reg, addr-expr"
    assert_eq!(
        instruction("load x, 42"),
        Ok((
            "",
            ir::Instruction::load(
                ir::Register::new("x".to_string()),
                ir::Expression::NumberLiteral(42)
            )
        ))
    );
}
#[test]
fn parse_store_instruction() {
    // "store reg, addr-expr"
    assert_eq!(
        instruction("store x, 42"),
        Ok((
            "",
            ir::Instruction::store(
                ir::Register::new("x".to_string()),
                ir::Expression::NumberLiteral(42)
            )
        ))
    );
}
#[test]
fn parse_jump_instruction() {
    // Numeric jump targets become `Target::Location`, identifiers become
    // `Target::Label`.
    assert_eq!(
        instruction("jmp 42"),
        Ok(("", ir::Instruction::jump(ir::Target::Location(42))))
    );
    assert_eq!(
        instruction("jmp lbl"),
        Ok((
            "",
            ir::Instruction::jump(ir::Target::Label("lbl".to_string()))
        ))
    );
}
#[test]
fn parse_branch_if_zero_instruction() {
    // "beqz reg, target" supports both numeric locations and labels.
    assert_eq!(
        instruction("beqz x, 42"),
        Ok((
            "",
            ir::Instruction::branch_if_zero(
                ir::Register::new("x".to_string()),
                ir::Target::Location(42)
            )
        ))
    );
    assert_eq!(
        instruction("beqz x, lbl"),
        Ok((
            "",
            ir::Instruction::branch_if_zero(
                ir::Register::new("x".to_string()),
                ir::Target::Label("lbl".to_string())
            )
        ))
    );
}
#[test]
fn parse_label() {
    // A label is an identifier terminated by ':'; the colon is consumed.
    assert_eq!(label("end:"), Ok(("", "end".to_string())));
}
#[test]
fn parse_labeled_instruction_with_space_between() {
    // A label followed by an instruction attaches the label to it.
    let mut expected_inst = ir::Instruction::skip();
    expected_inst.set_label("Then".to_string());
    assert_eq!(labeled_instruction("Then: skip"), Ok(("", expected_inst)));
}
#[test]
fn parse_labeled_instruction_with_newline_between() {
    // The label may also be separated from its instruction by a newline.
    let mut expected_inst = ir::Instruction::skip();
    expected_inst.set_label("Then".to_string());
    assert_eq!(labeled_instruction("Then:\n skip"), Ok(("", expected_inst)));
}
#[test]
fn parse_well_formatted_program_with_single_instruction() {
    // Fix: this test previously parsed the same two-instruction program as
    // `parse_well_formatted_program_with_two_instructions`, so it was an
    // exact duplicate and the single-instruction case was never covered.
    // It now parses exactly one instruction, which must receive address 0.
    assert_eq!(
        parse_program("beqz x, 42"),
        Ok(ir::Program::new(vec![{
            let mut inst = ir::Instruction::branch_if_zero(
                ir::Register::new("x".to_string()),
                ir::Target::Location(42),
            );
            inst.set_address(0);
            inst
        }]))
    );
}
#[test]
fn parse_well_formatted_program_with_two_instructions() {
    // Two newline-separated instructions receive consecutive addresses 0, 1.
    assert_eq!(
        parse_program("beqz x, 42\nstore x, 42"),
        Ok(ir::Program::new(vec![
            {
                let mut inst = ir::Instruction::branch_if_zero(
                    ir::Register::new("x".to_string()),
                    ir::Target::Location(42),
                );
                inst.set_address(0);
                inst
            },
            {
                let mut inst = ir::Instruction::store(
                    ir::Register::new("x".to_string()),
                    ir::Expression::NumberLiteral(42),
                );
                inst.set_address(1);
                inst
            },
        ]))
    );
}
#[test]
fn parse_program_with_multiple_newlines() {
    // Blank lines between instructions are ignored and do not affect the
    // assigned addresses.
    assert_eq!(
        parse_program("beqz x, 42\n\n\nstore x, 42"),
        Ok(ir::Program::new(vec![
            {
                let mut inst = ir::Instruction::branch_if_zero(
                    ir::Register::new("x".to_string()),
                    ir::Target::Location(42),
                );
                inst.set_address(0);
                inst
            },
            {
                let mut inst = ir::Instruction::store(
                    ir::Register::new("x".to_string()),
                    ir::Expression::NumberLiteral(42),
                );
                inst.set_address(1);
                inst
            },
        ]))
    );
}
#[test]
fn parse_program_with_leading_newline() {
    // A leading newline before the first instruction is tolerated.
    assert_eq!(
        parse_program("\nbeqz x, 42"),
        Ok(ir::Program::new(vec![{
            let mut inst = ir::Instruction::branch_if_zero(
                ir::Register::new("x".to_string()),
                ir::Target::Location(42),
            );
            inst.set_address(0);
            inst
        }]))
    );
}
#[test]
fn parse_program_with_trailing_newline() {
    // A trailing newline after the last instruction is tolerated.
    assert_eq!(
        parse_program("beqz x, 42\n"),
        Ok(ir::Program::new(vec![{
            let mut inst = ir::Instruction::branch_if_zero(
                ir::Register::new("x".to_string()),
                ir::Target::Location(42),
            );
            inst.set_address(0);
            inst
        }]))
    );
}
#[test]
fn parse_program_with_spaces_and_tabs() {
    // Leading/trailing spaces and tabs around instructions and blank lines
    // are all skipped.
    assert_eq!(
        parse_program(" \tbeqz x, 42\t\n \n\n store x, 42 "),
        Ok(ir::Program::new(vec![
            {
                let mut inst = ir::Instruction::branch_if_zero(
                    ir::Register::new("x".to_string()),
                    ir::Target::Location(42),
                );
                inst.set_address(0);
                inst
            },
            {
                let mut inst = ir::Instruction::store(
                    ir::Register::new("x".to_string()),
                    ir::Expression::NumberLiteral(42),
                );
                inst.set_address(1);
                inst
            },
        ]))
    );
}
#[test]
fn parse_test_program() {
    // End-to-end parse of a small multi-instruction program, checking
    // operand expression trees (`<` maps to SLt, `+` binds the Shl subtree
    // on the right) and sequential address assignment.
    let src = r#"
cond <- x < array1_len
beqz cond, 5
load v, array1 + x
load tmp, array2 + v << 8
"#;
    assert_eq!(
        parse_program(src),
        Ok(ir::Program::new(vec![
            {
                let mut inst = ir::Instruction::assign(
                    ir::Register::new("cond".to_string()),
                    ir::Expression::Binary {
                        op: ir::BinaryOperator::SLt,
                        lhs: Box::new(ir::Expression::RegisterRef(ir::Register::new(
                            "x".to_string(),
                        ))),
                        rhs: Box::new(ir::Expression::RegisterRef(ir::Register::new(
                            "array1_len".to_string(),
                        ))),
                    },
                );
                inst.set_address(0);
                inst
            },
            {
                let mut inst = ir::Instruction::branch_if_zero(
                    ir::Register::new("cond".to_string()),
                    ir::Target::Location(5),
                );
                inst.set_address(1);
                inst
            },
            {
                let mut inst = ir::Instruction::load(
                    ir::Register::new("v".to_string()),
                    ir::Expression::Binary {
                        op: ir::BinaryOperator::Add,
                        lhs: Box::new(ir::Expression::RegisterRef(ir::Register::new(
                            "array1".to_string(),
                        ))),
                        rhs: Box::new(ir::Expression::RegisterRef(ir::Register::new(
                            "x".to_string(),
                        ))),
                    },
                );
                inst.set_address(2);
                inst
            },
            {
                // "array2 + v << 8" parses as "array2 + (v << 8)".
                let mut inst = ir::Instruction::load(
                    ir::Register::new("tmp".to_string()),
                    ir::Expression::Binary {
                        op: ir::BinaryOperator::Add,
                        lhs: Box::new(ir::Expression::RegisterRef(ir::Register::new(
                            "array2".to_string(),
                        ))),
                        rhs: Box::new(ir::Expression::Binary {
                            op: ir::BinaryOperator::Shl,
                            lhs: Box::new(ir::Expression::RegisterRef(ir::Register::new(
                                "v".to_string(),
                            ))),
                            rhs: Box::new(ir::Expression::NumberLiteral(8)),
                        }),
                    },
                );
                inst.set_address(3);
                inst
            },
        ]))
    );
}
#[test]
fn parse_test_program_with_labels() {
    // Like `parse_test_program`, but with labels: a label on its own line
    // attaches to the *following* instruction ("Then" -> the first load,
    // "EndIf" -> the trailing skip); labels do not consume an address.
    let src = r#"
cond <- x < array1_len
beqz cond, EndIf
Then:
load v, array1 + x
load tmp, array2 + v << 8
EndIf:
skip
"#;
    let program = ir::Program::new(vec![
        {
            let mut inst = ir::Instruction::assign(
                ir::Register::new("cond".to_string()),
                ir::Expression::Binary {
                    op: ir::BinaryOperator::SLt,
                    lhs: Box::new(ir::Expression::RegisterRef(ir::Register::new(
                        "x".to_string(),
                    ))),
                    rhs: Box::new(ir::Expression::RegisterRef(ir::Register::new(
                        "array1_len".to_string(),
                    ))),
                },
            );
            inst.set_address(0);
            inst
        },
        {
            let mut inst = ir::Instruction::branch_if_zero(
                ir::Register::new("cond".to_string()),
                ir::Target::Label("EndIf".to_string()),
            );
            inst.set_address(1);
            inst
        },
        {
            let mut inst = ir::Instruction::load(
                ir::Register::new("v".to_string()),
                ir::Expression::Binary {
                    op: ir::BinaryOperator::Add,
                    lhs: Box::new(ir::Expression::RegisterRef(ir::Register::new(
                        "array1".to_string(),
                    ))),
                    rhs: Box::new(ir::Expression::RegisterRef(ir::Register::new(
                        "x".to_string(),
                    ))),
                },
            );
            inst.set_address(2);
            inst.set_label("Then".to_string());
            inst
        },
        {
            let mut inst = ir::Instruction::load(
                ir::Register::new("tmp".to_string()),
                ir::Expression::Binary {
                    op: ir::BinaryOperator::Add,
                    lhs: Box::new(ir::Expression::RegisterRef(ir::Register::new(
                        "array2".to_string(),
                    ))),
                    rhs: Box::new(ir::Expression::Binary {
                        op: ir::BinaryOperator::Shl,
                        lhs: Box::new(ir::Expression::RegisterRef(ir::Register::new(
                            "v".to_string(),
                        ))),
                        rhs: Box::new(ir::Expression::NumberLiteral(8)),
                    }),
                },
            );
            inst.set_address(3);
            inst
        },
        {
            let mut inst = ir::Instruction::skip();
            inst.set_address(4);
            inst.set_label("EndIf".to_string());
            inst
        },
    ]);
    assert_eq!(parse_program(src), Ok(program));
}
#[test]
fn parse_erroneous_program() {
    // Unknown mnemonics are rejected with the generic parse-failure message.
    let src = r#"
unknowninstruction
"#;
    assert_eq!(parse_program(src), Err("Failed to parse program!"));
}
#[test]
fn parse_empty_program() {
    // The empty string parses to an empty program, not an error.
    assert_eq!(parse_program(""), Ok(ir::Program::new(vec![])));
}
#[test]
fn parse_program_with_single_comment() {
    // '%' starts a comment; a comment-only source is an empty program.
    assert_eq!(parse_program("% comment"), Ok(ir::Program::new(vec![])));
}
#[test]
fn parse_test_program_with_comments() {
    // '%' comments may appear on their own lines or trail an instruction;
    // all are stripped and do not affect addressing.
    let src = r#"
% test program
% start
c <- x < y
beqz c, 3 % jump to end
skip
% end
"#;
    assert_eq!(
        parse_program(src),
        Ok(ir::Program::new(vec![
            {
                let mut inst = ir::Instruction::assign(
                    ir::Register::new("c".to_string()),
                    ir::Expression::Binary {
                        op: ir::BinaryOperator::SLt,
                        lhs: Box::new(ir::Expression::RegisterRef(ir::Register::new(
                            "x".to_string(),
                        ))),
                        rhs: Box::new(ir::Expression::RegisterRef(ir::Register::new(
                            "y".to_string(),
                        ))),
                    },
                );
                inst.set_address(0);
                inst
            },
            {
                let mut inst = ir::Instruction::branch_if_zero(
                    ir::Register::new("c".to_string()),
                    ir::Target::Location(3),
                );
                inst.set_address(1);
                inst
            },
            {
                let mut inst = ir::Instruction::skip();
                inst.set_address(2);
                inst
            },
        ]))
    );
}
}
|
//! Wrapper types for *fusing* guard instances and protected pointers or
//! references.
use core::convert::TryInto;
use core::fmt;
use core::mem;
use conquer_pointer::{MarkedNonNull, MarkedPtr, Null};
use crate::{Protect, Protected, Shared};
// *************************************************************************************************
// FusedProtected
// *************************************************************************************************
/// An owned guard fused with a (nullable) [`Protected`] pointer.
pub struct FusedProtected<T, G, const N: usize> {
    /// The owned guard.
    pub(crate) guard: G,
    /// The protected pointer.
    // Invariant relied on by `as_protected`: `protected` is either null or
    // currently protected by `guard` — all constructors must uphold this.
    pub(crate) protected: MarkedPtr<T, N>,
}
/********** impl inherent *************************************************************************/
impl<T, G: Protect<T>, const N: usize> FusedProtected<T, G, N> {
    /// Creates a new [`FusedProtected`] from `guard` with a `null` pointer.
    #[inline]
    pub fn null(guard: G) -> Self {
        Self { guard, protected: MarkedPtr::null() }
    }
    /// Returns the inner [`Protected`] pointer.
    // SAFETY of the internal call: sound as long as `self.protected` is
    // null or protected by `self.guard` (the struct's construction
    // invariant).
    #[inline]
    pub fn as_protected(&self) -> Protected<T, G::Reclaim, N> {
        unsafe { Protected::from_marked_ptr(self.protected) }
    }
    /// Attempts to convert `self` into a [`FusedShared`].
    ///
    /// # Errors
    ///
    /// Fails, if `self` holds a `null` pointer, in which case an [`Err`] with
    /// the original value and a [`Null`] instance is returned.
    #[inline]
    pub fn into_fused_shared(self) -> Result<FusedShared<T, G, N>, (Self, Null)> {
        // `try_into` on the `MarkedPtr` succeeds only for non-null pointers.
        match self.protected.try_into() {
            Ok(shared) => Ok(FusedShared { guard: self.guard, shared }),
            Err(null) => Err((self, null)),
        }
    }
    /// Consumes `self` and returns the contained guard instance, forfeiting its
    /// currently protected value.
    #[inline]
    pub fn into_guard(self) -> G {
        self.guard
    }
}
/********** impl Debug ****************************************************************************/
impl<T, G: Protect<T>, const N: usize> fmt::Debug for FusedProtected<T, G, N> {
    /// Deliberately opaque: neither the guard nor the raw pointer is shown.
    #[inline]
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str("FusedProtected { ... }")
    }
}
// *************************************************************************************************
// FusedProtectedRef
// *************************************************************************************************
/// A borrowed guard fused with a (nullable) [`Protected`] pointer.
pub struct FusedProtectedRef<'g, T, G, const N: usize> {
    /// Exclusive borrow of the guard backing `protected`.
    pub(crate) guard: &'g mut G,
    /// The protected (possibly null) pointer.
    pub(crate) protected: MarkedPtr<T, N>,
}
/********** impl inherent *************************************************************************/
impl<'g, T, G: Protect<T>, const N: usize> FusedProtectedRef<'g, T, G, N> {
    /// Creates a new [`FusedProtectedRef`] from `guard` with a `null` pointer.
    #[inline]
    pub fn null(guard: &'g mut G) -> Self {
        Self { guard, protected: MarkedPtr::null() }
    }
    /// Returns the inner [`Protected`] pointer.
    // Sound under the same invariant as `FusedProtected::as_protected`:
    // `protected` is null or protected by `*guard`.
    #[inline]
    pub fn as_protected(&self) -> Protected<T, G::Reclaim, N> {
        unsafe { Protected::from_marked_ptr(self.protected) }
    }
    /// Attempts to convert `self` into a [`FusedSharedRef`].
    ///
    /// # Errors
    ///
    /// Fails if `self` holds a `null` pointer; the original value and a
    /// [`Null`] instance are returned in the `Err` variant.
    #[inline]
    pub fn into_fused_shared_ref(self) -> Result<FusedSharedRef<'g, T, G, N>, (Self, Null)> {
        match self.protected.try_into() {
            Ok(shared) => Ok(FusedSharedRef { guard: self.guard, shared }),
            Err(null) => Err((self, null)),
        }
    }
    /// Consumes `self` and returns the borrowed guard, forfeiting the
    /// currently protected value.
    #[inline]
    pub fn into_guard_ref(self) -> &'g mut G {
        self.guard
    }
}
/********** impl Debug ****************************************************************************/
impl<T, G: Protect<T>, const N: usize> fmt::Debug for FusedProtectedRef<'_, T, G, N> {
    /// Deliberately opaque: neither the guard nor the raw pointer is shown.
    #[inline]
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str("FusedProtectedRef { ... }")
    }
}
// *************************************************************************************************
// FusedShared
// *************************************************************************************************
/// An owned guard fused with a (non-nullable) [`Shared`] reference.
pub struct FusedShared<T, G, const N: usize> {
    /// The owned guard protecting `shared`.
    pub(crate) guard: G,
    /// The protected non-null marked pointer.
    pub(crate) shared: MarkedNonNull<T, N>,
}
/********** impl inherent *************************************************************************/
impl<T, G: Protect<T>, const N: usize> FusedShared<T, G, N> {
    /// Re-assembles `other` into a fused pair, swapping `guard` through it.
    ///
    /// After the swap the returned [`FusedShared`] still holds the guard that
    /// originally accompanied `other`'s pointer, and the second tuple element
    /// is the caller-supplied `guard` handed back.
    // NOTE(review): the swap does not change which guard ends up fused with
    // the pointer — confirm this matches the intended "adopt" semantics of
    // the `Protect` contract.
    #[inline]
    pub fn adopt(mut guard: G, mut other: FusedShared<T, G, N>) -> (Self, G) {
        mem::swap(&mut guard, &mut other.guard);
        (Self { guard, shared: other.shared }, other.guard)
    }
    /// Returns the inner [`Shared`] reference.
    // Sound while `guard` protects `shared` (the fused-pair invariant).
    #[inline]
    pub fn as_shared(&self) -> Shared<T, G::Reclaim, N> {
        unsafe { Shared::from_marked_non_null(self.shared) }
    }
    /// Swaps the fused guard with `guard` and returns the re-assembled pair
    /// together with the guard swapped out of the fused slot.
    // NOTE(review): as with `adopt`, the pointer stays fused with its
    // original guard value; the caller's `guard` is returned back — verify
    // this is the intended "transfer" behavior.
    #[inline]
    pub fn transfer_to(mut self, mut guard: G) -> (Self, G) {
        mem::swap(&mut self.guard, &mut guard);
        (Self { guard, shared: self.shared }, self.guard)
    }
    /// Moves the fused guard into the borrowed `guard` slot and returns a
    /// [`FusedSharedRef`] borrowing that slot, plus the guard value that
    /// previously occupied the slot.
    #[inline]
    pub fn transfer_to_ref(mut self, guard: &mut G) -> (FusedSharedRef<T, G, N>, G) {
        mem::swap(&mut self.guard, guard);
        (FusedSharedRef { guard, shared: self.shared }, self.guard)
    }
    /// Converts `self` into a [`FusedProtected`], widening the non-null
    /// pointer into a nullable `MarkedPtr`.
    #[inline]
    pub fn into_fused_protected(self) -> FusedProtected<T, G, N> {
        FusedProtected { guard: self.guard, protected: self.shared.into_marked_ptr() }
    }
    /// Consumes `self` and returns the contained guard instance, forfeiting
    /// its currently protected value.
    #[inline]
    pub fn into_guard(self) -> G {
        self.guard
    }
}
/********** impl Debug ****************************************************************************/
impl<T, G: Protect<T>, const N: usize> fmt::Debug for FusedShared<T, G, N> {
    /// Deliberately opaque: neither the guard nor the pointer is shown.
    #[inline]
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str("FusedShared { ... }")
    }
}
// *************************************************************************************************
// FusedSharedRef
// *************************************************************************************************
/// A borrowed guard fused with a (non-nullable) [`Shared`] reference.
pub struct FusedSharedRef<'g, T, G, const N: usize> {
    /// Exclusive borrow of the guard backing `shared`.
    pub(crate) guard: &'g mut G,
    /// The protected non-null marked pointer.
    pub(crate) shared: MarkedNonNull<T, N>,
}
/********** impl inherent *************************************************************************/
impl<'g, T, G: Protect<T>, const N: usize> FusedSharedRef<'g, T, G, N> {
    /// Returns the inner [`Shared`] reference.
    // Sound while the borrowed guard protects `shared`.
    #[inline]
    pub fn as_shared(&self) -> Shared<T, G::Reclaim, N> {
        unsafe { Shared::from_marked_non_null(self.shared) }
    }
    /// Moves the guard out of the borrowed slot into an owned
    /// [`FusedShared`]; the caller-supplied spare `guard` value is left
    /// behind in the borrowed slot.
    #[inline]
    pub fn transfer_to(self, mut guard: G) -> FusedShared<T, G, N> {
        mem::swap(self.guard, &mut guard);
        FusedShared { guard, shared: self.shared }
    }
    /// Moves the guard from `self`'s borrowed slot into the new `guard`
    /// slot, returning a [`FusedSharedRef`] with the new lifetime `'h`; the
    /// new slot's previous guard value is left in the old slot.
    #[inline]
    pub fn transfer_to_ref<'h>(self, guard: &'h mut G) -> FusedSharedRef<'h, T, G, N> {
        mem::swap(self.guard, guard);
        FusedSharedRef { guard, shared: self.shared }
    }
    /// Converts `self` into a [`FusedProtectedRef`], widening the non-null
    /// pointer into a nullable `MarkedPtr`.
    #[inline]
    pub fn into_fused_protected_ref(self) -> FusedProtectedRef<'g, T, G, N> {
        FusedProtectedRef { guard: self.guard, protected: self.shared.into_marked_ptr() }
    }
    /// Returns the inner [`Shared`] reference bound to the full borrow
    /// lifetime `'g`.
    #[inline]
    pub fn into_shared(self) -> Shared<'g, T, G::Reclaim, N> {
        unsafe { Shared::from_marked_non_null(self.shared) }
    }
    /// Consumes `self` and returns the borrowed guard, forfeiting the
    /// currently protected value.
    #[inline]
    pub fn into_guard_ref(self) -> &'g mut G {
        self.guard
    }
}
/********** impl Debug ****************************************************************************/
impl<T, G: Protect<T>, const N: usize> fmt::Debug for FusedSharedRef<'_, T, G, N> {
    /// Deliberately opaque: neither the guard nor the pointer is shown.
    #[inline]
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str("FusedSharedRef { ... }")
    }
}
|
use std::env;
use std::fs::File;
use std::io::{LineWriter, Write};
use regex::Regex;
use walkdir::WalkDir;
/// Walks `path` recursively, selects every file whose name matches the
/// gzipped read-1 fastq pattern (see `re_matches_lazy`), and writes one
/// `id` / absolute-parent-directory entry per match into either a CSV
/// (`seq-finder.csv`) or a `[seq]` config file (`seq-finder.conf`) in the
/// current working directory.
///
/// `len` and `sep` control how the sequence id is carved out of the file
/// name (first `len` `sep`-separated words — see `construct_id`).
pub fn find_cleaned_fastq(path: &str, len: usize, sep: char, iscsv: bool) {
    let save_names = get_fnames(iscsv);
    // Fix: the old message was `expect("FILE EXISTS.")`, which is misleading —
    // `File::create` truncates an existing file rather than failing on it;
    // it fails for permission/path problems instead.
    let output = File::create(&save_names).expect("Failed to create the output file");
    let mut line = LineWriter::new(output);
    write_header(&mut line, iscsv);
    WalkDir::new(path)
        .into_iter()
        // Silently skip unreadable directory entries.
        .filter_map(|entry| entry.ok())
        .filter(|e| e.file_type().is_file())
        .for_each(|e| {
            let parent = e.path().parent().unwrap();
            let fname = e.path().file_name().unwrap().to_string_lossy();
            if re_matches_lazy(&fname) {
                let id = construct_id(&fname, len, sep);
                // Record the absolute path of the *containing* directory.
                let full_path = String::from(parent.canonicalize().unwrap().to_string_lossy());
                write_content(&mut line, &id, &full_path, iscsv);
            }
        });
    print_saved_path(&save_names);
}
/// Writes the leading header line: the CSV column header `id,path` when
/// `iscsv` is set, otherwise the `[seq]` section marker of the config format.
fn write_header<W: Write>(line: &mut W, iscsv: bool) {
    let header = if iscsv { "id,path" } else { "[seq]" };
    writeln!(line, "{}", header).unwrap();
}
/// Writes a single entry line, `id,path/` for CSV or `id:path/` for the
/// config format; a trailing slash is always appended to the path.
fn write_content<W: Write>(line: &mut W, id: &str, full_path: &str, iscsv: bool) {
    let delimiter = if iscsv { ',' } else { ':' };
    writeln!(line, "{}{}{}/", id, delimiter, full_path).unwrap();
}
/// Prints the absolute location of the result file (current working
/// directory joined with `save_names`) to stdout.
fn print_saved_path(save_names: &str) {
    let cwd = env::current_dir().unwrap();
    println!(
        "Done! The result is saved as {}/{}",
        cwd.display(),
        save_names
    );
}
/// Returns the output file name: `seq-finder.csv` for CSV output,
/// `seq-finder.conf` otherwise.
fn get_fnames(iscsv: bool) -> String {
    let extension = if iscsv { ".csv" } else { ".conf" };
    format!("seq-finder{}", extension)
}
/// Returns `true` when `fname` looks like a gzipped "read 1" file: a `_` or
/// `-` delimiter, then `read1`/`r1` (case-insensitive), followed eventually
/// by `gz` or `gzip` anywhere later in the name.
fn re_matches_lazy(fname: &str) -> bool {
    lazy_static! {
        // Compiled once on first use and cached for all subsequent calls.
        static ref RE: Regex = Regex::new(r"(_|-)((?i)(read|r)1)(?:.*)(gz|gzip)").unwrap();
    }
    RE.is_match(fname)
}
/// Builds a sequence id from the first `len` `sep`-separated words of
/// `names`, re-joined with `sep`.
///
/// # Panics
///
/// Panics unless `names` splits into strictly more than `len` words — the
/// id must never consume the whole file name (the remainder carries the
/// read/extension portion).
fn construct_id(names: &str, len: usize, sep: char) -> String {
    let words: Vec<&str> = names.split(sep).collect();
    // Fix: the original message read "NO. OF WORDS EXCEED THE SLICES",
    // which stated the failing condition backwards.
    assert!(
        words.len() > len,
        "requested {} words but the name only splits into {}",
        len,
        words.len()
    );
    // Idiomatic replacement for the manual push_str loop: join the prefix.
    words[..len].join(&sep.to_string())
}
#[cfg(test)]
mod test {
    use super::*;
    #[test]
    fn regex_test() {
        // Only the gzipped read-1 file name should match the pattern.
        let zipped_read = "sample_buno_clean_read1.fastq.gz";
        let unzipped_read = "sample_buno_clean_read1.fastq";
        assert_eq!(true, re_matches_lazy(zipped_read));
        assert_eq!(false, re_matches_lazy(unzipped_read));
    }
    #[test]
    fn regex_io_test() {
        use glob::glob;
        use std::path::PathBuf;
        // Requires the repository's `test_files/` fixture directory to
        // contain exactly three matching read-1 gzip files.
        let path = "test_files/*";
        let entries = glob(path)
            .unwrap()
            .filter_map(|ok| ok.ok())
            .collect::<Vec<PathBuf>>();
        let mut files = Vec::new();
        entries.iter().for_each(|e| {
            let path = String::from(e.file_name().unwrap().to_string_lossy());
            if re_matches_lazy(&path) {
                files.push(e);
            }
        });
        assert_eq!(3, files.len());
    }
    #[test]
    fn construct_id_test() {
        // The first three '_'-separated words are re-joined into the id.
        let fnames = "sample_buno_ABCD123_read1.fastq.gz";
        let id = construct_id(fnames, 3, '_');
        assert_eq!("sample_buno_ABCD123", id);
    }
    #[test]
    #[should_panic]
    fn construct_id_panic_test() {
        // len equal to the word count violates the strict `words.len() > len`
        // assertion inside `construct_id`.
        let fnames = "sample_buno_ABCD123_read1.fastq.gz";
        construct_id(fnames, 4, '_');
    }
}
|
use std::net::{SocketAddr, TcpListener, Ipv4Addr};
use std::sync::Arc;
use openssl::{pkey, x509};
use crate::builder::ContextBuilder;
use crate::context::ContextStore;
use crate::session::SessionStore;
mod builder;
mod context;
mod session;
/// PEM-encoded server certificate, embedded at compile time.
const CERTIFICATE: &[u8] = include_bytes!("certificate.pem");
/// PEM-encoded private key matching `CERTIFICATE`, embedded at compile time.
const PRIVATE_KEY: &[u8] = include_bytes!("private_key.pem");
/// A TLS identity: an X.509 certificate paired with its private key.
pub struct Certificate {
    pub certificate: x509::X509,
    pub private_key: pkey::PKey<pkey::Private>,
}
impl Certificate {
    /// Pairs an already-parsed certificate with its private key.
    pub fn new(certificate: x509::X509, private_key: pkey::PKey<pkey::Private>) -> Self {
        Self {
            certificate,
            private_key,
        }
    }
}
fn main() {
    // Parse the embedded PEM; exactly one certificate must be present.
    let certificate = {
        let mut certificates = x509::X509::stack_from_pem(CERTIFICATE).unwrap();
        assert_eq!(certificates.len(), 1);
        certificates.pop().unwrap()
    };
    let private_key = pkey::PKey::private_key_from_pem(PRIVATE_KEY).unwrap();
    let default_certificate = Certificate::new(certificate, private_key);
    // Session store shared between the default context and the acceptor.
    let session_store = Arc::new(SessionStore::new());
    let default_context = {
        let mut builder = ContextBuilder::new_context_builder(&session_store).unwrap();
        builder.add_certificate(&default_certificate).unwrap();
        builder.build()
    };
    let context_store = Arc::new(ContextStore::new(default_context));
    let acceptor = {
        let mut builder = ContextBuilder::new_acceptor_builder(context_store, &session_store).unwrap();
        builder.add_certificate(&default_certificate).unwrap();
        builder.build()
    };
    // Accept a single TCP connection on 127.0.0.1:8080 and perform the TLS
    // handshake; the process exits after handling that one connection.
    let addr = SocketAddr::new(Ipv4Addr::LOCALHOST.into(), 8080);
    let listener = TcpListener::bind(addr).unwrap();
    let (tcp_stream, _) = listener.accept().unwrap();
    acceptor.accept(tcp_stream).unwrap();
}
|
// Thin facade crate: re-exports the implementation crate and declares the
// `unsauron!` expression macro via the pre-"proc macros in expression
// position" `proc_macro_hack` pattern.
#[macro_use]
extern crate proc_macro_hack;
#[allow(unused_imports)]
#[macro_use]
extern crate unsauron_impl;
#[doc(hidden)]
pub use unsauron_impl::*;
// Declares `unsauron!` usable in expression position, expanding through the
// `expand_unsauron` proc macro from the implementation crate.
proc_macro_expr_decl! {
    unsauron! => expand_unsauron
}
|
// #![feature(async_closure)]
use frontend::Driver;
/// Entry point: runs a single blocking parse of "test.txt" through the
/// frontend `Driver`.
fn main() -> std::io::Result<()> {
    let driver = Driver::new();
    // Block the current thread until the async parse completes; the parse
    // result itself is intentionally discarded (`let _`).
    let _ = futures::executor::block_on(driver.parse_module("test.txt".to_string()));
    Ok(())
}
|
use std::ops::{Index, IndexMut};
use typehack::data::*;
use typehack::dim::*;
use typehack::binary::Nat as BNat;
use typehack::binary::I;
use typehack::peano::Nat as PNat;
use typehack::peano::{S, Z};
use typehack::tvec::*;
/// Type macro expanding to a `TCons`/`TNil` type-level list of dimensions,
/// e.g. `Dims![A, B]` becomes `TCons<A, TCons<B, TNil>>`.
#[macro_export]
macro_rules! Dims {
    ($t:ty $(, $rest:ty)*) => ($crate::typehack::tvec::TCons<$t, Dims![$($rest),*]>);
    () => ($crate::typehack::tvec::TNil);
}
/// Value macro building the runtime dimension list: a parenthesized `(expr)`
/// entry becomes a dynamic `Dyn` dimension, while a bare identifier is a
/// static dimension reified via its `as_data()` constructor.
#[macro_export]
macro_rules! dims {
    (($t:expr) $(, $rest:tt)*) => ($crate::typehack::tvec::TCons { elem: $crate::typehack::dim::Dyn($t), next: dims![$($rest),*] });
    ($t:ident $(, $rest:tt)*) => ($crate::typehack::tvec::TCons { elem: $t::as_data(), next: dims![$($rest),*] });
    () => ($crate::typehack::tvec::TNil);
}
/// Abstraction over a type-level list of array dimensions for elements `T`.
// NOTE: pre-2018 Rust — trait methods use anonymous (pattern-less)
// parameters, which is no longer accepted in edition 2018+.
pub trait Dims<T>: Copy {
    /// Type-level (Peano) length of the dimension list.
    type Length: PNat;
    /// Index type: a homogeneous list of `usize` of the same length.
    type Index: Homogeneous<usize>;
    /// Type-level product of all dimensions (the total storage size).
    type Product: Size<T>;
    /// Returns the runtime witness of `Self::Product`.
    fn product(&self) -> Self::Product;
    /// Returns the total element count (runtime product of all dims).
    fn total(&self) -> usize;
    /// Returns `true` when every index component is within its dimension.
    fn is_valid(&self, &Self::Index) -> bool;
    /// Row-major flattening of a multi-index into a linear offset (unchecked).
    fn flatten(&self, Self::Index) -> usize;
    /// Bounds-checked flattening; `None` when any component is out of range.
    fn flatten_safe(&self, Self::Index) -> Option<usize>;
    /// Inverse of `flatten`: expands a linear offset into a multi-index.
    fn expand(&self, usize) -> Self::Index;
}
// Shared method bodies for the `Dims` impls below; invoked once per impl so
// the row-major index arithmetic is written only once.
macro_rules! dims_impl {
    // Non-empty dimension list: head `elem`, tail `next`.
    (@TCons) => {
        type Length = S<L::Length>;
        type Index = TCons<usize, L::Index>;
        fn total(&self) -> usize {
            // Row-major: this dim's extent times the tail's total.
            self.elem.reify() * self.next.total()
        }
        fn is_valid(&self, idx: &TCons<usize, L::Index>) -> bool {
            idx.elem < self.elem.reify() && self.next.is_valid(&idx.next)
        }
        fn flatten(&self, idx: TCons<usize, L::Index>) -> usize {
            // offset = head_index * stride(tail) + flatten(tail)
            idx.elem * self.next.total() + self.next.flatten(idx.next)
        }
        fn flatten_safe(&self, idx: TCons<usize, L::Index>) -> Option<usize> {
            if self.is_valid(&idx) {
                Some(self.flatten(idx))
            } else {
                None
            }
        }
        fn expand(&self, idx: usize) -> TCons<usize, L::Index> {
            // Inverse of `flatten`: division yields the head component,
            // the remainder recurses into the tail.
            TCons {
                elem: idx / self.next.total(),
                next: self.next.expand(idx % self.next.total()),
            }
        }
    };
    // Empty dimension list: the recursion base case.
    (@TNil) => {
        type Length = Z;
        type Index = TNil;
        type Product = I;
        fn total(&self) -> usize {
            // The empty product is one.
            1
        }
        fn is_valid(&self, _idx: &TNil) -> bool {
            true
        }
        fn flatten(&self, _idx: TNil) -> usize {
            0
        }
        fn flatten_safe(&self, _idx: TNil) -> Option<usize> {
            Some(0)
        }
        fn expand(&self, _idx: usize) -> TNil {
            TNil
        }
    };
}
/// `Dims` for a list headed by a static (binary-nat) dimension `M`.
// NOTE(review): `M: DimMul<N>` is stated twice — inline on the generic
// parameter and again in the `where` clause; one of the two is redundant.
impl<T, M: BNat + DimMul<N>, N: Dim + Size<T>, L: Dims<T, Product = N>> Dims<T> for TCons<M, L>
    where M: DimMul<N>,
          <M as DimMul<N>>::Result: Size<T>
{
    type Product = <M as DimMul<N>>::Result;
    fn product(&self) -> Self::Product {
        DimMul::mul(self.elem, self.next.product())
    }
    dims_impl!(@TCons);
}
/// `Dims` for a list headed by a runtime (`Dyn`) dimension.
impl<T, L: Dims<T>> Dims<T> for TCons<Dyn, L>
    where Dyn: DimMul<L::Product>,
          <Dyn as DimMul<L::Product>>::Result: Size<T>
{
    type Product = <Dyn as DimMul<L::Product>>::Result;
    fn product(&self) -> Self::Product {
        DimMul::mul(self.elem, self.next.product())
    }
    dims_impl!(@TCons);
}
/// `Dims` base case: the empty list, whose product is `I` (one).
impl<T> Dims<T> for TNil {
    fn product(&self) -> Self::Product {
        Self::Product::as_data()
    }
    dims_impl!(@TNil);
}
/// `arr[i]` sugar for 1-D arrays; panics on out-of-bounds (unwraps `get`).
impl<T, X: Dim> Index<usize> for Array<T, TCons<X, TNil>>
    where TCons<X, TNil>: Dims<T, Index = TCons<usize, TNil>>
{
    type Output = T;
    #[inline]
    fn index(&self, idx: usize) -> &T {
        self.get(TCons {
            elem: idx,
            next: TNil,
        })
        .unwrap()
    }
}
/// Mutable `arr[i]` sugar for 1-D arrays; panics on out-of-bounds.
impl<T, X: Dim> IndexMut<usize> for Array<T, TCons<X, TNil>>
    where TCons<X, TNil>: Dims<T, Index = TCons<usize, TNil>>
{
    #[inline]
    fn index_mut(&mut self, idx: usize) -> &mut T {
        self.get_mut(TCons {
            elem: idx,
            next: TNil,
        })
        .unwrap()
    }
}
/// `arr[[i, j]]` sugar for 2-D arrays; panics on out-of-bounds.
impl<T, X: Dim, Y: Dim> Index<[usize; 2]> for Array<T, TCons<X, TCons<Y, TNil>>>
    where TCons<X, TCons<Y, TNil>>: Dims<T, Index = TCons<usize, TCons<usize, TNil>>>
{
    type Output = T;
    #[inline]
    fn index(&self, idx: [usize; 2]) -> &T {
        self.get(TCons {
            elem: idx[0],
            next: TCons {
                elem: idx[1],
                next: TNil,
            },
        })
        .unwrap()
    }
}
/// Mutable `arr[[i, j]]` sugar for 2-D arrays; panics on out-of-bounds.
impl<T, X: Dim, Y: Dim> IndexMut<[usize; 2]> for Array<T, TCons<X, TCons<Y, TNil>>>
    where TCons<X, TCons<Y, TNil>>: Dims<T, Index = TCons<usize, TCons<usize, TNil>>>
{
    #[inline]
    fn index_mut(&mut self, idx: [usize; 2]) -> &mut T {
        self.get_mut(TCons {
            elem: idx[0],
            next: TCons {
                elem: idx[1],
                next: TNil,
            },
        })
        .unwrap()
    }
}
/// `arr[[i, j, k]]` sugar for 3-D arrays; panics on out-of-bounds.
impl<T, X: Dim, Y: Dim, Z: Dim> Index<[usize; 3]> for Array<T, TCons<X, TCons<Y, TCons<Z, TNil>>>>
    where TCons<X, TCons<Y, TCons<Z, TNil>>>: Dims<T,
                                                   Index = TCons<usize,
                                                                 TCons<usize, TCons<usize, TNil>>>>
{
    type Output = T;
    #[inline]
    fn index(&self, idx: [usize; 3]) -> &T {
        self.get(TCons {
            elem: idx[0],
            next: TCons {
                elem: idx[1],
                next: TCons {
                    elem: idx[2],
                    next: TNil,
                },
            },
        })
        .unwrap()
    }
}
/// Mutable `arr[[i, j, k]]` sugar for 3-D arrays; panics on out-of-bounds.
impl<T, X: Dim, Y: Dim, Z: Dim> IndexMut<[usize; 3]>
    for Array<T, TCons<X, TCons<Y, TCons<Z, TNil>>>>
    where TCons<X, TCons<Y, TCons<Z, TNil>>>: Dims<T,
                                                   Index = TCons<usize,
                                                                 TCons<usize, TCons<usize, TNil>>>>
{
    #[inline]
    fn index_mut(&mut self, idx: [usize; 3]) -> &mut T {
        self.get_mut(TCons {
            elem: idx[0],
            next: TCons {
                elem: idx[1],
                next: TCons {
                    elem: idx[2],
                    next: TNil,
                },
            },
        })
        .unwrap()
    }
}
/// Dense row-major n-dimensional array whose storage size is the type-level
/// product of the dimension list `D`.
pub struct Array<T, D: Dims<T>>
    where D::Product: Size<T>
{
    /// Runtime dimension extents.
    dims: D,
    /// Flat element storage; length equals `dims.total()`.
    data: Data<T, D::Product>,
}
impl<T, D: Dims<T>> Array<T, D> {
    /// Total number of elements across all dimensions.
    #[inline]
    pub fn length(&self) -> usize {
        self.dims.total()
    }
    /// Bounds-checked element access by multi-index.
    #[inline]
    pub fn get(&self, idx: D::Index) -> Option<&T> {
        self.dims.flatten_safe(idx).map(|i| &self.data[i])
    }
    /// Bounds-checked mutable element access by multi-index.
    #[inline]
    pub fn get_mut(&mut self, idx: D::Index) -> Option<&mut T> {
        self.dims.flatten_safe(idx).map(move |i| &mut self.data[i])
    }
    /// Builds an array with every element cloned from `elem`.
    pub fn from_elem(dims: D, elem: &T) -> Self
        where T: Clone
    {
        Array {
            dims: dims,
            data: Data::from_elem(dims.product(), elem),
        }
    }
    /// Builds an array by calling `f` with each element's multi-index.
    pub fn from_fn<F: Fn(D::Index) -> T>(dims: D, f: F) -> Self {
        Array {
            dims: dims,
            data: Data::from_fn(dims.product(), |idx| f(dims.expand(idx))),
        }
    }
}
|
// Example user record; `PartialEq` is derived so the demo can compare
// constructed values with `assert_eq!`.
#[derive(Debug, PartialEq)]
struct User {
    username: String,
    email: String,
    sign_in_count: u64,
    active: bool,
}
/// Constructs a `User` from the given credentials, defaulting to an active
/// account with an initial sign-in count of 1.
fn build_user(username: String, email: String) -> User {
    // The parameter names match the field names, so the field-init shorthand
    // replaces the redundant `username: username` / `email: email` form
    // (exactly the simplification the original commented-out variant showed).
    User {
        username,
        email,
        active: true,
        sign_in_count: 1,
    }
}
fn main() {
    // Construct a struct value directly from literal field values.
    let literal_user = User {
        email: String::from("another@example.com"),
        username: String::from("name567"),
        active: true,
        sign_in_count: 1,
    };
    let expected = User {
        email: String::from("another@example.com"),
        username: String::from("name567"),
        active: true,
        sign_in_count: 1,
    };
    assert_eq!(literal_user, expected);
    // Construct an equivalent value through the helper function.
    let built_user = build_user(String::from("name567"), String::from("another@example.com"));
    let expected = User {
        email: String::from("another@example.com"),
        username: String::from("name567"),
        active: true,
        sign_in_count: 1,
    };
    assert_eq!(built_user, expected);
}
|
#[macro_use]
extern crate serde;
pub mod access_token {
include!("./access_token.rs");
}
pub mod add_collaborator_option {
include!("./add_collaborator_option.rs");
}
pub mod add_time_option {
include!("./add_time_option.rs");
}
pub mod annotated_tag {
include!("./annotated_tag.rs");
}
pub mod annotated_tag_object {
include!("./annotated_tag_object.rs");
}
pub mod api_error {
include!("./api_error.rs");
}
pub mod attachment {
include!("./attachment.rs");
}
pub mod branch {
include!("./branch.rs");
}
pub mod branch_protection {
include!("./branch_protection.rs");
}
pub mod comment {
include!("./comment.rs");
}
pub mod commit {
include!("./commit.rs");
}
pub mod commit_date_options {
include!("./commit_date_options.rs");
}
pub mod commit_meta {
include!("./commit_meta.rs");
}
pub mod commit_user {
include!("./commit_user.rs");
}
pub mod contents_response {
include!("./contents_response.rs");
}
pub mod create_branch_protection_option {
include!("./create_branch_protection_option.rs");
}
pub mod create_branch_repo_option {
include!("./create_branch_repo_option.rs");
}
pub mod create_email_option {
include!("./create_email_option.rs");
}
pub mod create_file_options {
include!("./create_file_options.rs");
}
pub mod create_fork_option {
include!("./create_fork_option.rs");
}
pub mod create_gpg_key_option {
include!("./create_gpg_key_option.rs");
}
pub mod create_hook_option {
include!("./create_hook_option.rs");
}
pub mod create_hook_option_config {
include!("./create_hook_option_config.rs");
}
pub mod create_issue_comment_option {
include!("./create_issue_comment_option.rs");
}
pub mod create_issue_option {
include!("./create_issue_option.rs");
}
pub mod create_key_option {
include!("./create_key_option.rs");
}
pub mod create_label_option {
include!("./create_label_option.rs");
}
pub mod create_milestone_option {
include!("./create_milestone_option.rs");
}
pub mod create_o_auth2_application_options {
include!("./create_o_auth2_application_options.rs");
}
pub mod create_org_option {
include!("./create_org_option.rs");
}
pub mod create_pull_request_option {
include!("./create_pull_request_option.rs");
}
pub mod create_pull_review_comment {
include!("./create_pull_review_comment.rs");
}
pub mod create_pull_review_options {
include!("./create_pull_review_options.rs");
}
pub mod create_release_option {
include!("./create_release_option.rs");
}
pub mod create_repo_option {
include!("./create_repo_option.rs");
}
pub mod create_status_option {
include!("./create_status_option.rs");
}
pub mod create_team_option {
include!("./create_team_option.rs");
}
pub mod create_user_option {
include!("./create_user_option.rs");
}
pub mod cron {
include!("./cron.rs");
}
pub mod delete_email_option {
include!("./delete_email_option.rs");
}
pub mod delete_file_options {
include!("./delete_file_options.rs");
}
pub mod deploy_key {
include!("./deploy_key.rs");
}
pub mod edit_attachment_options {
include!("./edit_attachment_options.rs");
}
pub mod edit_branch_protection_option {
include!("./edit_branch_protection_option.rs");
}
pub mod edit_deadline_option {
include!("./edit_deadline_option.rs");
}
pub mod edit_git_hook_option {
include!("./edit_git_hook_option.rs");
}
pub mod edit_hook_option {
include!("./edit_hook_option.rs");
}
pub mod edit_issue_comment_option {
include!("./edit_issue_comment_option.rs");
}
pub mod edit_issue_option {
include!("./edit_issue_option.rs");
}
pub mod edit_label_option {
include!("./edit_label_option.rs");
}
pub mod edit_milestone_option {
include!("./edit_milestone_option.rs");
}
pub mod edit_org_option {
include!("./edit_org_option.rs");
}
pub mod edit_pull_request_option {
include!("./edit_pull_request_option.rs");
}
pub mod edit_reaction_option {
include!("./edit_reaction_option.rs");
}
pub mod edit_release_option {
include!("./edit_release_option.rs");
}
pub mod edit_repo_option {
include!("./edit_repo_option.rs");
}
pub mod edit_team_option {
include!("./edit_team_option.rs");
}
pub mod edit_user_option {
include!("./edit_user_option.rs");
}
pub mod email {
include!("./email.rs");
}
pub mod external_tracker {
include!("./external_tracker.rs");
}
pub mod external_wiki {
include!("./external_wiki.rs");
}
pub mod file_commit_response {
include!("./file_commit_response.rs");
}
pub mod file_delete_response {
include!("./file_delete_response.rs");
}
pub mod file_links_response {
include!("./file_links_response.rs");
}
pub mod file_response {
include!("./file_response.rs");
}
pub mod general_api_settings {
include!("./general_api_settings.rs");
}
pub mod general_attachment_settings {
include!("./general_attachment_settings.rs");
}
pub mod general_repo_settings {
include!("./general_repo_settings.rs");
}
pub mod general_ui_settings {
include!("./general_ui_settings.rs");
}
pub mod get_orgs_org_teams_search_response {
include!("./get_orgs_org_teams_search_response.rs");
}
pub mod get_repos_owner_repo_languages_response {
include!("./get_repos_owner_repo_languages_response.rs");
}
pub mod get_users_search_response {
include!("./get_users_search_response.rs");
}
pub mod git_blob_response {
include!("./git_blob_response.rs");
}
pub mod git_entry {
include!("./git_entry.rs");
}
pub mod git_hook {
include!("./git_hook.rs");
}
pub mod git_object {
include!("./git_object.rs");
}
pub mod git_tree_response {
include!("./git_tree_response.rs");
}
pub mod gpg_key {
include!("./gpg_key.rs");
}
pub mod gpg_key_email {
include!("./gpg_key_email.rs");
}
pub mod hook {
include!("./hook.rs");
}
pub mod identity {
include!("./identity.rs");
}
pub mod internal_tracker {
include!("./internal_tracker.rs");
}
pub mod issue {
include!("./issue.rs");
}
pub mod issue_deadline {
include!("./issue_deadline.rs");
}
pub mod issue_labels_option {
include!("./issue_labels_option.rs");
}
pub mod issue_template {
include!("./issue_template.rs");
}
pub mod label {
include!("./label.rs");
}
pub mod markdown_option {
include!("./markdown_option.rs");
}
pub mod merge_pull_request_option {
include!("./merge_pull_request_option.rs");
}
pub mod migrate_repo_form {
include!("./migrate_repo_form.rs");
}
pub mod migrate_repo_options {
include!("./migrate_repo_options.rs");
}
pub mod milestone {
include!("./milestone.rs");
}
pub mod miscellaneous {
include!("./miscellaneous.rs");
}
pub mod notification_count {
include!("./notification_count.rs");
}
pub mod notification_subject {
include!("./notification_subject.rs");
}
pub mod notification_thread {
include!("./notification_thread.rs");
}
pub mod o_auth2_application {
include!("./o_auth2_application.rs");
}
pub mod organization {
include!("./organization.rs");
}
pub mod payload_commit {
include!("./payload_commit.rs");
}
pub mod payload_commit_verification {
include!("./payload_commit_verification.rs");
}
pub mod payload_user {
include!("./payload_user.rs");
}
pub mod permission {
include!("./permission.rs");
}
pub mod post_users_username_tokens_body {
include!("./post_users_username_tokens_body.rs");
}
pub mod pr_branch_info {
include!("./pr_branch_info.rs");
}
pub mod public_key {
include!("./public_key.rs");
}
pub mod pull_request {
include!("./pull_request.rs");
}
pub mod pull_request_meta {
include!("./pull_request_meta.rs");
}
pub mod pull_review {
include!("./pull_review.rs");
}
pub mod pull_review_comment {
include!("./pull_review_comment.rs");
}
pub mod pull_review_request_options {
include!("./pull_review_request_options.rs");
}
pub mod reaction {
include!("./reaction.rs");
}
pub mod reference {
include!("./reference.rs");
}
pub mod release {
include!("./release.rs");
}
pub mod repo_commit {
include!("./repo_commit.rs");
}
pub mod repo_topic_options {
include!("./repo_topic_options.rs");
}
pub mod repository {
include!("./repository.rs");
}
pub mod repository_meta {
include!("./repository_meta.rs");
}
pub mod search_results {
include!("./search_results.rs");
}
pub mod server_version {
include!("./server_version.rs");
}
pub mod status {
include!("./status.rs");
}
pub mod stop_watch {
include!("./stop_watch.rs");
}
pub mod submit_pull_review_options {
include!("./submit_pull_review_options.rs");
}
pub mod tag {
include!("./tag.rs");
}
pub mod team {
include!("./team.rs");
}
pub mod topic_name {
include!("./topic_name.rs");
}
pub mod topic_response {
include!("./topic_response.rs");
}
pub mod tracked_time {
include!("./tracked_time.rs");
}
pub mod transfer_repo_option {
include!("./transfer_repo_option.rs");
}
pub mod update_file_options {
include!("./update_file_options.rs");
}
pub mod user {
include!("./user.rs");
}
pub mod user_heatmap_data {
include!("./user_heatmap_data.rs");
}
pub mod watch_info {
include!("./watch_info.rs");
}
pub mod client {
use futures::Stream;
use parking_lot::Mutex;
use std::borrow::Cow;
use std::fmt::Debug;
use std::path::Path;
/// Common API errors.
///
/// Raw responses are carried inside a `Mutex` — presumably so the error
/// type stays `Sync` even when `R` is not; confirm against the generator.
#[derive(Debug, thiserror::Error)]
pub enum ApiError<R: Debug + Send + 'static> {
    /// The server answered with a non-success status code.
    #[error("API request failed for path: {} (code: {})", _0, _1)]
    Failure(String, http::status::StatusCode, Mutex<R>),
    /// The response `Content-Type` is neither JSON nor YAML.
    #[error("Unsupported media type in response: {}", _0)]
    UnsupportedMediaType(String, Mutex<R>),
    /// Transport-level failure from the HTTP client.
    #[error("An error has occurred while performing the API request: {}", _0)]
    Reqwest(reqwest::Error),
    /// Local I/O failure (e.g. reading a file for a multipart upload).
    #[error("I/O error: {}", _0)]
    Io(std::io::Error),
    /// JSON (de)serialization failure.
    #[error("Error en/decoding \"application/json\" data: {}", _0)]
    ApplicationJson(serde_json::Error),
    /// YAML (de)serialization failure.
    #[error("Error en/decoding \"application/yaml\" data: {}", _0)]
    ApplicationYaml(serde_yaml::Error),
}
/// Form object for building multipart request body.
///
/// Abstracts the concrete multipart type so callers are not tied to one
/// HTTP client implementation.
pub trait Form: Sized {
    /// Creates a new builder.
    fn new() -> Self;
    /// Adds the given key and value as text.
    fn text<T, U>(self, key: T, value: U) -> Self
        where T: Into<Cow<'static, str>>,
              U: Into<Cow<'static, str>>;
    /// Adds the file from the given path for streaming.
    ///
    /// Returns an `io::Error` when the file cannot be opened.
    fn file<K>(self, key: K, path: &Path) -> std::io::Result<Self>
        where K: Into<Cow<'static, str>>;
}
/// HTTP Request.
///
/// Builder-style abstraction over an outgoing request; every method
/// consumes and returns `Self` so calls can be chained.
pub trait Request {
    /// Multipart form type accepted by `multipart_form_data`.
    type Form: Form;
    /// Sets the header with the given key and value.
    fn header(self, name: &'static str, value: &str) -> Self;
    /// Sets body using the given vector of bytes.
    ///
    /// **NOTE:** Appropriate `Content-Type` header must be set
    /// after calling this method.
    fn body_bytes(self, body: Vec<u8>) -> Self;
    /// Sets JSON body based on the given value.
    fn json<T: serde::Serialize>(self, value: &T) -> Self;
    /// Sets `multipart/form-data` body using the given form.
    fn multipart_form_data(self, form: Self::Form) -> Self;
    /// Sets/adds query parameters based on the given value.
    ///
    /// **NOTE:** This method must be called only once. It's unspecified
    /// as to whether this appends/replaces query parameters.
    fn query<T: serde::Serialize>(self, params: &T) -> Self;
}
impl Form for reqwest::multipart::Form {
    fn new() -> Self {
        reqwest::multipart::Form::new()
    }
    fn text<T, U>(self, key: T, value: U) -> Self
        where T: Into<Cow<'static, str>>,
              U: Into<Cow<'static, str>>
    {
        reqwest::multipart::Form::text(self, key, value)
    }
    // Streams the file instead of buffering it whole: the std `File` is
    // converted into a tokio `File`, framed into a byte stream, and
    // wrapped as the part's request body.
    fn file<K>(self, key: K, path: &Path) -> std::io::Result<Self>
        where K: Into<Cow<'static, str>>
    {
        use reqwest::multipart::{Form, Part};
        use tokio_util::codec::{BytesCodec, FramedRead};
        let fd = std::fs::File::open(path)?;
        let reader = tokio::fs::File::from_std(fd);
        let bytes_stream = FramedRead::new(reader, BytesCodec::new());
        let part = Part::stream(reqwest::Body::wrap_stream(bytes_stream));
        Ok(Form::part(self, key, part))
    }
}
// Thin delegation: each trait method forwards to the corresponding
// inherent `reqwest::RequestBuilder` method.
impl Request for reqwest::RequestBuilder {
    type Form = reqwest::multipart::Form;
    fn header(self, name: &'static str, value: &str) -> Self {
        reqwest::RequestBuilder::header(self, name, value)
    }
    fn multipart_form_data(self, form: Self::Form) -> Self {
        self.multipart(form)
    }
    fn body_bytes(self, body: Vec<u8>) -> Self {
        self.body(body)
    }
    // Qualified call selects the inherent `json`, not this trait method.
    fn json<T: serde::Serialize>(self, value: &T) -> Self {
        <reqwest::RequestBuilder>::json(self, value)
    }
    fn query<T: serde::Serialize>(self, params: &T) -> Self {
        reqwest::RequestBuilder::query(self, params)
    }
}
/// HTTP Response.
#[async_trait::async_trait]
pub trait Response: Debug + Send + Sized {
    /// Byte-buffer type yielded by `stream`/`body_bytes`.
    type Bytes: AsRef<[u8]>;
    /// Error type produced while streaming the body.
    type Error;
    /// Gets the value for the given header name, if any.
    fn header(&self, name: &'static str) -> Option<&str>;
    /// Takes all headers from the response.
    fn take_headers(&mut self) -> http::header::HeaderMap;
    /// Status code for this response.
    fn status(&self) -> http::status::StatusCode;
    /// Media type for this response body (if any).
    fn media_type(&self) -> Option<mime::MediaType>;
    /// Response body as a stream.
    fn stream(self) -> Box<dyn Stream<Item=Result<Self::Bytes, Self::Error>> + Unpin>;
    /// Vector of bytes from the response body.
    async fn body_bytes(self) -> Result<Self::Bytes, ApiError<Self>>;
}
#[async_trait::async_trait]
impl Response for reqwest::Response {
    type Bytes = bytes::Bytes;
    type Error = reqwest::Error;
    fn header(&self, name: &'static str) -> Option<&str> {
        // Header values that are not valid UTF-8 are reported as absent.
        self.headers().get(name).and_then(|v| v.to_str().ok())
    }
    fn take_headers(&mut self) -> http::header::HeaderMap {
        // Swaps an empty map into the response and returns the old one.
        std::mem::replace(self.headers_mut(), http::header::HeaderMap::new())
    }
    fn status(&self) -> http::status::StatusCode {
        reqwest::Response::status(self)
    }
    fn media_type(&self) -> Option<mime::MediaType> {
        // Parsed from the Content-Type header; None when absent/unparsable.
        self.header(http::header::CONTENT_TYPE.as_str())
            .and_then(|v| v.parse().ok())
    }
    fn stream(self) -> Box<dyn Stream<Item=Result<Self::Bytes, Self::Error>> + Unpin> {
        Box::new(self.bytes_stream()) as Box<_>
    }
    async fn body_bytes(self) -> Result<Self::Bytes, ApiError<Self>> {
        Ok(self.bytes().await.map_err(ApiError::Reqwest)?)
    }
}
/// Represents an API client.
#[async_trait::async_trait]
pub trait ApiClient {
    /// Request builder type produced by `request_builder`.
    type Request: Request + Send;
    /// Response type produced by `make_request`.
    type Response: Response;
    /// Consumes a method and a relative path and produces a request builder for a single API call.
    fn request_builder(&self, method: http::Method, rel_path: &str) -> Self::Request;
    /// Performs the HTTP request using the given `Request` object
    /// and returns a `Response` future.
    async fn make_request(&self, req: Self::Request) -> Result<Self::Response, ApiError<Self::Response>>;
}
#[async_trait::async_trait]
impl ApiClient for reqwest::Client {
    type Request = reqwest::RequestBuilder;
    type Response = reqwest::Response;
    /// Builds a request for `rel_path` against the fixed base URL.
    ///
    /// `Sendable::rel_path` is documented to begin with `/`; the path is
    /// normalized so exactly one `/` separates it from the base. (The
    /// previous code stripped the leading slash without re-adding one,
    /// producing e.g. `.../api/v1users` instead of `.../api/v1/users`.)
    fn request_builder(&self, method: http::Method, rel_path: &str) -> Self::Request {
        let mut u = String::from("https://example.com/api/v1");
        u.push('/');
        u.push_str(rel_path.trim_start_matches('/'));
        self.request(method, &u)
    }
    /// Builds and executes the request, mapping any transport failure to
    /// `ApiError::Reqwest`.
    async fn make_request(&self, req: Self::Request) -> Result<Self::Response, ApiError<Self::Response>> {
        let req = req.build().map_err(ApiError::Reqwest)?;
        let resp = self.execute(req).await.map_err(ApiError::Reqwest)?;
        Ok(resp)
    }
}
/// A trait for indicating that the implementor can send an API call.
#[async_trait::async_trait]
pub trait Sendable<Client>
where
    Client: ApiClient + Sync + 'static,
    Self: Sized
{
    /// The output object from this API request.
    type Output: serde::de::DeserializeOwned;
    /// HTTP method used by this call.
    const METHOD: http::Method;
    /// Relative URL for this API call formatted appropriately with parameter values.
    ///
    /// **NOTE:** This URL **must** begin with `/`.
    fn rel_path(&self) -> std::borrow::Cow<'static, str>;
    /// Modifier for this object. Builders override this method if they
    /// wish to add query parameters, set body, etc.
    fn modify(&self, req: Client::Request) -> Result<Client::Request, ApiError<Client::Response>> {
        Ok(req)
    }
    /// Sends the request and returns a future for the response object.
    ///
    /// Dispatches on the response's media type: JSON and YAML bodies are
    /// decoded into `Self::Output`; anything else yields
    /// `ApiError::UnsupportedMediaType` carrying the raw response.
    async fn send(&self, client: &Client) -> Result<ResponseWrapper<Self::Output, Self>, ApiError<Client::Response>> {
        let resp = self.send_raw(client).await?;
        let media = resp.media_type();
        if let Some(ty) = media {
            // `M_0` is "application/json" (see `media_types` below).
            if media_types::M_0.matches(&ty) {
                return ResponseWrapper::wrap(resp, |r| async {
                    let bytes = r.body_bytes().await?;
                    serde_json::from_reader(bytes.as_ref()).map_err(ApiError::from)
                }).await
            }
            // `M_1` is "application/yaml".
            else if media_types::M_1.matches(&ty) {
                return ResponseWrapper::wrap(resp, |r| async {
                    let bytes = r.body_bytes().await?;
                    serde_yaml::from_reader(bytes.as_ref()).map_err(ApiError::from)
                }).await
            }
        }
        // Unknown/missing media type: surface the raw Content-Type header.
        let ty = resp.header(http::header::CONTENT_TYPE.as_str())
            .map(|v| String::from_utf8_lossy(v.as_bytes()).into_owned())
            .unwrap_or_default();
        Err(ApiError::UnsupportedMediaType(ty, Mutex::new(resp)))
    }
    /// Convenience method for returning a raw response after sending a request.
    ///
    /// Non-success statuses become `ApiError::Failure`, carrying the
    /// request path and status code.
    async fn send_raw(&self, client: &Client) -> Result<Client::Response, ApiError<Client::Response>> {
        let rel_path = self.rel_path();
        let req = self.modify(client.request_builder(Self::METHOD, &rel_path))?;
        let resp = client.make_request(req).await?;
        if resp.status().is_success() {
            Ok(resp)
        } else {
            Err(ApiError::Failure(rel_path.into_owned(), resp.status(), Mutex::new(resp)))
        }
    }
}
/// Wrapper containing response-related information.
pub struct ResponseWrapper<T, B> {
    /// Response object
    pub object: T,
    /// Response headers
    pub headers: http::HeaderMap,
    /// Response status code
    pub status: http::status::StatusCode,
    /// Ties the wrapper to the builder type that produced it without
    /// storing a value of that type.
    _builder: core::marker::PhantomData<B>,
}
impl<T, B> ResponseWrapper<T, B> {
    /// Captures the status and headers from `resp`, then lets `f` consume
    /// the response to decode the body into the wrapped object.
    ///
    /// Status and headers must be read *before* calling `f`, because `f`
    /// takes the response by value.
    pub(crate) async fn wrap<F, R>(mut resp: R, f: impl FnOnce(R) -> F) -> Result<Self, ApiError<R>>
        where F: std::future::Future<Output=Result<T, ApiError<R>>>,
              R: Response + 'static
    {
        let status = resp.status();
        let headers = resp.take_headers();
        Ok(ResponseWrapper {
            object: f(resp).await?,
            headers,
            status,
            _builder: core::marker::PhantomData,
        })
    }
}
// Panicking stub — presumably present only so generated code can place a
// uniform `Deserialize` bound; a wrapper is never actually deserialized.
impl<'de, T, B> serde::de::Deserialize<'de> for ResponseWrapper<T, B> {
    fn deserialize<D>(_: D) -> Result<Self, D::Error>
        where
            D: serde::de::Deserializer<'de>
    {
        unimplemented!("ResponseWrapper is not supposed to be deserialized.");
    }
}
// Deref to the wrapped response object, so callers can use a
// `ResponseWrapper<T, _>` wherever a `&T` is expected.
impl<T, B> std::ops::Deref for ResponseWrapper<T, B> {
    type Target = T;
    fn deref(&self) -> &Self::Target {
        &self.object
    }
}
// Mutable counterpart of the `Deref` impl above.
impl<T, B> std::ops::DerefMut for ResponseWrapper<T, B> {
    fn deref_mut(&mut self) -> &mut <Self as std::ops::Deref>::Target {
        &mut self.object
    }
}
/// Media ranges used by `Sendable::send` to choose a body decoder.
pub mod media_types {
    use lazy_static::lazy_static;
    lazy_static! {
        /// `application/json`
        pub static ref M_0: mime::MediaRange =
            mime::MediaRange::parse("application/json").expect("cannot parse \"application/json\" as media range");
        /// `application/yaml`
        pub static ref M_1: mime::MediaRange =
            mime::MediaRange::parse("application/yaml").expect("cannot parse \"application/yaml\" as media range");
    }
}
// `From` conversions so `?` can lift std/serde errors into `ApiError`.
impl<R: Response + 'static> From<std::io::Error> for ApiError<R> {
    fn from(e: std::io::Error) -> Self {
        ApiError::Io(e)
    }
}
impl<R: Response + 'static> From<serde_json::Error> for ApiError<R> {
    fn from(e: serde_json::Error) -> Self {
        ApiError::ApplicationJson(e)
    }
}
impl<R: Response + 'static> From<serde_yaml::Error> for ApiError<R> {
    fn from(e: serde_yaml::Error) -> Self {
        ApiError::ApplicationYaml(e)
    }
}
}
pub mod generics {
include!("./generics.rs");
}
pub mod util {
include!("./util.rs");
}
|
use chrono::prelude::*;
use serde::{Deserialize, Serialize};
/// Message exchanged by the echo service: an opaque payload plus a UTC
/// timestamp.
#[derive(PartialEq, Clone, Serialize, Deserialize, Debug)]
pub struct EchoMsg {
    /// Free-form message body.
    pub payload: String,
    /// Timestamp associated with the message (UTC).
    pub ts: DateTime<Utc>,
}
|
use structopt::StructOpt;
/// Command-line arguments parsed by StructOpt.
#[derive(Debug, StructOpt)]
struct Cli {
    //pattern: String,
    /// Filesystem path to operate on (the file to obliterate).
    #[structopt(parse(from_os_str))]
    path: std::path::PathBuf,
}
/// Entry point: deletes the file named on the command line, refusing
/// directories and reporting missing paths.
fn main() -> Result<(), Box<dyn std::error::Error>> {
    let args = Cli::from_args();
    let path_exists = obliterate::file_io::path_exists(&args.path);
    // Guard clause: nothing at that path, report and stop.
    if !path_exists {
        println!("No file/directory {} exists!", args.path.display());
        return Ok(());
    }
    println!(
        "Does file {} exist? --> {}",
        args.path.display(),
        path_exists
    );
    // Check whether the path is a file or a directory
    if obliterate::file_io::is_a_file(&args.path) {
        // Delete the file
        obliterate::file_io::delete_file(&args.path)?;
        println!("Obliterating {}", args.path.display());
        println!("Done!")
    } else {
        println!("Can't obliterate {}", args.path.display());
        println!("{} is a directory", args.path.display());
    }
    Ok(())
}
|
use serde::{Deserialize, Serialize};
/// Per-tool option payloads.
#[derive(Debug, Clone, Copy, Eq, PartialEq, Serialize, Deserialize, Hash)]
pub enum ToolOptions {
    /// Selection tool, with how new selections combine with the current one.
    Select { append_mode: SelectAppendMode },
    /// Ellipse tool (no options).
    Ellipse,
    /// Shape tool, with which shape it draws.
    Shape { shape_type: ShapeType },
}
/// How a fresh selection combines with the existing one. Variant names
/// follow the usual selection boolean-op conventions (new/add/subtract/
/// intersect).
#[derive(Debug, Clone, Copy, Eq, PartialEq, Serialize, Deserialize, Hash)]
pub enum SelectAppendMode {
    New,
    Add,
    Subtract,
    Intersect,
}
/// Parameterized shapes the shape tool can draw.
#[derive(Debug, Clone, Copy, Eq, PartialEq, Serialize, Deserialize, Hash)]
pub enum ShapeType {
    /// Star with the given number of vertices.
    Star { vertices: u32 },
    /// Polygon with the given number of vertices.
    Polygon { vertices: u32 },
}
|
use htmldsl::attributes;
use htmldsl::elements;
use htmldsl::units;
use htmldsl::TagRenderableIntoElement;
use crate::html::shared;
use crate::html::util;
use crate::models;
/// Renders the detail page `<body>` for one game: shared navigation links,
/// a heading with "this game"/"edit" anchors, the game itself, and one
/// cursor-movement form button per direction.
pub fn page<'a>(game: models::Game) -> elements::Body<'a> {
    elements::Body::style_less(vec![
        shared::index_link(),
        shared::games_link(),
        elements::H3::style_less(vec![
            // Anchor back to this game's own page.
            elements::A::style_less(
                attributes::Href {
                    value: units::SourceValue::new(format!("/games/{}", game.id)),
                },
                vec![htmldsl::text("this game")],
            )
            .into_element(),
            // Anchor to the edit view of the same game.
            elements::A::style_less(
                attributes::Href {
                    value: units::SourceValue::new(format!("/games/{}/edit", game.id)),
                },
                vec![htmldsl::text("edit")],
            )
            .into_element(),
        ])
        .into_element(),
        // NOTE(review): the `false` flags here presumably mean
        // "not in edit mode" — confirm against `models::Game::into_html`
        // and `util::cursor_form_button`.
        game.into_html(false),
        util::cursor_form_button(game.id, models::Direction::Left, false),
        util::cursor_form_button(game.id, models::Direction::Up, false),
        util::cursor_form_button(game.id, models::Direction::Down, false),
        util::cursor_form_button(game.id, models::Direction::Right, false),
    ])
}
|
use crate::importer::state::Difference;
use git2::Cred;
use git2::RemoteCallbacks;
use git2::Repository;
use std::error::Error;
use std::{
fs, io,
path::{Path, PathBuf},
};
use log::{debug, info};
/// Recursively walks `src` starting at the relative directory `cur` and
/// invokes `op` for every regular file found, passing:
/// 1. the file's path under `src`,
/// 2. the corresponding path under `dest`,
/// 3. the containing directory's path relative to `src`.
///
/// Paths listed in `ignore_files` are skipped (including whole subtrees
/// when they are directories). Takes `&[PathBuf]` instead of the previous
/// `&Vec<PathBuf>` — existing `&vec` callers still coerce.
pub fn find_equal_files<F>(
    src: &Path,
    dest: &Path,
    cur: &Path,
    ignore_files: &[PathBuf],
    op: &mut F,
) -> io::Result<()>
where
    F: FnMut(&Path, &Path, &Path) -> io::Result<()>,
{
    let cur_dir = src.join(cur);
    if cur_dir.is_dir() {
        for entry in fs::read_dir(cur_dir)? {
            let entry = entry?;
            let path = entry.path();
            // Compare paths without allocating a fresh PathBuf per entry
            // (the old `contains(&path.to_path_buf())` cloned every time).
            if ignore_files.iter().any(|f| f.as_path() == path.as_path()) {
                debug!("Ignoring {:?}", path);
                continue;
            }
            if path.is_dir() {
                // Cannot fail: `path` was produced under `src`.
                let cur = path.strip_prefix(src).unwrap();
                find_equal_files(src, dest, cur, ignore_files, op)?;
            } else if path.is_file() {
                op(&path, &dest.join(cur).join(path.file_name().unwrap()), cur)?;
            }
        }
    }
    Ok(())
}
/// Walks the directory tree under `src_path` and, for every source
/// directory that also exists under `dest_path` at the same relative
/// location, calls `op` with that destination directory's path.
pub fn find_equal_dir<F>(
    src_path: &Path,
    dest_path: &Path,
    relative_path: &Path,
    op: &mut F,
) -> io::Result<()>
where
    F: FnMut(&Path) -> io::Result<()>,
{
    let src_cur = src_path.join(relative_path);
    // Guard: nothing to do when the source side is not a directory.
    if !src_cur.is_dir() {
        return Ok(());
    }
    let dest_cur = dest_path.join(relative_path);
    // Guard: skip when the mirrored directory is absent on the dest side.
    if !dest_cur.exists() {
        return Ok(());
    }
    op(&dest_cur)?;
    for entry in fs::read_dir(src_cur)? {
        let child = entry?.path();
        if child.is_dir() {
            // Cannot fail: `child` was produced under `src_path`.
            let rel = child.strip_prefix(src_path).unwrap();
            find_equal_dir(src_path, dest_path, rel, op)?;
        }
    }
    Ok(())
}
/// Visits every regular file below `path`, recursing into directories.
/// The file check uses `symlink_metadata`, which reports the link itself,
/// so files reached through a symlink are skipped; note that `is_dir()`
/// still follows directory symlinks when recursing.
pub fn _find_all_files_symlink<F>(path: &Path, op: &mut F) -> io::Result<()>
where
    F: FnMut(&Path) -> io::Result<()>,
{
    // Guard: non-directories (including missing paths) are a no-op.
    if !path.is_dir() {
        return Ok(());
    }
    for entry in fs::read_dir(path)? {
        let child = entry?.path();
        if child.is_dir() {
            _find_all_files_symlink(&child, op)?;
        } else if child.symlink_metadata()?.is_file() {
            op(&child)?;
        }
    }
    Ok(())
}
/// Pushes the local `master` branch to the `origin` remote, authenticating
/// over SSH with the given private key (see `get_callbacks`).
pub fn repository_push(
    repository: &git2::Repository,
    private_key_path: &Path,
) -> Result<(), Box<dyn Error>> {
    let mut remote = repository.find_remote("origin")?;
    let mut po = git2::PushOptions::new();
    po.remote_callbacks(get_callbacks(private_key_path));
    // Refspec: local master onto remote master.
    remote.push(&["refs/heads/master:refs/heads/master"], Some(&mut po))?;
    Ok(())
}
/// Stages the given paths (relative to the repository workdir) and commits
/// them on top of HEAD, using the default git config's name/email as both
/// author and committer.
pub fn repository_commit(
    paths: Vec<&Path>,
    repository: &git2::Repository,
    description: &str,
) -> Result<(), git2::Error> {
    let signature = get_signature()?;
    let mut index = repository.index()?;
    for path in paths.iter() {
        index.add_path(path)?;
    }
    index.write()?;
    // Persist the staged tree, then commit it with HEAD as sole parent.
    let oid = index.write_tree()?;
    let parent_commit = repository.head()?.peel_to_commit()?;
    let tree = repository.find_tree(oid)?;
    repository.commit(
        Some("HEAD"),
        &signature,
        &signature,
        description,
        &tree,
        &[&parent_commit],
    )?;
    Ok(())
}
/// Stages *everything* under the workdir (like `git add .`) and commits it
/// on top of HEAD; author/committer come from the default git config.
pub fn repository_commit_all(
    repository: &git2::Repository,
    description: &str,
) -> Result<(), git2::Error> {
    let signature = get_signature()?;
    let mut index = repository.index()?;
    index.add_all(["."].iter(), git2::IndexAddOption::DEFAULT, None)?;
    index.write()?;
    // Persist the staged tree, then commit it with HEAD as sole parent.
    let oid = index.write_tree()?;
    let parent_commit = repository.head()?.peel_to_commit()?;
    let tree = repository.find_tree(oid)?;
    repository.commit(
        Some("HEAD"),
        &signature,
        &signature,
        description,
        &tree,
        &[&parent_commit],
    )?;
    Ok(())
}
/// Fetches the `master` branch from `origin` over SSH. Note this only
/// fetches — it does not merge or fast-forward the local branch.
pub fn repository_update(
    repository: &Repository,
    private_key_path: &Path,
) -> Result<(), git2::Error> {
    let mut remote = repository.find_remote("origin")?;
    let mut options = git2::FetchOptions::new();
    options.remote_callbacks(get_callbacks(private_key_path));
    remote.fetch(&["master"], Some(&mut options), None)?;
    Ok(())
}
/// Opens the repository at `path`, cloning it from `url` when it does not
/// exist. If an existing checkout's `origin` points at a different URL,
/// the directory is removed and the function recurses to clone afresh.
pub fn repository_fetch(
    url: &str,
    path: &Path,
    private_key_path: &Path,
) -> Result<Repository, Box<dyn Error>> {
    // Idiom: `is_empty()` instead of `len() == 0`.
    if url.is_empty() {
        return Err(io::Error::new(
            io::ErrorKind::InvalidInput,
            "Repository url can not be empty",
        )
        .into());
    }
    let repo = match Repository::open(path) {
        Ok(r) => r,
        Err(_) => {
            info!("Repository path does not exist cloning...");
            let mut fo = git2::FetchOptions::new();
            fo.remote_callbacks(get_callbacks(private_key_path));
            let mut builder = git2::build::RepoBuilder::new();
            builder.fetch_options(fo);
            // `url`/`path` are already references; no extra borrow needed.
            let repo = builder.clone(url, path)?;
            return Ok(repo);
        }
    };
    // Add origin if it does not exist yet.
    if repo.find_remote("origin").is_err() {
        repo.remote("origin", url)?;
    }
    // If the existing origin differs, remove the checkout and fetch again.
    // NOTE(review): `url()` is None for non-UTF-8 remote URLs; the unwrap
    // would panic there — confirm that's acceptable.
    if repo.find_remote("origin")?.url().unwrap() != url {
        fs::remove_dir_all(path)?;
        return repository_fetch(url, path, private_key_path);
    }
    Ok(repo)
}
/// Renders each difference as `[kind] path`, one per line.
///
/// Takes `&[Difference]` instead of the previous `&Vec<Difference>`;
/// existing `&vec` callers still coerce via deref.
pub fn differences_to_string(differences: &[Difference]) -> String {
    differences
        .iter()
        .map(|diff| format!("[{}] {}", diff.kind, diff.path))
        .collect::<Vec<String>>()
        .join("\n")
}
/// Builds remote callbacks that answer credential requests with the given
/// SSH private key, using the username from the URL (default `"git"`).
fn get_callbacks<'a>(private_key_path: &'a Path) -> RemoteCallbacks<'a> {
    let mut callbacks = RemoteCallbacks::new();
    callbacks.credentials(move |url, username_from_url, _allowed_types| {
        debug!("Asking ssh credentials for: {:?}", url);
        // No passphrase and no separate public key file are supplied.
        Cred::ssh_key(
            username_from_url.unwrap_or("git"),
            None,
            private_key_path,
            None,
        )
    });
    callbacks
}
/// Builds a commit signature from `user.name` / `user.email` in the
/// default git configuration, timestamped "now".
fn get_signature<'a>() -> Result<git2::Signature<'a>, git2::Error> {
    let config = git2::Config::open_default()?;
    let name = config.get_entry("user.name")?;
    // NOTE(review): `value()` is None for non-UTF-8 config values; these
    // unwraps would panic in that case — confirm that's acceptable.
    let name = name.value().unwrap();
    let email = config.get_entry("user.email")?;
    let email = email.value().unwrap();
    info!("Using name: {} email: {} for signature", name, email);
    git2::Signature::now(&name, &email)
}
|
/// Advent of Code 2020 day 1: find entries in `inputs/d1` summing to 2020
/// and print the product — first for pairs, then for triples.
pub fn day1() {
    use std::fs;
    let input = fs::read_to_string("inputs/d1").unwrap();
    // Non-numeric lines are silently dropped.
    let entries: Vec<i64> = input
        .lines()
        .filter_map(|line| line.parse::<i64>().ok())
        .collect();
    // PART 1 — naive O(n^2): test every ordered pair for a 2020 sum.
    'part1: for (i, a) in entries.iter().enumerate() {
        for (j, b) in entries.iter().enumerate() {
            // Skip pairing an element with itself, then check the sum.
            if i != j && a + b == 2020 {
                let multiplied = a * b;
                println!("Solution found: {} and {}. Multiplied: {}", a, b, multiplied);
                // Only one match is assumed, so stop at the first hit.
                break 'part1;
            }
        }
    }
    // Faster part-1 ideas (not implemented): sort in O(n log n), then walk
    // from both ends toward the middle looking for the target sum; or
    // partition around target/2 and only compare across the split —
    // possibly recursively, mergesort-style.
    // (In-place sort would be `entries.sort_unstable();`.)
    // PART 2 — the same naive scheme one level deeper, O(n^3). In general
    // this is O(n^k) for k addends, which does not scale well.
    'part2: for (i, a) in entries.iter().enumerate() {
        for (j, b) in entries.iter().enumerate() {
            for (k, c) in entries.iter().enumerate() {
                // The j != k identity check is implied by the other two.
                if i != j && i != k && a + b + c == 2020 {
                    let multiplied = a * b * c;
                    println!("Solution found: {} and {} and {}. Multiplied: {}", a, b, c, multiplied);
                    break 'part2;
                }
            }
        }
    }
}
#![feature(slice_patterns)]
pub mod eval;
pub mod lexer;
pub mod parser;
|
#![allow(unused_parens)]
use shorthand::ShortHand;
#[derive(ShortHand, Default)]
struct Example {
    // The parentheses are deliberate: this exercises how the `ShortHand`
    // derive handles a parenthesized field type `(u8)` (hence the
    // file-level `allow(unused_parens)`). Do not "clean them up".
    value: (u8),
}
// Compile-time check: the generated getter returns the parenthesized type.
fn main() { let _: (u8) = Example::default().value(); }
|
#![forbid(unsafe_code)]
#![warn(rust_2018_idioms, single_use_lifetimes, unreachable_pub)]
#![warn(clippy::pedantic)]
#![allow(
clippy::match_same_arms,
clippy::similar_names,
clippy::single_match_else,
clippy::struct_excessive_bools,
clippy::too_many_lines
)]
// Refs:
// - https://doc.rust-lang.org/nightly/rustc/instrument-coverage.html
// - https://llvm.org/docs/CommandGuide/llvm-profdata.html
// - https://llvm.org/docs/CommandGuide/llvm-cov.html
use std::{
collections::{BTreeSet, HashMap},
ffi::{OsStr, OsString},
io::{self, BufRead, Write},
path::Path,
time::SystemTime,
};
use anyhow::{bail, Context as _, Result};
use camino::{Utf8Path, Utf8PathBuf};
use cargo_config2::Flags;
use cargo_llvm_cov::json::{CodeCovJsonExport, LlvmCovJsonExport};
use regex::Regex;
use walkdir::WalkDir;
use crate::{
cargo::Workspace,
cli::{Args, ShowEnvOptions, Subcommand},
context::Context,
process::ProcessBuilder,
regex_vec::{RegexVec, RegexVecBuilder},
term::Coloring,
};
#[macro_use]
mod term;
#[macro_use]
mod process;
mod cargo;
mod clean;
mod cli;
mod context;
mod demangle;
mod env;
mod fs;
mod regex_vec;
/// Entry point: runs `try_main` and decides the process exit code.
fn main() {
    term::init_coloring();
    if let Err(e) = try_main() {
        error!("{e:#}");
    }
    // Exit non-zero when an error was reported, or when warnings were
    // emitted and CARGO_LLVM_COV_DENY_WARNINGS=true. Mind the precedence:
    // `||` binds looser than `&&`, so this is `error || (warn && deny)`.
    if term::error()
        || term::warn()
            && env::var_os("CARGO_LLVM_COV_DENY_WARNINGS").filter(|v| v == "true").is_some()
    {
        std::process::exit(1)
    }
}
/// Parses CLI arguments and dispatches to the selected subcommand.
fn try_main() -> Result<()> {
    let mut args = Args::parse()?;
    term::verbose::set(args.verbose != 0);
    match args.subcommand {
        Subcommand::Demangle => demangle::run()?,
        Subcommand::Clean => clean::run(&mut args)?,
        // Prints the coverage environment to stdout instead of running
        // anything.
        Subcommand::ShowEnv => {
            let cx = &Context::new(args)?;
            let stdout = io::stdout();
            let writer =
                &mut ShowEnvWriter { writer: stdout.lock(), options: cx.args.show_env.clone() };
            set_env(cx, writer, IsNextest(true))?; // Include envs for nextest.
            writer.set("CARGO_LLVM_COV_TARGET_DIR", cx.ws.metadata.target_directory.as_str())?;
        }
        // Report generation only: no cleaning, no test execution.
        Subcommand::Report => {
            let cx = &Context::new(args)?;
            create_dirs(cx)?;
            generate_report(cx)?;
        }
        // The executing subcommands share the same shape: clear stale
        // partial coverage data, ensure output dirs, run, then (unless
        // --no-report) produce the report.
        Subcommand::Run => {
            let cx = &Context::new(args)?;
            clean::clean_partial(cx)?;
            create_dirs(cx)?;
            run_run(cx)?;
            if !cx.args.cov.no_report {
                generate_report(cx)?;
            }
        }
        Subcommand::Nextest => {
            let cx = &Context::new(args)?;
            clean::clean_partial(cx)?;
            create_dirs(cx)?;
            run_nextest(cx)?;
            if !cx.args.cov.no_report {
                generate_report(cx)?;
            }
        }
        // Bare `cargo llvm-cov` behaves like the `test` subcommand.
        Subcommand::None | Subcommand::Test => {
            let cx = &Context::new(args)?;
            clean::clean_partial(cx)?;
            create_dirs(cx)?;
            run_test(cx)?;
            if !cx.args.cov.no_report {
                generate_report(cx)?;
            }
        }
    }
    Ok(())
}
/// Creates the target directory plus any requested report output
/// directories (html/text subdirs, doctest dir) up front.
fn create_dirs(cx: &Context) -> Result<()> {
    fs::create_dir_all(&cx.ws.target_dir)?;
    if let Some(output_dir) = &cx.args.cov.output_dir {
        fs::create_dir_all(output_dir)?;
        if cx.args.cov.html {
            fs::create_dir_all(output_dir.join("html"))?;
        }
        if cx.args.cov.text {
            fs::create_dir_all(output_dir.join("text"))?;
        }
    }
    if cx.args.doctests {
        fs::create_dir_all(&cx.ws.doctests_dir)?;
    }
    Ok(())
}
/// Sink for environment variables: either applied to a child process
/// (`ProcessBuilder`) or printed by `show-env` (`ShowEnvWriter`).
trait EnvTarget {
    /// Sets `key=value` on the target.
    fn set(&mut self, key: &str, value: &str) -> Result<()>;
    /// Removes `key` from the target (best effort).
    fn unset(&mut self, key: &str) -> Result<()>;
}
// For a child process, set/unset map directly onto its environment.
impl EnvTarget for ProcessBuilder {
    fn set(&mut self, key: &str, value: &str) -> Result<()> {
        self.env(key, value);
        Ok(())
    }
    fn unset(&mut self, key: &str) -> Result<()> {
        self.env_remove(key);
        Ok(())
    }
}
/// `EnvTarget` that prints `KEY=VALUE` lines (optionally prefixed with
/// `export `) instead of applying them; used by the `show-env` subcommand.
struct ShowEnvWriter<W: io::Write> {
    writer: W,
    options: ShowEnvOptions,
}
impl<W: io::Write> EnvTarget for ShowEnvWriter<W> {
    fn set(&mut self, key: &str, value: &str) -> Result<()> {
        let prefix = if self.options.export_prefix { "export " } else { "" };
        // Shell-escape the value so the printed line can be `eval`ed safely.
        writeln!(self.writer, "{prefix}{key}={}", shell_escape::escape(value.into()))
            .context("failed to write env to stdout")
    }
    fn unset(&mut self, key: &str) -> Result<()> {
        // Only assignments are emitted, so a variable already present in the
        // caller's environment cannot be unset from here; warn instead of
        // silently ignoring it.
        if env::var_os(key).is_some() {
            warn!("cannot unset environment variable `{key}`");
        }
        Ok(())
    }
}
/// Newtype flag: whether env vars are being prepared for `cargo nextest`
/// (affects the LLVM_PROFILE_FILE merge-pool pattern in `set_env`).
struct IsNextest(bool);
/// Computes every environment variable needed for coverage instrumentation
/// and applies it to `env`: RUSTFLAGS/RUSTDOCFLAGS (plain or encoded),
/// LLVM_PROFILE_FILE, optional C/C++ flags for `--include-ffi`, and the
/// CARGO_LLVM_COV marker variables.
fn set_env(cx: &Context, env: &mut dyn EnvTarget, IsNextest(is_nextest): IsNextest) -> Result<()> {
    // Flags shared by RUSTFLAGS and RUSTDOCFLAGS.
    fn push_common_flags(cx: &Context, flags: &mut Flags) {
        if cx.ws.stable_coverage {
            flags.push("-C");
            flags.push("instrument-coverage");
        } else {
            flags.push("-Z");
            flags.push("instrument-coverage");
            if cfg!(windows) {
                // `-C codegen-units=1` is needed to work around link error on windows
                // https://github.com/rust-lang/rust/issues/85461
                // https://github.com/microsoft/windows-rs/issues/1006#issuecomment-887789950
                // This has been fixed in https://github.com/rust-lang/rust/pull/91470,
                // but old nightly compilers still need this.
                flags.push("-C");
                flags.push("codegen-units=1");
            }
        }
        // Workaround for https://github.com/rust-lang/rust/issues/91092.
        // Unnecessary since https://github.com/rust-lang/rust/pull/111469.
        if cx.ws.rustc_version.nightly && cx.ws.rustc_version.minor <= 71
            || !cx.ws.rustc_version.nightly && cx.ws.rustc_version.minor < 71
        {
            flags.push("-C");
            flags.push("llvm-args=--instrprof-atomic-counter-update-all");
        }
        if !cx.args.cov.no_cfg_coverage {
            flags.push("--cfg=coverage");
        }
        if cx.ws.rustc_version.nightly && !cx.args.cov.no_cfg_coverage_nightly {
            flags.push("--cfg=coverage_nightly");
        }
    }
    // Build the LLVM_PROFILE_FILE pattern `<name>-%p-…` under the target dir
    // (`%p` = PID; `%m`/`%Nm` = profile merge pool).
    let mut llvm_profile_file_name = format!("{}-%p", cx.ws.name);
    if is_nextest {
        // https://github.com/taiki-e/cargo-llvm-cov/issues/258
        // https://clang.llvm.org/docs/SourceBasedCodeCoverage.html#running-the-instrumented-program
        // Select the number of threads that is the same as the one nextest uses by default here.
        // https://github.com/nextest-rs/nextest/blob/c54694dfe7be016993983b5dedbcf2b50d4b1a6e/nextest-runner/src/config/test_threads.rs
        // https://github.com/nextest-rs/nextest/blob/c54694dfe7be016993983b5dedbcf2b50d4b1a6e/nextest-runner/src/config/config_impl.rs#L30
        // TODO: should we respect custom test-threads?
        // - If the number of threads specified by the user is negative or
        //   less or equal to available cores, it should not really be a problem
        //   because it does not exceed the number of available cores.
        // - Even if the number of threads specified by the user is greater than
        //   available cores, it is expected that the number of threads that can
        //   write simultaneously will not exceed the number of available cores.
        llvm_profile_file_name.push_str(&format!(
            "-%{}m",
            std::thread::available_parallelism().map_or(1, usize::from)
        ));
    } else {
        llvm_profile_file_name.push_str("-%m");
    }
    llvm_profile_file_name.push_str(".profraw");
    let llvm_profile_file = cx.ws.target_dir.join(llvm_profile_file_name);
    // Start from any RUSTFLAGS already configured for the target and append
    // the coverage flags.
    let rustflags = &mut cx.ws.config.rustflags(&cx.ws.target_for_config)?.unwrap_or_default();
    push_common_flags(cx, rustflags);
    if cx.args.remap_path_prefix {
        rustflags.push("--remap-path-prefix");
        rustflags.push(format!("{}/=", cx.ws.metadata.workspace_root));
    }
    if cx.args.target.is_none() {
        // https://github.com/dtolnay/trybuild/pull/121
        // https://github.com/dtolnay/trybuild/issues/122
        // https://github.com/dtolnay/trybuild/pull/123
        rustflags.push("--cfg=trybuild_no_target");
    }
    // https://doc.rust-lang.org/nightly/rustc/instrument-coverage.html#including-doc-tests
    let rustdocflags = &mut cx.ws.config.build.rustdocflags.clone();
    if cx.args.doctests {
        let rustdocflags = rustdocflags.get_or_insert_with(Flags::default);
        push_common_flags(cx, rustdocflags);
        rustdocflags.push("-Z");
        rustdocflags.push("unstable-options");
        rustdocflags.push("--persist-doctests");
        rustdocflags.push(cx.ws.doctests_dir.as_str());
    }
    match (cx.args.coverage_target_only, &cx.args.target) {
        (true, Some(coverage_target)) => {
            // Apply flags only to the coverage target via the target-specific
            // CARGO_TARGET_<T>_RUSTFLAGS, and clear the generic variables so
            // they don't also apply to host builds.
            env.set(
                &format!("CARGO_TARGET_{}_RUSTFLAGS", target_u_upper(coverage_target)),
                &rustflags.encode_space_separated()?,
            )?;
            env.unset("RUSTFLAGS")?;
            env.unset("CARGO_ENCODED_RUSTFLAGS")?;
        }
        _ => {
            // First, try with RUSTFLAGS because `nextest` subcommand sometimes doesn't work well with encoded flags.
            if let Ok(v) = rustflags.encode_space_separated() {
                env.set("RUSTFLAGS", &v)?;
                env.unset("CARGO_ENCODED_RUSTFLAGS")?;
            } else {
                env.set("CARGO_ENCODED_RUSTFLAGS", &rustflags.encode()?)?;
            }
        }
    }
    if let Some(rustdocflags) = rustdocflags {
        // First, try with RUSTDOCFLAGS because `nextest` subcommand sometimes doesn't work well with encoded flags.
        if let Ok(v) = rustdocflags.encode_space_separated() {
            env.set("RUSTDOCFLAGS", &v)?;
            env.unset("CARGO_ENCODED_RUSTDOCFLAGS")?;
        } else {
            env.set("CARGO_ENCODED_RUSTDOCFLAGS", &rustdocflags.encode()?)?;
        }
    }
    if cx.args.include_ffi {
        // https://github.com/rust-lang/cc-rs/blob/1.0.73/src/lib.rs#L2347-L2365
        // Environment variables that use hyphens are not available in many environments, so we ignore them for now.
        let target_u = target_u_lower(cx.ws.target_for_config.triple());
        let cflags_key = &format!("CFLAGS_{target_u}");
        // Use std::env instead of crate::env to match cc-rs's behavior.
        // https://github.com/rust-lang/cc-rs/blob/1.0.73/src/lib.rs#L2740
        let mut cflags = match std::env::var(cflags_key) {
            Ok(cflags) => cflags,
            Err(_) => match std::env::var("TARGET_CFLAGS") {
                Ok(cflags) => cflags,
                Err(_) => std::env::var("CFLAGS").unwrap_or_default(),
            },
        };
        let cxxflags_key = &format!("CXXFLAGS_{target_u}");
        let mut cxxflags = match std::env::var(cxxflags_key) {
            Ok(cxxflags) => cxxflags,
            Err(_) => match std::env::var("TARGET_CXXFLAGS") {
                Ok(cxxflags) => cxxflags,
                Err(_) => std::env::var("CXXFLAGS").unwrap_or_default(),
            },
        };
        // Instrument C/C++ code with clang's source-based coverage as well.
        let clang_flags = " -fprofile-instr-generate -fcoverage-mapping -fprofile-update=atomic";
        cflags.push_str(clang_flags);
        cxxflags.push_str(clang_flags);
        env.set(cflags_key, &cflags)?;
        env.set(cxxflags_key, &cxxflags)?;
    }
    env.set("LLVM_PROFILE_FILE", llvm_profile_file.as_str())?;
    env.set("CARGO_LLVM_COV", "1")?;
    if cx.args.subcommand == Subcommand::ShowEnv {
        env.set("CARGO_LLVM_COV_SHOW_ENV", "1")?;
    }
    Ok(())
}
/// Returns `true` if `args` contains the unstable flag `name`, in either
/// the separated (`-Z name` / `-Z name=value`) or attached (`-Zname` /
/// `-Zname=value`) spelling.
///
/// Fix: the original called `iter.next().unwrap()` after a separated `-Z`,
/// which panicked when `-Z` was the final argument; a trailing `-Z` with no
/// value now simply yields `false`.
fn has_z_flag(args: &[String], name: &str) -> bool {
    let mut iter = args.iter().map(String::as_str);
    while let Some(mut arg) = iter.next() {
        if arg == "-Z" {
            // Separated form: the flag name is the next argument (if any).
            arg = match iter.next() {
                Some(next) => next,
                None => return false,
            };
        } else if let Some(a) = arg.strip_prefix("-Z") {
            // Attached form: `-Zname[=value]`.
            arg = a;
        } else {
            continue;
        }
        // Match `name` exactly or `name=…`, but not a longer flag that
        // merely starts with `name`.
        if let Some(rest) = arg.strip_prefix(name) {
            if rest.is_empty() || rest.starts_with('=') {
                return true;
            }
        }
    }
    false
}
/// Runs `cargo test` with the coverage instrumentation environment applied.
fn run_test(cx: &Context) -> Result<()> {
    let mut cargo = cx.cargo();
    set_env(cx, &mut cargo, IsNextest(false))?;
    cargo.arg("test");
    if cx.ws.need_doctest_in_workspace && !has_z_flag(&cx.args.cargo_args, "doctest-in-workspace") {
        // https://github.com/rust-lang/cargo/issues/9427
        cargo.arg("-Z");
        cargo.arg("doctest-in-workspace");
    }
    if cx.args.ignore_run_fail {
        {
            // Build first with `--no-run` so that *build* failures still
            // abort; only test-run failures are downgraded below.
            let mut cargo = cargo.clone();
            cargo.arg("--no-run");
            cargo::test_or_run_args(cx, &mut cargo);
            if term::verbose() {
                status!("Running", "{cargo}");
                cargo.stdout_to_stderr().run()?;
            } else {
                // Capture output to prevent duplicate warnings from appearing in two runs.
                cargo.run_with_output()?;
            }
        }
        cargo.arg("--no-fail-fast");
        cargo::test_or_run_args(cx, &mut cargo);
        if term::verbose() {
            status!("Running", "{cargo}");
        }
        stdout_to_stderr(cx, &mut cargo);
        // --ignore-run-fail: failing tests become a warning, not an error,
        // so a report is still produced.
        if let Err(e) = cargo.run() {
            warn!("{e:#}");
        }
    } else {
        cargo::test_or_run_args(cx, &mut cargo);
        if term::verbose() {
            status!("Running", "{cargo}");
        }
        stdout_to_stderr(cx, &mut cargo);
        cargo.run()?;
    }
    Ok(())
}
/// Runs `cargo nextest run` with the coverage instrumentation environment
/// applied (nextest-tuned LLVM_PROFILE_FILE pattern via `IsNextest(true)`).
fn run_nextest(cx: &Context) -> Result<()> {
    let mut cargo = cx.cargo();
    set_env(cx, &mut cargo, IsNextest(true))?;
    cargo.arg("nextest").arg("run");
    if cx.args.ignore_run_fail {
        {
            // Build first with `--no-run` so that *build* failures still
            // abort; only test-run failures are downgraded below.
            let mut cargo = cargo.clone();
            cargo.arg("--no-run");
            cargo::test_or_run_args(cx, &mut cargo);
            if term::verbose() {
                status!("Running", "{cargo}");
                cargo.stdout_to_stderr().run()?;
            } else {
                // Capture output to prevent duplicate warnings from appearing in two runs.
                cargo.run_with_output()?;
            }
        }
        cargo.arg("--no-fail-fast");
        cargo::test_or_run_args(cx, &mut cargo);
        if term::verbose() {
            status!("Running", "{cargo}");
        }
        stdout_to_stderr(cx, &mut cargo);
        // --ignore-run-fail: failing tests become a warning, not an error.
        if let Err(e) = cargo.run() {
            warn!("{e:#}");
        }
    } else {
        cargo::test_or_run_args(cx, &mut cargo);
        if term::verbose() {
            status!("Running", "{cargo}");
        }
        stdout_to_stderr(cx, &mut cargo);
        cargo.run()?;
    }
    Ok(())
}
/// Runs `cargo run` with the coverage instrumentation environment applied.
fn run_run(cx: &Context) -> Result<()> {
    let mut cargo = cx.cargo();
    set_env(cx, &mut cargo, IsNextest(false))?;
    if cx.args.ignore_run_fail {
        {
            // `cargo build` first so that *build* failures still abort; only
            // runtime failures of the binary are downgraded below.
            let mut cargo = cargo.clone();
            cargo.arg("build");
            cargo::test_or_run_args(cx, &mut cargo);
            if term::verbose() {
                status!("Running", "{cargo}");
                cargo.stdout_to_stderr().run()?;
            } else {
                // Capture output to prevent duplicate warnings from appearing in two runs.
                cargo.run_with_output()?;
            }
        }
        cargo.arg("run");
        cargo::test_or_run_args(cx, &mut cargo);
        if term::verbose() {
            status!("Running", "{cargo}");
        }
        stdout_to_stderr(cx, &mut cargo);
        // --ignore-run-fail: a failing run becomes a warning, not an error.
        if let Err(e) = cargo.run() {
            warn!("{e:#}");
        }
    } else {
        cargo.arg("run");
        cargo::test_or_run_args(cx, &mut cargo);
        if term::verbose() {
            status!("Running", "{cargo}");
        }
        stdout_to_stderr(cx, &mut cargo);
        cargo.run()?;
    }
    Ok(())
}
/// Redirects the cargo invocation's stdout to stderr when the coverage
/// report itself will later be written to stdout, so the two don't mix.
fn stdout_to_stderr(cx: &Context, cargo: &mut ProcessBuilder) {
    // The report goes to stdout only when a report is requested and neither
    // an output dir nor an output path is set.
    let report_goes_to_stdout = !cx.args.cov.no_report
        && cx.args.cov.output_dir.is_none()
        && cx.args.cov.output_path.is_none();
    if report_goes_to_stdout {
        cargo.stdout_to_stderr();
    }
}
/// Merges profile data and produces the final report, then evaluates the
/// `--fail-*` thresholds and `--show-missing-lines` against a JSON export,
/// and optionally opens the HTML report.
fn generate_report(cx: &Context) -> Result<()> {
    merge_profraw(cx).context("failed to merge profile data")?;
    let object_files = object_files(cx).context("failed to collect object files")?;
    let ignore_filename_regex = ignore_filename_regex(cx);
    let format = Format::from_args(cx);
    format
        .generate_report(cx, &object_files, ignore_filename_regex.as_deref())
        .context("failed to generate report")?;
    // The threshold checks below all operate on a JSON export produced by a
    // second llvm-cov invocation, regardless of the user-selected format.
    if cx.args.cov.fail_under_lines.is_some()
        || cx.args.cov.fail_uncovered_functions.is_some()
        || cx.args.cov.fail_uncovered_lines.is_some()
        || cx.args.cov.fail_uncovered_regions.is_some()
        || cx.args.cov.show_missing_lines
    {
        let format = Format::Json;
        let json = format
            .get_json(cx, &object_files, ignore_filename_regex.as_ref())
            .context("failed to get json")?;
        if let Some(fail_under_lines) = cx.args.cov.fail_under_lines {
            // Handle --fail-under-lines.
            let lines_percent = json.get_lines_percent().context("failed to get line coverage")?;
            if lines_percent < fail_under_lines {
                // Threshold violations are recorded, not returned: main()
                // converts the error flag into a non-zero exit status.
                term::error::set(true);
            }
        }
        if let Some(fail_uncovered_functions) = cx.args.cov.fail_uncovered_functions {
            // Handle --fail-uncovered-functions.
            let uncovered =
                json.count_uncovered_functions().context("failed to count uncovered functions")?;
            if uncovered > fail_uncovered_functions {
                term::error::set(true);
            }
        }
        if let Some(fail_uncovered_lines) = cx.args.cov.fail_uncovered_lines {
            // Handle --fail-uncovered-lines.
            let uncovered_files = json.get_uncovered_lines(ignore_filename_regex.as_deref());
            let uncovered = uncovered_files
                .iter()
                .fold(0_u64, |uncovered, (_, lines)| uncovered + lines.len() as u64);
            if uncovered > fail_uncovered_lines {
                term::error::set(true);
            }
        }
        if let Some(fail_uncovered_regions) = cx.args.cov.fail_uncovered_regions {
            // Handle --fail-uncovered-regions.
            let uncovered =
                json.count_uncovered_regions().context("failed to count uncovered regions")?;
            if uncovered > fail_uncovered_regions {
                term::error::set(true);
            }
        }
        if cx.args.cov.show_missing_lines {
            // Handle --show-missing-lines.
            let uncovered_files = json.get_uncovered_lines(ignore_filename_regex.as_deref());
            if !uncovered_files.is_empty() {
                let stdout = io::stdout();
                let mut stdout = stdout.lock();
                writeln!(stdout, "Uncovered Lines:")?;
                for (file, lines) in &uncovered_files {
                    let lines: Vec<_> = lines.iter().map(ToString::to_string).collect();
                    writeln!(stdout, "{file}: {}", lines.join(", "))?;
                }
                stdout.flush()?;
            }
        }
    }
    if cx.args.cov.open {
        // NOTE(review): `unwrap()` assumes `--open` is only accepted together
        // with an HTML report into `--output-dir` — confirm this is enforced
        // by argument validation upstream.
        let path = &cx.args.cov.output_dir.as_ref().unwrap().join("html/index.html");
        status!("Opening", "{path}");
        open_report(cx, path)?;
    }
    Ok(())
}
/// Opens the generated report at `path`, preferring the browser configured
/// in cargo's `doc.browser` setting and falling back to the OS default.
fn open_report(cx: &Context, path: &Utf8Path) -> Result<()> {
    if let Some(browser) = &cx.ws.config.doc.browser {
        cmd!(&browser.path)
            .args(&browser.args)
            .arg(path)
            .run()
            .with_context(|| format!("couldn't open report with {}", browser.path.display()))?;
    } else {
        opener::open(path).context("couldn't open report")?;
    }
    Ok(())
}
/// Merges all `<name>-*.profraw` files in the target dir into the single
/// `.profdata` file the report is generated from, via `llvm-profdata merge`.
fn merge_profraw(cx: &Context) -> Result<()> {
    // Convert raw profile data.
    let profraw_files = glob::glob(
        Utf8Path::new(&glob::Pattern::escape(cx.ws.target_dir.as_str()))
            .join(format!("{}-*.profraw", cx.ws.name))
            .as_str(),
    )?
    .filter_map(Result::ok)
    .collect::<Vec<_>>();
    if profraw_files.is_empty() {
        warn!(
            "not found *.profraw files in {}; this may occur if target directory is accidentally \
             cleared, or running report subcommand without running any tests or binaries",
            cx.ws.target_dir
        );
    }
    // Pass the inputs via a newline-separated list file (`-f`) rather than
    // argv, one path per line.
    let mut input_files = String::new();
    for path in profraw_files {
        input_files.push_str(
            path.to_str().with_context(|| format!("{path:?} contains invalid utf-8 data"))?,
        );
        input_files.push('\n');
    }
    let input_files_path = &cx.ws.target_dir.join(format!("{}-profraw-list", cx.ws.name));
    fs::write(input_files_path, input_files)?;
    let mut cmd = cx.process(&cx.llvm_profdata);
    cmd.args(["merge", "-sparse"])
        .arg("-f")
        .arg(input_files_path)
        .arg("-o")
        .arg(&cx.ws.profdata_file);
    if let Some(mode) = &cx.args.cov.failure_mode {
        cmd.arg(format!("-failure-mode={mode}"));
    }
    // Extra user-supplied flags, split on spaces with empties dropped.
    if let Some(flags) = &cx.llvm_profdata_flags {
        cmd.args(flags.split(' ').filter(|s| !s.trim().is_empty()));
    }
    if term::verbose() {
        status!("Running", "{cmd}");
    }
    cmd.stdout_to_stderr().run()?;
    Ok(())
}
/// Collects the instrumented object files (test binaries, executables,
/// doctest binaries, trybuild artifacts) to pass to `llvm-cov -object`.
fn object_files(cx: &Context) -> Result<Vec<OsString>> {
    // Walks `target_dir`, pruning directories that never contain instrumented
    // objects (incremental, .fingerprint, out) and build-script binaries
    // (unless --include-build-script selects them via `build_script_re`).
    fn walk_target_dir<'a>(
        cx: &'a Context,
        target_dir: &Utf8Path,
    ) -> impl Iterator<Item = walkdir::DirEntry> + 'a {
        WalkDir::new(target_dir)
            .into_iter()
            .filter_entry(move |e| {
                let p = e.path();
                if p.is_dir() {
                    if p.file_name()
                        .map_or(false, |f| f == "incremental" || f == ".fingerprint" || f == "out")
                    {
                        return false;
                    }
                } else if let Some(stem) = p.file_stem() {
                    let stem = stem.to_string_lossy();
                    if stem == "build-script-build" || stem.starts_with("build_script_build-") {
                        let p = p.parent().unwrap();
                        if p.parent().unwrap().file_name().unwrap() == "build" {
                            if cx.args.cov.include_build_script {
                                let dir = p.file_name().unwrap().to_string_lossy();
                                if !cx.build_script_re.is_match(&dir) {
                                    return false;
                                }
                            } else {
                                return false;
                            }
                        }
                    }
                }
                true
            })
            .filter_map(Result::ok)
    }
    // An "object" is any executable file, or a `.dll` when targeting Windows.
    fn is_object(cx: &Context, f: &Path) -> bool {
        is_executable::is_executable(f)
            || cx.args.target.as_ref().map_or(cfg!(windows), |t| t.contains("-windows"))
                && f.extension() == Some(OsStr::new("dll"))
    }
    let re = Targets::new(&cx.ws).pkg_hash_re()?;
    let mut files = vec![];
    // Accumulates the searched locations for the "nothing found" warning.
    let mut searched_dir = String::new();
    // To support testing binary crate like tests that use the CARGO_BIN_EXE
    // environment variable, pass all compiled executables.
    // This is not the ideal way, but the way unstable book says it is cannot support them.
    // https://doc.rust-lang.org/nightly/rustc/instrument-coverage.html#tips-for-listing-the-binaries-automatically
    let mut target_dir = cx.ws.target_dir.clone();
    // https://doc.rust-lang.org/nightly/cargo/guide/build-cache.html
    if let Some(target) = &cx.args.target {
        target_dir.push(target);
    }
    // Map the cargo profile name to its artifact directory.
    // https://doc.rust-lang.org/nightly/cargo/reference/profiles.html#custom-profiles
    let profile = match cx.args.profile.as_deref() {
        None if cx.args.release => "release",
        None => "debug",
        Some("release" | "bench") => "release",
        Some("dev" | "test") => "debug",
        Some(p) => p,
    };
    target_dir.push(profile);
    for f in walk_target_dir(cx, &target_dir) {
        let f = f.path();
        if is_object(cx, f) {
            // Keep only artifacts whose file stem matches a workspace
            // package/target name (optionally suffixed with a metadata hash).
            if let Some(file_stem) = fs::file_stem_recursive(f).unwrap().to_str() {
                if re.is_match(file_stem) {
                    files.push(make_relative(cx, f).to_owned().into_os_string());
                }
            }
        }
    }
    searched_dir.push_str(target_dir.as_str());
    if cx.args.doctests {
        // Persisted doctest binaries are named `*/rust_out`.
        for f in glob::glob(
            Utf8Path::new(&glob::Pattern::escape(cx.ws.doctests_dir.as_str()))
                .join("*/rust_out")
                .as_str(),
        )?
        .filter_map(Result::ok)
        {
            if is_object(cx, &f) {
                files.push(make_relative(cx, &f).to_owned().into_os_string());
            }
        }
        searched_dir.push(',');
        searched_dir.push_str(cx.ws.doctests_dir.as_str());
    }
    // trybuild
    let mut trybuild_target_dir = cx.ws.trybuild_target_dir();
    if let Some(target) = &cx.args.target {
        trybuild_target_dir.push(target);
    }
    // Currently, trybuild always use debug build.
    trybuild_target_dir.push("debug");
    if trybuild_target_dir.is_dir() {
        let mut trybuild_targets = vec![];
        for metadata in trybuild_metadata(&cx.ws.metadata.target_directory)? {
            for package in metadata.packages {
                for target in package.targets {
                    trybuild_targets.push(target.name);
                }
            }
        }
        if !trybuild_targets.is_empty() {
            let re =
                Regex::new(&format!("^({})(-[0-9a-f]+)?$", trybuild_targets.join("|"))).unwrap();
            for entry in walk_target_dir(cx, &trybuild_target_dir) {
                let path = make_relative(cx, entry.path());
                // Objects matching a trybuild test-case target name are
                // skipped; other artifacts in the trybuild target dir are
                // kept. NOTE(review): presumably the skipped binaries are the
                // ephemeral test-case crates themselves — confirm intent.
                if let Some(file_stem) = fs::file_stem_recursive(path).unwrap().to_str() {
                    if re.is_match(file_stem) {
                        continue;
                    }
                }
                if is_object(cx, path) {
                    files.push(path.to_owned().into_os_string());
                }
            }
            searched_dir.push(',');
            searched_dir.push_str(trybuild_target_dir.as_str());
        }
    }
    // This sort is necessary to make the result of `llvm-cov show` match between macos and linux.
    files.sort_unstable();
    if files.is_empty() {
        warn!(
            "not found object files (searched directories: {searched_dir}); this may occur if \
             show-env subcommand is used incorrectly (see docs or other warnings), or unsupported \
             commands such as nextest archive are used",
        );
    }
    Ok(files)
}
/// Names of all workspace member packages and their build targets, used to
/// recognize which artifact file names belong to the workspace.
struct Targets {
    packages: BTreeSet<String>,
    targets: BTreeSet<String>,
}
impl Targets {
    /// Collects the names of every workspace member package and of each of
    /// its build targets.
    fn new(ws: &Workspace) -> Self {
        let mut this = Self { packages: BTreeSet::new(), targets: BTreeSet::new() };
        for id in &ws.metadata.workspace_members {
            let pkg = &ws.metadata[id];
            this.packages.insert(pkg.name.clone());
            this.targets.extend(pkg.targets.iter().map(|t| t.name.clone()));
        }
        this
    }
    /// Builds a regex set matching artifact file stems of workspace packages
    /// or targets: `name` or `libname`, optionally followed by `-<hex hash>`,
    /// with `-` and `_` treated as interchangeable.
    fn pkg_hash_re(&self) -> Result<RegexVec> {
        let mut re = RegexVecBuilder::new("^(lib)?(", ")(-[0-9a-f]+)?$");
        for name in self.packages.iter().chain(&self.targets) {
            re.or(&name.replace('-', "(-|_)"));
        }
        re.build()
    }
}
/// Collects metadata for packages generated by trybuild. If the trybuild test
/// directory is not found, it returns an empty vector.
fn trybuild_metadata(target_dir: &Utf8Path) -> Result<Vec<cargo_metadata::Metadata>> {
    // Accept both the current (`target/tests/trybuild`) and the older
    // (`target/tests`) directory layouts.
    // https://github.com/dtolnay/trybuild/pull/219
    let mut trybuild_dir = target_dir.join("tests").join("trybuild");
    if !trybuild_dir.is_dir() {
        trybuild_dir.pop();
        if !trybuild_dir.is_dir() {
            return Ok(vec![]);
        }
    }
    // Every subdirectory with a Cargo.toml is a trybuild-generated package.
    let manifests = fs::read_dir(trybuild_dir)?
        .filter_map(Result::ok)
        .map(|entry| entry.path().join("Cargo.toml"))
        .filter(|manifest_path| manifest_path.is_file());
    let mut metadata = vec![];
    for manifest_path in manifests {
        metadata.push(
            cargo_metadata::MetadataCommand::new().manifest_path(manifest_path).no_deps().exec()?,
        );
    }
    Ok(metadata)
}
/// Output format of the final report, selected by mutually exclusive CLI
/// flags (see `from_args`). Each variant's doc is the `llvm-cov` invocation
/// it maps to (see `llvm_cov_args`).
#[derive(Debug, Clone, Copy, PartialEq)]
enum Format {
    /// `llvm-cov report`
    None,
    /// `llvm-cov export -format=text`
    Json,
    /// `llvm-cov export -format=lcov`
    LCov,
    /// `llvm-cov export -format=lcov` later converted to XML
    Cobertura,
    /// `llvm-cov export -format=text` later converted to Codecov JSON
    Codecov,
    /// `llvm-cov show -format=text`
    Text,
    /// `llvm-cov show -format=html`
    Html,
}
impl Format {
    /// Selects the report format from the mutually exclusive CLI flags.
    fn from_args(cx: &Context) -> Self {
        if cx.args.cov.json {
            Self::Json
        } else if cx.args.cov.lcov {
            Self::LCov
        } else if cx.args.cov.cobertura {
            Self::Cobertura
        } else if cx.args.cov.codecov {
            Self::Codecov
        } else if cx.args.cov.text {
            Self::Text
        } else if cx.args.cov.html {
            Self::Html
        } else {
            Self::None
        }
    }
    /// The `llvm-cov` subcommand and `-format` flag this format maps to.
    const fn llvm_cov_args(self) -> &'static [&'static str] {
        match self {
            Self::None => &["report"],
            Self::Json | Self::Codecov => &["export", "-format=text"],
            Self::LCov | Self::Cobertura => &["export", "-format=lcov"],
            Self::Text => &["show", "-format=text"],
            Self::Html => &["show", "-format=html"],
        }
    }
    /// Returns the `-use-color` flag to pass to `llvm-cov`, if any.
    fn use_color(self, cx: &Context) -> Option<&'static str> {
        // `llvm-cov export` doesn't have `-use-color` flag, so every
        // export-based format must return `None` here. Fix: Cobertura and
        // Codecov also run `llvm-cov export` (see `llvm_cov_args`) but were
        // missing from this list, which could pass the unknown `-use-color`
        // flag to `llvm-cov export`.
        // https://llvm.org/docs/CommandGuide/llvm-cov.html#llvm-cov-export
        // Color output cannot be disabled when generating html.
        if matches!(self, Self::Json | Self::LCov | Self::Cobertura | Self::Codecov | Self::Html) {
            return None;
        }
        if self == Self::Text && cx.args.cov.output_dir.is_some() {
            // Text reports written to files should not contain escape codes.
            return Some("-use-color=0");
        }
        match cx.args.color {
            Some(Coloring::Auto) | None => None,
            Some(Coloring::Always) => Some("-use-color=1"),
            Some(Coloring::Never) => Some("-use-color=0"),
        }
    }
    /// Runs `llvm-cov` and writes the report to the destination selected by
    /// the CLI flags: stdout, `--output-path`, or `--output-dir`.
    fn generate_report(
        self,
        cx: &Context,
        object_files: &[OsString],
        ignore_filename_regex: Option<&str>,
    ) -> Result<()> {
        let mut cmd = cx.process(&cx.llvm_cov);
        cmd.args(self.llvm_cov_args());
        cmd.args(self.use_color(cx));
        cmd.arg(format!("-instr-profile={}", cx.ws.profdata_file));
        // Each object file is passed as its own `-object <file>` pair.
        cmd.args(object_files.iter().flat_map(|f| [OsStr::new("-object"), f]));
        if let Some(ignore_filename_regex) = ignore_filename_regex {
            cmd.arg("-ignore-filename-regex");
            cmd.arg(ignore_filename_regex);
        }
        match self {
            Self::Text | Self::Html => {
                cmd.args([
                    &format!("-show-instantiations={}", !cx.args.cov.hide_instantiations),
                    "-show-line-counts-or-regions",
                    "-show-expansions",
                    "-show-branches=count",
                    // Demangle symbols by re-invoking this executable as
                    // `<exe> llvm-cov demangle`.
                    &format!("-Xdemangler={}", cx.current_exe.display()),
                    "-Xdemangler=llvm-cov",
                    "-Xdemangler=demangle",
                ]);
                if let Some(output_dir) = &cx.args.cov.output_dir {
                    if self == Self::Html {
                        cmd.arg(format!("-output-dir={}", output_dir.join("html")));
                    } else {
                        cmd.arg(format!("-output-dir={}", output_dir.join("text")));
                    }
                }
            }
            Self::Json | Self::LCov | Self::Cobertura | Self::Codecov => {
                if cx.args.cov.summary_only {
                    cmd.arg("-summary-only");
                }
            }
            Self::None => {}
        }
        // Extra user-supplied flags, split on spaces with empties dropped.
        if let Some(flags) = &cx.llvm_cov_flags {
            cmd.args(flags.split(' ').filter(|s| !s.trim().is_empty()));
        }
        if cx.args.cov.cobertura {
            if term::verbose() {
                status!("Running", "{cmd}");
            }
            let lcov = cmd.read()?;
            // Convert the LCOV output to Cobertura XML.
            let cdata = lcov2cobertura::parse_lines(
                lcov.as_bytes().lines(),
                &cx.ws.metadata.workspace_root,
                &[],
            )?;
            let demangler = lcov2cobertura::RustDemangler::new();
            let now = SystemTime::now()
                .duration_since(SystemTime::UNIX_EPOCH)
                .context("SystemTime before UNIX EPOCH!")?
                .as_secs();
            let out = lcov2cobertura::coverage_to_string(&cdata, now, demangler)?;
            if let Some(output_path) = &cx.args.cov.output_path {
                fs::write(output_path, out)?;
                eprintln!();
                status!("Finished", "report saved to {output_path}");
            } else {
                // write XML to stdout
                println!("{out}");
            }
            return Ok(());
        }
        if cx.args.cov.codecov {
            if term::verbose() {
                status!("Running", "{cmd}");
            }
            // Convert the llvm-cov JSON export to Codecov's JSON format.
            let cov = cmd.read()?;
            let cov: LlvmCovJsonExport = serde_json::from_str(&cov)?;
            let cov = CodeCovJsonExport::from_llvm_cov_json_export(cov, ignore_filename_regex);
            let out = serde_json::to_string(&cov)?;
            if let Some(output_path) = &cx.args.cov.output_path {
                fs::write(output_path, out)?;
                eprintln!();
                status!("Finished", "report saved to {output_path}");
            } else {
                // write JSON to stdout
                println!("{out}");
            }
            return Ok(());
        }
        if let Some(output_path) = &cx.args.cov.output_path {
            if term::verbose() {
                status!("Running", "{cmd}");
            }
            let out = cmd.read()?;
            if self == Self::Json {
                // Inject the current manifest path into the JSON export.
                let mut cov = serde_json::from_str::<LlvmCovJsonExport>(&out)?;
                cov.inject(cx.ws.current_manifest.clone());
                fs::write(output_path, serde_json::to_string(&cov)?)?;
            } else {
                fs::write(output_path, out)?;
            }
            eprintln!();
            status!("Finished", "report saved to {output_path}");
            return Ok(());
        }
        if term::verbose() {
            status!("Running", "{cmd}");
        }
        if self == Self::Json {
            let out = cmd.read()?;
            let mut cov = serde_json::from_str::<LlvmCovJsonExport>(&out)?;
            cov.inject(cx.ws.current_manifest.clone());
            let stdout = std::io::stdout().lock();
            serde_json::to_writer(stdout, &cov)?;
        } else {
            cmd.run()?;
        }
        if matches!(self, Self::Html | Self::Text) {
            if let Some(output_dir) = &cx.args.cov.output_dir {
                eprintln!();
                if self == Self::Html {
                    status!("Finished", "report saved to {}", output_dir.join("html"));
                } else {
                    status!("Finished", "report saved to {}", output_dir.join("text"));
                }
            }
        }
        Ok(())
    }
    /// Generates JSON to perform further analysis on it.
    ///
    /// Runs `llvm-cov export -format=text` and parses the result; only valid
    /// for `Self::Json`, otherwise bails.
    fn get_json(
        self,
        cx: &Context,
        object_files: &[OsString],
        ignore_filename_regex: Option<&String>,
    ) -> Result<LlvmCovJsonExport> {
        if let Self::Json = self {
        } else {
            bail!("requested JSON for non-JSON type");
        }
        let mut cmd = cx.process(&cx.llvm_cov);
        cmd.args(self.llvm_cov_args());
        cmd.arg(format!("-instr-profile={}", cx.ws.profdata_file));
        cmd.args(object_files.iter().flat_map(|f| [OsStr::new("-object"), f]));
        if let Some(ignore_filename_regex) = ignore_filename_regex {
            cmd.arg("-ignore-filename-regex");
            cmd.arg(ignore_filename_regex);
        }
        if term::verbose() {
            status!("Running", "{cmd}");
        }
        let cmd_out = cmd.read()?;
        let json = serde_json::from_str::<LlvmCovJsonExport>(&cmd_out)
            .context("failed to parse json from llvm-cov")?;
        Ok(json)
    }
}
/// Builds the `-ignore-filename-regex` value: user-supplied patterns plus
/// (unless disabled) defaults excluding toolchain sources, registry/git
/// dependencies, the target dir, tests/examples/benches directories, and
/// explicitly excluded workspace members. Returns `None` when empty.
fn ignore_filename_regex(cx: &Context) -> Option<String> {
    // On windows, we should escape the separator.
    const SEPARATOR: &str = if cfg!(windows) { "\\\\" } else { "/" };
    // Accumulates `|`-joined regex alternatives.
    #[derive(Default)]
    struct Out(String);
    impl Out {
        fn push(&mut self, s: impl AsRef<str>) {
            if !self.0.is_empty() {
                self.0.push('|');
            }
            self.0.push_str(s.as_ref());
        }
        // Anchors an absolute path: matches the path itself or anything under it.
        fn push_abs_path(&mut self, path: impl AsRef<Path>) {
            let path = regex::escape(&path.as_ref().to_string_lossy());
            let path = format!("^{path}($|{SEPARATOR})");
            self.push(path);
        }
    }
    let mut out = Out::default();
    if let Some(ignore_filename) = &cx.args.cov.ignore_filename_regex {
        out.push(ignore_filename);
    }
    if !cx.args.cov.disable_default_ignore_filename_regex {
        // TODO: Should we use the actual target path instead of using `tests|examples|benches`?
        //       We may have a directory like tests/support, so maybe we need both?
        if cx.args.remap_path_prefix {
            out.push(format!(
                r"(^|{SEPARATOR})(rustc{SEPARATOR}[0-9a-f]+|tests|examples|benches){SEPARATOR}"
            ));
        } else {
            out.push(format!(
                r"{SEPARATOR}rustc{SEPARATOR}[0-9a-f]+{SEPARATOR}|^{}({SEPARATOR}.*)?{SEPARATOR}(tests|examples|benches){SEPARATOR}",
                regex::escape(cx.ws.metadata.workspace_root.as_str())
            ));
        }
        out.push_abs_path(&cx.ws.target_dir);
        if cx.args.remap_path_prefix {
            if let Some(path) = home::home_dir() {
                out.push_abs_path(path);
            }
        }
        if let Ok(path) = home::cargo_home() {
            // Exclude fetched dependencies under $CARGO_HOME/{registry,git}.
            let path = regex::escape(&path.as_os_str().to_string_lossy());
            let path = format!("^{path}{SEPARATOR}(registry|git){SEPARATOR}");
            out.push(path);
        }
        if let Ok(path) = home::rustup_home() {
            // Exclude toolchain sources under $RUSTUP_HOME/toolchains.
            out.push_abs_path(path.join("toolchains"));
        }
        for path in resolve_excluded_paths(cx) {
            out.push_abs_path(path);
        }
    }
    if out.0.is_empty() {
        None
    } else {
        Some(out.0)
    }
}
/// Computes the paths of excluded workspace members (relative to the
/// workspace root where possible) to feed into the ignore-filename regex.
///
/// When an excluded package's directory contains an included package
/// (nested members), the excluded directory cannot be excluded wholesale;
/// its contents are walked and only the parts not leading to an included
/// package are excluded.
fn resolve_excluded_paths(cx: &Context) -> Vec<Utf8PathBuf> {
    let excluded: Vec<_> = cx
        .workspace_members
        .excluded
        .iter()
        .map(|id| cx.ws.metadata[id].manifest_path.parent().unwrap())
        .collect();
    let included = cx
        .workspace_members
        .included
        .iter()
        .map(|id| cx.ws.metadata[id].manifest_path.parent().unwrap());
    let mut excluded_path = vec![];
    // Maps each excluded dir -> the included dirs nested inside it.
    let mut contains: HashMap<&Utf8Path, Vec<_>> = HashMap::new();
    for included in included {
        for &excluded in excluded.iter().filter(|e| included.starts_with(e)) {
            if let Some(v) = contains.get_mut(&excluded) {
                v.push(included);
            } else {
                contains.insert(excluded, vec![included]);
            }
        }
    }
    if contains.is_empty() {
        // Fast path: no nesting, exclude each member directory as a whole.
        for &manifest_dir in &excluded {
            let package_path =
                manifest_dir.strip_prefix(&cx.ws.metadata.workspace_root).unwrap_or(manifest_dir);
            excluded_path.push(package_path.to_owned());
        }
        return excluded_path;
    }
    for &excluded in &excluded {
        let included = match contains.get(&excluded) {
            Some(included) => included,
            None => {
                // No included package nested inside: exclude the whole dir.
                let package_path =
                    excluded.strip_prefix(&cx.ws.metadata.workspace_root).unwrap_or(excluded);
                excluded_path.push(package_path.to_owned());
                continue;
            }
        };
        // This walk is driven purely for its side effect of pushing into
        // `excluded_path`; the `filter_entry` closure decides both what to
        // record and where to descend, hence the empty loop body.
        for _ in WalkDir::new(excluded).into_iter().filter_entry(|e| {
            let p = e.path();
            if !p.is_dir() {
                // Record stray `.rs` files and never descend into files.
                if p.extension().map_or(false, |e| e == "rs") {
                    let p = p.strip_prefix(&cx.ws.metadata.workspace_root).unwrap_or(p);
                    excluded_path.push(p.to_owned().try_into().unwrap());
                }
                return false;
            }
            let mut contains = false;
            for included in included {
                if included.starts_with(p) {
                    if p.starts_with(included) {
                        // Inside an included package: do not exclude, stop descending.
                        return false;
                    }
                    contains = true;
                }
            }
            if contains {
                // continue to walk
                return true;
            }
            // Nothing included below this dir: exclude it wholesale.
            let p = p.strip_prefix(&cx.ws.metadata.workspace_root).unwrap_or(p);
            excluded_path.push(p.to_owned().try_into().unwrap());
            false
        }) {}
    }
    excluded_path
}
/// Converts a target triple into an env-var-safe form by mapping `-` and
/// `.` to `_` (e.g. `thumbv8m.main-none-eabi` -> `thumbv8m_main_none_eabi`).
fn target_u_lower(target: &str) -> String {
    target
        .chars()
        .map(|c| if matches!(c, '-' | '.') { '_' } else { c })
        .collect()
}
/// Uppercase variant of `target_u_lower`, as used in variable names like
/// `CARGO_TARGET_<T>_RUSTFLAGS` (helper inlined: `-`/`.` -> `_`).
fn target_u_upper(target: &str) -> String {
    target.replace(['-', '.'], "_").to_ascii_uppercase()
}
/// Make the path relative if it's a descendent of the current working dir, otherwise just return
/// the original path
fn make_relative<'a>(cx: &Context, p: &'a Path) -> &'a Path {
    match p.strip_prefix(&cx.current_dir) {
        Ok(relative) => relative,
        Err(_) => p,
    }
}
|
// svd2rust-generated memory map of the DCACHE peripheral; field order and
// the reserved padding define the register offsets.
#[doc = r"Register block"]
#[repr(C)]
pub struct RegisterBlock {
    #[doc = "0x00 - DCACHE control register"]
    pub cr: CR,
    #[doc = "0x04 - DCACHE status register"]
    pub sr: SR,
    #[doc = "0x08 - DCACHE interrupt enable register"]
    pub ier: IER,
    #[doc = "0x0c - DCACHE flag clear register"]
    pub fcr: FCR,
    #[doc = "0x10 - DCACHE read-hit monitor register"]
    pub rhmonr: RHMONR,
    #[doc = "0x14 - DCACHE read-miss monitor register"]
    pub rmmonr: RMMONR,
    // Padding: offsets 0x18..=0x1f are unused, so `whmonr` lands at 0x20.
    _reserved6: [u8; 0x08],
    #[doc = "0x20 - DCACHE write-hit monitor register"]
    pub whmonr: WHMONR,
    #[doc = "0x24 - DCACHE write-miss monitor register"]
    pub wmmonr: WMMONR,
    #[doc = "0x28 - DCACHE command range start address register"]
    pub cmdrsaddrr: CMDRSADDRR,
    #[doc = "0x2c - DCACHE command range end address register"]
    pub cmdreaddrr: CMDREADDRR,
}
// svd2rust-generated register accessor type aliases and their field modules.
#[doc = "CR (rw) register accessor: DCACHE control register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`cr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`cr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`cr`]
module"]
pub type CR = crate::Reg<cr::CR_SPEC>;
#[doc = "DCACHE control register"]
pub mod cr;
#[doc = "SR (r) register accessor: DCACHE status register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`sr::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`sr`]
module"]
pub type SR = crate::Reg<sr::SR_SPEC>;
#[doc = "DCACHE status register"]
pub mod sr;
#[doc = "IER (rw) register accessor: DCACHE interrupt enable register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`ier::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`ier::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`ier`]
module"]
pub type IER = crate::Reg<ier::IER_SPEC>;
#[doc = "DCACHE interrupt enable register"]
pub mod ier;
#[doc = "FCR (w) register accessor: DCACHE flag clear register\n\nYou can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`fcr::W`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`fcr`]
module"]
pub type FCR = crate::Reg<fcr::FCR_SPEC>;
#[doc = "DCACHE flag clear register"]
pub mod fcr;
#[doc = "RHMONR (r) register accessor: DCACHE read-hit monitor register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`rhmonr::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`rhmonr`]
module"]
pub type RHMONR = crate::Reg<rhmonr::RHMONR_SPEC>;
#[doc = "DCACHE read-hit monitor register"]
pub mod rhmonr;
#[doc = "RMMONR (r) register accessor: DCACHE read-miss monitor register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`rmmonr::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`rmmonr`]
module"]
pub type RMMONR = crate::Reg<rmmonr::RMMONR_SPEC>;
#[doc = "DCACHE read-miss monitor register"]
pub mod rmmonr;
#[doc = "WHMONR (r) register accessor: DCACHE write-hit monitor register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`whmonr::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`whmonr`]
module"]
pub type WHMONR = crate::Reg<whmonr::WHMONR_SPEC>;
#[doc = "DCACHE write-hit monitor register"]
pub mod whmonr;
#[doc = "WMMONR (r) register accessor: DCACHE write-miss monitor register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`wmmonr::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`wmmonr`]
module"]
pub type WMMONR = crate::Reg<wmmonr::WMMONR_SPEC>;
#[doc = "DCACHE write-miss monitor register"]
pub mod wmmonr;
#[doc = "CMDRSADDRR (rw) register accessor: DCACHE command range start address register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`cmdrsaddrr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`cmdrsaddrr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`cmdrsaddrr`]
module"]
pub type CMDRSADDRR = crate::Reg<cmdrsaddrr::CMDRSADDRR_SPEC>;
#[doc = "DCACHE command range start address register"]
pub mod cmdrsaddrr;
#[doc = "CMDREADDRR (rw) register accessor: DCACHE command range end address register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`cmdreaddrr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`cmdreaddrr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`cmdreaddrr`]
module"]
pub type CMDREADDRR = crate::Reg<cmdreaddrr::CMDREADDRR_SPEC>;
#[doc = "DCACHE command range end address register"]
pub mod cmdreaddrr;
|
use core::{iter, ops::Deref as _};
use anyhow::{bail, ensure, Error, Result};
use error_utils::{DebugAsError, SyncError};
use eth2_libp2p::{
rpc::{
methods::{BlocksByRangeRequest, BlocksByRootRequest, GoodbyeReason, StatusMessage},
ErrorMessage, RPCError, RPCErrorResponse, RPCRequest, RPCResponse, RequestId,
ResponseTermination,
},
Libp2pEvent, MessageId, PeerId, PubsubMessage, RPCEvent, Service, Topic, TopicHash,
};
use eth2_network::{Network, Networked, Status};
use ethereum_types::H32;
use fmt_extra::{AsciiStr, Hs};
use futures::{
future, try_ready,
unsync::mpsc::{self, UnboundedReceiver, UnboundedSender},
Async, Future, Poll, Stream as _,
};
use helper_functions::misc;
use log::info;
use slog::{o, Drain as _, Logger};
use slog_stdlog::StdLog;
use ssz::{Decode as _, Encode as _};
use thiserror::Error;
use types::{
config::Config,
primitives::Version,
types::{Attestation, BeaconBlock},
};
pub use eth2_libp2p::NetworkConfig;
pub use qutex::{Guard, Qutex};
/// Everything that can go wrong while handling a single libp2p event.
///
/// The `#[error(...)]` strings are the user-visible messages; they are part of
/// the observable behavior and must not be reworded casually.
#[derive(Debug, Error)]
enum EventHandlerError {
    #[error("error while sending message to peer {peer_id}: {rpc_error:?}")]
    RpcError {
        peer_id: PeerId,
        rpc_error: RPCError,
    },
    #[error(
        "peer {peer_id} sent a response to BlocksByRoot without request: {}",
        Hs(response_bytes)
    )]
    UnexpectedBlocksByRootResponse {
        peer_id: PeerId,
        response_bytes: Vec<u8>,
    },
    #[error("peer {peer_id} terminated BlocksByRoot response stream sent without request")]
    UnexpectedBlocksByRootTermination { peer_id: PeerId },
    #[error("peer {peer_id} rejected the request: {}", AsciiStr(&error_message.error_message))]
    InvalidRequest {
        peer_id: PeerId,
        error_message: ErrorMessage,
    },
    #[error("peer {peer_id} encountered an error: {}", AsciiStr(&error_message.error_message))]
    ServerError {
        peer_id: PeerId,
        error_message: ErrorMessage,
    },
    #[error(
        "peer {peer_id} responded with unknown response code: {}",
        AsciiStr(&error_message.error_message)
    )]
    UnknownResponse {
        peer_id: PeerId,
        error_message: ErrorMessage,
    },
    #[error("unsupported gossiped object type (message_id: {message_id:?}, peer_id: {peer_id}, topics: {topics:?}, message: {message:?})")]
    UnsupportedGossipedObjectType {
        message_id: MessageId,
        // `eth2-libp2p` calls this `source` rather than `peer_id`, but we cannot use that name
        // because `thiserror` treats `source` fields specially and provides no way to opt out.
        peer_id: PeerId,
        topics: Vec<TopicHash>,
        message: PubsubMessage,
    },
    // The next three variants guard the arithmetic in
    // `handle_blocks_by_range_request` against malicious or buggy requests.
    #[error("slot step is zero")]
    SlotStepIsZero,
    #[error("slot difference overflowed ({count} * {step})")]
    SlotDifferenceOverflow { count: u64, step: u64 },
    #[error("end slot overflowed ({start_slot} + {difference})")]
    EndSlotOverflow { start_slot: u64, difference: u64 },
    #[error(
        "local fork version ({}) is different from remote fork version ({})",
        H32(*local),
        H32(*remote)
    )]
    ForkVersionMismatch { local: Version, remote: Version },
    #[error("ran out of request IDs")]
    RequestIdsExhausted,
}
/// An object queued for publication to the gossip network.
// NOTE(review): `BeaconBlock` is presumably much larger than `Attestation`,
// which is why the lint is silenced rather than boxing the variant — confirm.
#[allow(clippy::large_enum_variant)]
enum Gossip<C: Config> {
    BeaconBlock(BeaconBlock<C>),
    BeaconAttestation(Attestation<C>),
}
/// Sending half of the gossip channel; implements `Network` so the rest of the
/// application can publish without touching libp2p directly.
pub struct Sender<C: Config>(UnboundedSender<Gossip<C>>);
// The implementation of `<EventHandler<C, N> as Future>::poll` relies on `UnboundedReceiver` not
// panicking if it is polled after being exhausted.
/// Receiving half of the gossip channel, drained in `EventHandler`'s `poll`.
pub struct Receiver<C: Config>(UnboundedReceiver<Gossip<C>>);
/// Publishing simply enqueues the object on the unbounded gossip channel; the
/// event loop does the actual libp2p publish.
impl<C: Config> Network<C> for Sender<C> {
    fn publish_beacon_block(&self, beacon_block: BeaconBlock<C>) -> Result<()> {
        let Self(sender) = self;
        sender
            .unbounded_send(Gossip::BeaconBlock(beacon_block))
            .map_err(Into::into)
    }
    fn publish_beacon_attestation(&self, attestation: Attestation<C>) -> Result<()> {
        let Self(sender) = self;
        sender
            .unbounded_send(Gossip::BeaconAttestation(attestation))
            .map_err(Into::into)
    }
}
/// Type-erased follow-up future produced by `EventHandler::handle_libp2p_event`.
type EventFuture = Box<dyn Future<Item = (), Error = Error>>;
/// Event loop state tying together the chain state (`networked`), the libp2p
/// service, and the outbound gossip channel.
struct EventHandler<C: Config, N> {
    networked: Qutex<N>,
    networked_receiver: Receiver<C>,
    // Wrapping `Service` in a `Qutex` is not strictly necessary but simplifies the types of
    // `EventHandler.in_progress` and `EventHandler::handle_libp2p_event`.
    service: Qutex<Service>,
    // Next unused RPC request ID; advanced only through `request_id`.
    next_request_id: usize,
    // Follow-up future that must complete before the next libp2p event is
    // processed (see the manual `Future` impl below).
    in_progress: Option<EventFuture>,
}
impl<C: Config, N: Networked<C>> EventHandler<C, N> {
    /// Dispatches a single event from the libp2p service to the matching
    /// handler, returning a future that performs the follow-up work (locking
    /// state, sending responses, importing blocks).
    ///
    /// Events that need no follow-up resolve to an already-completed future.
    fn handle_libp2p_event(&mut self, libp2p_event: Libp2pEvent) -> Result<EventFuture> {
        match libp2p_event {
            Libp2pEvent::RPC(
                peer_id,
                RPCEvent::Request(request_id, RPCRequest::Status(status_message)),
            ) => self.handle_status_request(peer_id, request_id, &status_message),
            // Goodbye requires no response, so the request ID is ignored.
            Libp2pEvent::RPC(peer_id, RPCEvent::Request(_, RPCRequest::Goodbye(reason))) => {
                self.handle_goodbye_request(&peer_id, &reason)
            }
            Libp2pEvent::RPC(
                peer_id,
                RPCEvent::Request(request_id, RPCRequest::BlocksByRange(request)),
            ) => self.handle_blocks_by_range_request(peer_id, request_id, &request),
            Libp2pEvent::RPC(
                peer_id,
                RPCEvent::Request(request_id, RPCRequest::BlocksByRoot(request)),
            ) => self.handle_blocks_by_root_request(peer_id, request_id, request),
            Libp2pEvent::RPC(peer_id, RPCEvent::Response(_, response)) => {
                self.handle_rpc_response(peer_id, response)
            }
            Libp2pEvent::RPC(peer_id, RPCEvent::Error(_, rpc_error)) => {
                bail!(EventHandlerError::RpcError { peer_id, rpc_error });
            }
            Libp2pEvent::PeerDialed(peer_id) => self.handle_peer_dialed(peer_id),
            Libp2pEvent::PeerDisconnected(peer_id) => {
                info!("peer {} disconnected", peer_id);
                Ok(Box::new(future::ok(())))
            }
            Libp2pEvent::PubsubMessage {
                id,
                source,
                topics,
                message,
            } => self.handle_pubsub_message(id, source, topics, message),
            Libp2pEvent::PeerSubscribed(peer_id, topic) => {
                info!("subscribed to peer {} for topic {}", peer_id, topic);
                Ok(Box::new(future::ok(())))
            }
        }
    }
    /// Replies to a Status request with the local status and, when the remote
    /// chain appears ahead of ours, follows up with a BlocksByRange request
    /// (see `compare_status_and_request_blocks`).
    ///
    /// Fails immediately (before any locking) if request IDs are exhausted;
    /// the returned future fails on a fork version mismatch.
    fn handle_status_request(
        &mut self,
        peer_id: PeerId,
        status_request_id: RequestId,
        status_message: &StatusMessage,
    ) -> Result<EventFuture> {
        let remote = status_message_to_status(status_message);
        info!(
            "received Status request (peer_id: {}, remote: {:?})",
            peer_id, remote,
        );
        // Reserve the ID before constructing the future: `self` cannot be
        // borrowed inside the `move` closure below.
        let blocks_by_range_request_id = self.request_id()?;
        Ok(Box::new(
            self.lock_networked().join(self.lock_service()).and_then(
                move |(networked, mut service)| {
                    let local = get_and_check_status(networked.deref(), remote)?;
                    info!(
                        "sending Status response (peer_id: {}, local: {:?})",
                        peer_id, local,
                    );
                    service.swarm.send_rpc(
                        peer_id.clone(),
                        RPCEvent::Response(
                            status_request_id,
                            RPCErrorResponse::Success(RPCResponse::Status(
                                status_into_status_message(local),
                            )),
                        ),
                    );
                    compare_status_and_request_blocks::<C>(
                        local,
                        remote,
                        service,
                        peer_id,
                        blocks_by_range_request_id,
                    );
                    Ok(())
                },
            ),
        ))
    }
fn handle_goodbye_request(
&self,
peer_id: &PeerId,
reason: &GoodbyeReason,
) -> Result<EventFuture> {
info!(
"received Goodbye (peer_id: {}, reason: {})",
peer_id, reason,
);
Ok(Box::new(future::ok(())))
}
    /// Streams locally known blocks in the requested slot range back to the
    /// peer, then terminates the response stream.
    ///
    /// The chain is walked *backwards* from `head_block_root` via parent
    /// roots, so slots decrease monotonically along the iterator; blocks the
    /// store does not have simply end the walk.
    fn handle_blocks_by_range_request(
        &self,
        peer_id: PeerId,
        request_id: RequestId,
        request: &BlocksByRangeRequest,
    ) -> Result<EventFuture> {
        info!(
            "received BlocksByRange request (peer_id: {}, request: {:?})",
            peer_id, request,
        );
        let BlocksByRangeRequest {
            head_block_root,
            start_slot,
            count,
            step,
        } = *request;
        // Validate the arithmetic up front so a hostile request cannot panic
        // or wrap; each failure maps to a dedicated error variant.
        ensure!(step != 0, EventHandlerError::SlotStepIsZero);
        let difference = count
            .checked_mul(step)
            .ok_or_else(|| EventHandlerError::SlotDifferenceOverflow { count, step })?;
        let end_slot = start_slot.checked_add(difference).ok_or_else(|| {
            EventHandlerError::EndSlotOverflow {
                start_slot,
                difference,
            }
        })?;
        Ok(Box::new(
            self.lock_networked()
                .join(self.lock_service())
                .map(move |(networked, mut service)| {
                    // It is unclear what should be done in the case that no blocks are found.
                    // The [specification] implies a `ServerError` should be sent in response.
                    // It would be easier for both the server and the client to terminate the
                    // stream immediately. Lighthouse does exactly that. Given that the notion
                    // of response chunks was [introduced] by a Lighthouse developer, that may
                    // have been the intended meaning.
                    //
                    // [specification]: https://github.com/ethereum/eth2.0-specs/blob/19fa53709a247df5279f063179cc5e317ad57041/specs/networking/p2p-interface.md
                    // [introduced]: https://github.com/ethereum/eth2.0-specs/pull/1404
                    iter::successors(networked.get_beacon_block(head_block_root), |previous| {
                        networked.get_beacon_block(previous.parent_root)
                    })
                    // NOTE(review): a block with slot == end_slot passes this
                    // filter; if the spec range is [start_slot, end_slot)
                    // this is an off-by-one — confirm against the spec.
                    .skip_while(|block| end_slot < block.slot)
                    // Slots only decrease from here on, so stopping at the
                    // first slot below start_slot is safe. This also
                    // guarantees the subtraction below cannot underflow.
                    .take_while(|block| start_slot <= block.slot)
                    .filter(|block| (block.slot - start_slot) % step == 0)
                    .for_each(|block| {
                        info!(
                            "sending BlocksByRange response chunk (peer_id: {}, block: {:?})",
                            peer_id, block,
                        );
                        service.swarm.send_rpc(
                            peer_id.clone(),
                            RPCEvent::Response(
                                request_id,
                                RPCErrorResponse::Success(RPCResponse::BlocksByRange(
                                    block.as_ssz_bytes(),
                                )),
                            ),
                        );
                    });
                    info!("terminating BlocksByRange response stream");
                    service.swarm.send_rpc(
                        peer_id,
                        RPCEvent::Response(
                            request_id,
                            RPCErrorResponse::StreamTermination(ResponseTermination::BlocksByRange),
                        ),
                    );
                }),
        ))
    }
    /// Sends each locally known block from `request.block_roots` back to the
    /// peer as a response chunk, then terminates the stream.
    ///
    /// Unknown roots are silently skipped (see the comment inside on the
    /// no-blocks-found ambiguity in the spec).
    fn handle_blocks_by_root_request(
        &self,
        peer_id: PeerId,
        request_id: RequestId,
        request: BlocksByRootRequest,
    ) -> Result<EventFuture> {
        let block_roots = request.block_roots;
        info!(
            "received BlocksByRoot request (peer_id: {}, block_roots: {:?})",
            peer_id, block_roots,
        );
        Ok(Box::new(
            self.lock_networked()
                .join(self.lock_service())
                .map(move |(networked, mut service)| {
                    // It is unclear what should be done in the case that no blocks are found.
                    // The [specification] implies a `ServerError` should be sent in response.
                    // It would be easier for both the server and the client to terminate the
                    // stream immediately. Lighthouse does exactly that. Given that the notion
                    // of response chunks was [introduced] by a Lighthouse developer, that may
                    // have been the intended meaning.
                    //
                    // [specification]: https://github.com/ethereum/eth2.0-specs/blob/19fa53709a247df5279f063179cc5e317ad57041/specs/networking/p2p-interface.md
                    // [introduced]: https://github.com/ethereum/eth2.0-specs/pull/1404
                    for root in block_roots {
                        if let Some(block) = networked.get_beacon_block(root) {
                            info!(
                                "sending BlocksByRoot response chunk (peer_id: {}, block: {:?})",
                                peer_id, block,
                            );
                            service.swarm.send_rpc(
                                peer_id.clone(),
                                RPCEvent::Response(
                                    request_id,
                                    RPCErrorResponse::Success(RPCResponse::BlocksByRoot(
                                        block.as_ssz_bytes(),
                                    )),
                                ),
                            );
                        }
                    }
                    info!("terminating BlocksByRoot response stream");
                    service.swarm.send_rpc(
                        peer_id,
                        RPCEvent::Response(
                            request_id,
                            RPCErrorResponse::StreamTermination(ResponseTermination::BlocksByRoot),
                        ),
                    );
                }),
        ))
    }
    /// Handles a response (or error response) from a peer.
    ///
    /// Status responses may trigger a follow-up BlocksByRange request;
    /// BlocksByRange chunks are decoded and imported into the chain state.
    /// This node never sends BlocksByRoot requests, so any BlocksByRoot
    /// response or termination is treated as an error.
    fn handle_rpc_response(
        &mut self,
        peer_id: PeerId,
        response: RPCErrorResponse,
    ) -> Result<EventFuture> {
        match response {
            RPCErrorResponse::Success(RPCResponse::Status(status_message)) => {
                let remote = status_message_to_status(&status_message);
                info!(
                    "received Status response (peer_id: {}, remote: {:?})",
                    peer_id, remote,
                );
                let request_id = self.request_id()?;
                Ok(Box::new(
                    self.lock_networked().join(self.lock_service()).and_then(
                        move |(networked, service)| {
                            // Fails on fork version mismatch.
                            let local = get_and_check_status(networked.deref(), remote)?;
                            compare_status_and_request_blocks::<C>(
                                local, remote, service, peer_id, request_id,
                            );
                            Ok(())
                        },
                    ),
                ))
            }
            RPCErrorResponse::Success(RPCResponse::BlocksByRange(bytes)) => {
                info!(
                    "received BlocksByRange response chunk (peer_id: {}, bytes: {})",
                    peer_id,
                    Hs(bytes.as_slice()),
                );
                // SSZ decoding happens before locking so malformed chunks
                // fail fast without touching the chain state.
                let beacon_block =
                    BeaconBlock::from_ssz_bytes(bytes.as_slice()).map_err(DebugAsError::new)?;
                info!(
                    "decoded BlocksByRange response chunk (peer_id: {}, beacon_block: {:?})",
                    peer_id, beacon_block,
                );
                Ok(Box::new(self.lock_networked().and_then(|mut networked| {
                    networked.accept_beacon_block(beacon_block)
                })))
            }
            RPCErrorResponse::Success(RPCResponse::BlocksByRoot(response_bytes)) => {
                bail!(EventHandlerError::UnexpectedBlocksByRootResponse {
                    peer_id,
                    response_bytes
                })
            }
            RPCErrorResponse::InvalidRequest(error_message) => {
                bail!(EventHandlerError::InvalidRequest {
                    peer_id,
                    error_message,
                })
            }
            RPCErrorResponse::ServerError(error_message) => bail!(EventHandlerError::ServerError {
                peer_id,
                error_message,
            }),
            RPCErrorResponse::Unknown(error_message) => bail!(EventHandlerError::UnknownResponse {
                peer_id,
                error_message,
            }),
            RPCErrorResponse::StreamTermination(ResponseTermination::BlocksByRange) => {
                info!("peer {} terminated BlocksByRange response stream", peer_id);
                Ok(Box::new(future::ok(())))
            }
            RPCErrorResponse::StreamTermination(ResponseTermination::BlocksByRoot) => {
                bail!(EventHandlerError::UnexpectedBlocksByRootTermination { peer_id })
            }
        }
    }
fn handle_peer_dialed(&mut self, peer_id: PeerId) -> Result<EventFuture> {
info!("peer {} dialed", peer_id);
let request_id = self.request_id()?;
Ok(Box::new(
self.lock_networked()
.join(self.lock_service())
.map(move |(networked, mut service)| {
let status = networked.get_status();
info!(
"sending Status request (peer_id: {}, status: {:?})",
peer_id, status,
);
service.swarm.send_rpc(
peer_id,
RPCEvent::Request(
request_id,
RPCRequest::Status(status_into_status_message(status)),
),
);
}),
))
}
    /// Decodes a gossiped object and imports it into the chain state.
    ///
    /// Only blocks and attestations are supported; anything else fails with
    /// `UnsupportedGossipedObjectType`. SSZ decoding errors propagate before
    /// any lock is taken.
    fn handle_pubsub_message(
        &self,
        message_id: MessageId,
        source: PeerId,
        topics: Vec<TopicHash>,
        message: PubsubMessage,
    ) -> Result<EventFuture> {
        match message {
            PubsubMessage::Block(bytes) => {
                info!("received beacon block as gossip: {}", Hs(bytes.as_slice()));
                let beacon_block =
                    BeaconBlock::from_ssz_bytes(bytes.as_slice()).map_err(DebugAsError::new)?;
                info!("decoded gossiped beacon block: {:?}", beacon_block);
                Ok(Box::new(self.lock_networked().and_then(|mut networked| {
                    networked.accept_beacon_block(beacon_block)
                })))
            }
            PubsubMessage::Attestation(bytes) => {
                info!(
                    "received beacon attestation as gossip: {}",
                    Hs(bytes.as_slice()),
                );
                let attestation =
                    Attestation::from_ssz_bytes(bytes.as_slice()).map_err(DebugAsError::new)?;
                info!("decoded gossiped beacon attestation: {:?}", attestation);
                Ok(Box::new(self.lock_networked().and_then(|mut networked| {
                    networked.accept_beacon_attestation(attestation)
                })))
            }
            _ => bail!(EventHandlerError::UnsupportedGossipedObjectType {
                message_id,
                peer_id: source,
                topics,
                message,
            }),
        }
    }
    /// Acquires the chain-state lock; the future resolves once no other task
    /// holds it.
    fn lock_networked(&self) -> impl Future<Item = Guard<N>, Error = Error> {
        self.networked.clone().lock().from_err()
    }
    /// Acquires the libp2p service lock; the future resolves once no other
    /// task holds it.
    fn lock_service(&self) -> impl Future<Item = Guard<Service>, Error = Error> {
        self.service.clone().lock().from_err()
    }
fn request_id(&mut self) -> Result<usize> {
let request_id = self.next_request_id;
self.next_request_id = self
.next_request_id
.checked_add(1)
.ok_or(EventHandlerError::RequestIdsExhausted)?;
Ok(request_id)
}
}
// We have to implement `Future` manually because using `Stream` combinators with
// `Service` consumes it and makes it impossible to access `Service.swarm`.
//
// The implementation is roughly equivalent to:
// ```
// let handle_events = service.for_each(|libp2p_event| …);
// let publish_gossip = self.networked_receiver.0.for_each(|gossip| …);
// handle_events.join(publish_gossip)
// ```
impl<C: Config, N: Networked<C>> Future for EventHandler<C, N> {
    type Item = ();
    type Error = Error;
    /// Drives the network: drains libp2p events (finishing any in-progress
    /// follow-up future first, one at a time), then publishes all queued
    /// gossip messages. Always ends `NotReady` — the loop never completes.
    fn poll(&mut self) -> Poll<Self::Item, Self::Error> {
        // Handle all `Libp2pEvent`s currently available from `Service`.
        loop {
            // An unfinished follow-up future blocks further event handling;
            // once it completes it is cleared so it is not polled again.
            if let Some(in_progress) = &mut self.in_progress {
                try_ready!(in_progress.poll());
                self.in_progress = None;
            }
            let mut service = try_ready!(self.lock_service().poll());
            match service.poll().map_err(SyncError::new)? {
                Async::Ready(Some(libp2p_event)) => {
                    self.in_progress = Some(self.handle_libp2p_event(libp2p_event)?);
                }
                Async::Ready(None) => {
                    // See <https://github.com/sigp/lighthouse/blob/c04026d073d12a98499c9cebd6d6134fc75355a9/beacon_node/eth2-libp2p/src/service.rs#L202>.
                    unreachable!("<Service as Stream> should never end");
                }
                Async::NotReady => break,
            };
        }
        // Publish all `Gossip`s received through `networked_receiver`.
        //
        // This will keep polling the `UnboundedReceiver` after it has been exhausted.
        // `UnboundedReceiver` does not panic in that scenario, so there is no need to use
        // `Stream::fuse`.
        let swarm = &mut try_ready!(self.lock_service().poll()).swarm;
        while let Some(gossip) = try_ready!(self
            .networked_receiver
            .0
            .poll()
            // Channel receivers from `futures` are supposed to never fail,
            // but `futures` 0.1 uses `()` as the `Error` type for infallible `Stream`s.
            .map_err(|()| -> Self::Error { unreachable!("UnboundedReceiver should never fail") }))
        {
            match gossip {
                Gossip::BeaconBlock(beacon_block) => swarm.publish(
                    &[Topic::new("/eth2/beacon_block/ssz".to_owned())],
                    PubsubMessage::Block(beacon_block.as_ssz_bytes()),
                ),
                Gossip::BeaconAttestation(attestation) => swarm.publish(
                    &[Topic::new("/eth2/beacon_attestation/ssz".to_owned())],
                    PubsubMessage::Attestation(attestation.as_ssz_bytes()),
                ),
            }
        }
        Ok(Async::NotReady)
    }
}
/// Creates the unbounded gossip channel connecting the application (`Sender`)
/// to the network event loop (`Receiver`).
pub fn channel<C: Config>() -> (Sender<C>, Receiver<C>) {
    let (tx, rx) = mpsc::unbounded();
    (Sender(tx), Receiver(rx))
}
/// Starts the libp2p service and returns the event-loop future that drives it.
///
/// Fails only if the service itself cannot be created (e.g. the listen
/// address is unavailable).
pub fn run_network<C: Config, N: Networked<C>>(
    config: NetworkConfig,
    networked: Qutex<N>,
    networked_receiver: Receiver<C>,
) -> Result<impl Future<Item = (), Error = Error>> {
    // Route `slog` records emitted by `eth2-libp2p` through the `log` crate.
    let root_logger = Logger::root(StdLog.fuse(), o!());
    let libp2p_service = Service::new(config, root_logger).map_err(SyncError::new)?;
    let handler = EventHandler {
        networked,
        networked_receiver,
        service: Qutex::new(libp2p_service),
        next_request_id: 0,
        in_progress: None,
    };
    Ok(handler)
}
/// Converts the wire-format `StatusMessage` into the application's `Status`,
/// converting the epoch and slot fields with `Into`.
fn status_message_to_status(status_message: &StatusMessage) -> Status {
    let StatusMessage {
        fork_version,
        finalized_root,
        finalized_epoch,
        head_root,
        head_slot,
    } = *status_message;
    Status {
        fork_version,
        finalized_root,
        finalized_epoch: finalized_epoch.into(),
        head_root,
        head_slot: head_slot.into(),
    }
}
/// Converts the application's `Status` into the wire-format `StatusMessage`
/// (inverse of `status_message_to_status`).
fn status_into_status_message(status: Status) -> StatusMessage {
    let Status {
        fork_version,
        finalized_root,
        finalized_epoch,
        head_root,
        head_slot,
    } = status;
    let finalized_epoch = finalized_epoch.into();
    let head_slot = head_slot.into();
    StatusMessage {
        fork_version,
        finalized_root,
        finalized_epoch,
        head_root,
        head_slot,
    }
}
fn get_and_check_status<C: Config, N: Networked<C>>(
networked: &N,
remote: Status,
) -> Result<Status> {
let local = networked.get_status();
ensure!(
local.fork_version == remote.fork_version,
EventHandlerError::ForkVersionMismatch {
local: local.fork_version,
remote: remote.fork_version,
},
);
Ok(local)
}
/// If the remote chain is ahead of the local one (compared first by finalized
/// epoch, then by head slot), requests blocks from the remote finalized epoch
/// onward via BlocksByRange. Does nothing otherwise.
fn compare_status_and_request_blocks<C: Config>(
    local: Status,
    remote: Status,
    mut service: Guard<Service>,
    peer_id: PeerId,
    request_id: RequestId,
) {
    // We currently do not check if `remote.finalized_root` is present in the local chain at
    // `remote.finalized_epoch` because there is no easy way to do it with our implementation of the
    // fork choice store.
    if (local.finalized_epoch, local.head_slot) < (remote.finalized_epoch, remote.head_slot) {
        let request = BlocksByRangeRequest {
            head_block_root: remote.head_root,
            start_slot: misc::compute_start_slot_at_epoch::<C>(remote.finalized_epoch),
            // NOTE(review): a server running *this* file's
            // `handle_blocks_by_range_request` would reject this request with
            // `EndSlotOverflow` (start_slot + u64::MAX * 1 overflows) — the
            // intended "rest of the chain" count needs confirming.
            count: u64::max_value(),
            step: 1,
        };
        info!(
            "sending BlocksByRange request (peer_id: {}, request: {:?})",
            peer_id, request,
        );
        service.swarm.send_rpc(
            peer_id,
            RPCEvent::Request(request_id, RPCRequest::BlocksByRange(request)),
        );
    }
}
|
use crate::scene::SceneItem;
use crate::vector::Vec3;
use crate::ray::Ray;
use crate::intersectable::{Intersectable, Intersection};
use rand::Rng;
use std::f64;
use std::cmp::Ordering;
use std::mem;
/// Axis-aligned bounding box given by its minimum and maximum corners.
#[derive(Copy, Clone, Debug)]
pub struct AABB {
    min: Vec3,
    max: Vec3
}
impl AABB {
    /// Creates a box with corners `a` (minimum) and `b` (maximum).
    pub fn new(a: Vec3, b: Vec3) -> AABB {
        AABB { min: a, max: b }
    }
    /// Lower corner of the box.
    pub fn min(&self) -> Vec3 { self.min }
    /// Upper corner of the box.
    pub fn max(&self) -> Vec3 { self.max }
    /// Slab-method ray/box overlap test.
    ///
    /// Clips the ray's parameter interval `[t_min, t_max]` against the x and
    /// y slabs, then checks overlap with the z slab. Because only a boolean
    /// is returned, the interval never needs to be narrowed by the z slab
    /// (the previously commented-out updates were dead code and have been
    /// removed). Division by a zero direction component yields ±inf/NaN,
    /// which the comparisons below handle without panicking.
    pub fn intersect(&self, ray: &Ray) -> bool {
        let inv_dir_x = 1. / ray.direction().x();
        let inv_dir_y = 1. / ray.direction().y();
        let inv_dir_z = 1. / ray.direction().z();
        let mut t_min = (self.min.x() - ray.origin().x()) * inv_dir_x;
        let mut t_max = (self.max.x() - ray.origin().x()) * inv_dir_x;
        if inv_dir_x < 0. {
            mem::swap(&mut t_min, &mut t_max);
        }
        let mut ty_min = (self.min.y() - ray.origin().y()) * inv_dir_y;
        let mut ty_max = (self.max.y() - ray.origin().y()) * inv_dir_y;
        if inv_dir_y < 0. {
            mem::swap(&mut ty_min, &mut ty_max);
        }
        if (t_min > ty_max) || (ty_min > t_max) {
            return false;
        }
        // Explicit comparisons (not f64::max/min) to preserve the original
        // NaN behavior: a NaN comparison is false, keeping the old bound.
        if ty_min > t_min {
            t_min = ty_min;
        }
        if ty_max < t_max {
            t_max = ty_max;
        }
        let mut tz_min = (self.min.z() - ray.origin().z()) * inv_dir_z;
        let mut tz_max = (self.max.z() - ray.origin().z()) * inv_dir_z;
        if inv_dir_z < 0. {
            mem::swap(&mut tz_min, &mut tz_max);
        }
        !(t_min > tz_max || tz_min > t_max)
    }
}
/// Anything that can report an axis-aligned box enclosing itself.
pub trait BoundingBox {
    /// Returns the smallest known AABB containing `self`.
    fn bounding_box(&self) -> AABB;
}
/// Returns the smallest AABB enclosing both `box0` and `box1`.
pub fn surrounding_box(box0: &AABB, box1: &AABB) -> AABB {
    let lower = Vec3::new(
        box0.min().x().min(box1.min().x()),
        box0.min().y().min(box1.min().y()),
        box0.min().z().min(box1.min().z()),
    );
    let upper = Vec3::new(
        box0.max().x().max(box1.max().x()),
        box0.max().y().max(box1.max().y()),
        box0.max().z().max(box1.max().z()),
    );
    AABB::new(lower, upper)
}
/// Node of a bounding-volume hierarchy.
///
/// Invariant: a node has either both children (internal node, `item` is
/// `None`) or an `item` (leaf, both children are `None`).
pub struct BVH {
    // can has (left AND right) OR item
    pub bbox: AABB,
    left: Option<Box<BVH>>,
    right: Option<Box<BVH>>,
    item: Option<SceneItem>
}
impl BVH {
    /// Recursively builds a BVH over `items` by sorting along a randomly
    /// chosen axis and splitting at the median.
    ///
    /// NOTE(review): an empty slice recurses forever (the len == 1 base case
    /// is never reached); callers must pass at least one item.
    pub fn new(items: &mut [SceneItem]) -> BVH {
        // Random partition axis: 0 = x, 1 = y, anything else = z.
        let axis = (3. * rand::thread_rng().gen::<f32>()) as u8;
        let key = |item: &SceneItem| {
            let min = item.bounding_box().min();
            match axis {
                0 => min.x(),
                1 => min.y(),
                _ => min.z(),
            }
        };
        items.sort_by(|a, b| key(a).partial_cmp(&key(b)).unwrap_or(Ordering::Equal));
        if items.len() == 1 {
            BVH {
                left: None,
                right: None,
                item: Some(items[0]),
                bbox: items[0].bounding_box(),
            }
        } else {
            let mid = items.len() / 2;
            let (front, back) = items.split_at_mut(mid);
            let left = BVH::new(front);
            let right = BVH::new(back);
            let bbox = surrounding_box(&left.bounding_box(), &right.bounding_box());
            BVH {
                left: Some(Box::new(left)),
                right: Some(Box::new(right)),
                bbox,
                item: None,
            }
        }
    }
    /// Left child, if this is an internal node.
    pub fn left(&self) -> Option<&BVH> { self.left.as_deref() }
    /// Right child, if this is an internal node.
    pub fn right(&self) -> Option<&BVH> { self.right.as_deref() }
    /// The stored scene item, if this is a leaf.
    pub fn item(&self) -> Option<SceneItem> { self.item }
    /// Returns the nearest intersection of `ray` with the items in this
    /// subtree, or `None` if nothing is hit.
    pub fn intersect(&self, ray: &Ray) -> Option<Intersection> {
        if let Some(item) = self.item {
            // Leaf: test the item directly; non-positive distances are misses.
            let dist = item.intersect(&ray);
            if dist > 0. {
                Some(Intersection {
                    intersected: item,
                    dist,
                })
            } else {
                None
            }
        } else if self.bbox.intersect(&ray) {
            // Internal node: invariant guarantees both children exist.
            let hit_left = self.left().unwrap().intersect(&ray);
            let hit_right = self.right().unwrap().intersect(&ray);
            match (hit_left, hit_right) {
                (Some(l), Some(r)) => Some(if l.dist < r.dist { l } else { r }),
                (Some(l), None) => Some(l),
                (None, r) => r,
            }
        } else {
            None
        }
    }
}
impl BoundingBox for BVH {
    /// Recomputes the bounding box from the children (or the leaf item)
    /// rather than returning the cached `bbox` field.
    ///
    /// Panics if the node has neither children nor an item.
    fn bounding_box(&self) -> AABB {
        match (self.left(), self.right()) {
            (Some(left), Some(right)) => surrounding_box(&left.bbox, &right.bbox),
            (Some(left), None) => left.bbox,
            (None, _) => self
                .item
                .map(|item| item.bounding_box())
                .expect("Empty BVH"),
        }
    }
}
|
use array2d::Array2D;
/// Renders the board as text: cells joined with `|`, rows separated by a
/// dashed line.
fn format_board(board: &Array2D<String>) -> String {
    let rows: Vec<String> = board
        .rows_iter()
        .map(|cells| cells.cloned().collect::<Vec<_>>().join("|"))
        .collect();
    rows.join("\n-----\n")
}
/// Prints an empty 3x3 board, places an "X" in the top-right cell, and prints
/// the board again.
fn main() {
    let mut grid = Array2D::filled_with(" ".to_string(), 3, 3);
    println!("{}\n", format_board(&grid));
    grid[(0, 2)] = "X".to_string();
    println!("{}\n", format_board(&grid));
}
|
use std::collections::HashMap;
use prettytable::{Attr, Cell, Row, Table};
use crate::github;
/// Appends the three per-repository column titles to `header_two` and a
/// single repo-name cell spanning those columns to `header_one`.
fn insert_headers(header_one: &mut Vec<Cell>, header_two: &mut Vec<Cell>, repo: &str) {
    let columns = ["Last Updated", "Updated By", "PR"];
    for title in &columns {
        header_two.push(Cell::new(title).with_style(Attr::Bold));
    }
    header_one.push(
        Cell::new(repo)
            .with_style(Attr::Bold)
            .with_hspan(columns.len()),
    );
}
/// Appends the three cells for `repo` (last updated, updated by, PR number)
/// to `cells`, or three empty cells if the branch has no entry for `repo`.
fn insert_cells(cells: &mut Vec<Cell>, value: &HashMap<String, github::Branch>, repo: &str) {
    if let Some(branch) = value.get(repo) {
        cells.push(Cell::new(&branch.last_updated));
        cells.push(Cell::new(&branch.last_updated_by));
        // A missing PR renders as an empty cell.
        let pr_text = branch.pr.map(|pr| pr.to_string()).unwrap_or_default();
        cells.push(Cell::new(&pr_text));
    } else {
        for _ in 0..3 {
            cells.push(Cell::new(""));
        }
    }
}
pub fn print(branches: Vec<(&String, &HashMap<String, github::Branch>)>, repos: Vec<&str>) {
let mut header_one = vec![Cell::new("Branch").with_style(Attr::Bold)];
let mut header_two = vec![Cell::new("")];
for repo in &repos {
insert_headers(&mut header_one, &mut header_two, &repo);
}
let mut table = Table::new();
table.add_row(Row::new(header_one));
table.add_row(Row::new(header_two));
for (key, value) in branches {
let mut cells = vec![Cell::new(key)];
for repo in &repos {
insert_cells(&mut cells, value, repo);
}
table.add_row(Row::new(cells));
}
table.printstd();
} |
use core::ops::DerefMut;
use core::pin::Pin;
use core::task::{Context, Poll};
#[cfg(feature = "alloc")]
use alloc::boxed::Box;
#[cfg(feature = "std")]
use futures::io as std_io;
use super::error::Result;
/// Read bytes asynchronously.
///
/// This trait is analogous to the `std::io::BufRead` trait, but integrates
/// with the asynchronous task system. In particular, the `poll_fill_buf`
/// method, unlike `BufRead::fill_buf`, will automatically queue the current task
/// for wakeup and return if data is not yet available, rather than blocking
/// the calling thread.
// NOTE(review): the `[`poll_read`]` doc links below target
// `AsyncBufRead::poll_read`, which this trait does not define — presumably the
// intended target is a companion `AsyncRead::poll_read`; confirm and fix the
// link destinations.
pub trait AsyncBufRead {
    /// Attempt to return the contents of the internal buffer, filling it with more data
    /// from the inner reader if it is empty.
    ///
    /// On success, returns `Poll::Ready(Ok(buf))`.
    ///
    /// If no data is available for reading, the method returns
    /// `Poll::Pending` and arranges for the current task (via
    /// `cx.waker().wake_by_ref()`) to receive a notification when the object becomes
    /// readable or is closed.
    ///
    /// This function is a lower-level call. It needs to be paired with the
    /// [`consume`] method to function properly. When calling this
    /// method, none of the contents will be "read" in the sense that later
    /// calling [`poll_read`] may return the same contents. As such, [`consume`] must
    /// be called with the number of bytes that are consumed from this buffer to
    /// ensure that the bytes are never returned twice.
    ///
    /// [`poll_read`]: AsyncBufRead::poll_read
    /// [`consume`]: AsyncBufRead::consume
    ///
    /// An empty buffer returned indicates that the stream has reached EOF.
    ///
    /// # Implementation
    ///
    /// This function may not return errors of kind `WouldBlock` or
    /// `Interrupted`. Implementations must convert `WouldBlock` into
    /// `Poll::Pending` and either internally retry or convert
    /// `Interrupted` into another error kind.
    fn poll_fill_buf(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Result<&[u8]>>;
    /// Tells this buffer that `amt` bytes have been consumed from the buffer,
    /// so they should no longer be returned in calls to [`poll_read`].
    ///
    /// This function is a lower-level call. It needs to be paired with the
    /// [`poll_fill_buf`] method to function properly. This function does
    /// not perform any I/O, it simply informs this object that some amount of
    /// its buffer, returned from [`poll_fill_buf`], has been consumed and should
    /// no longer be returned. As such, this function may do odd things if
    /// [`poll_fill_buf`] isn't called before calling it.
    ///
    /// The `amt` must be `<=` the number of bytes in the buffer returned by
    /// [`poll_fill_buf`].
    ///
    /// [`poll_read`]: AsyncBufRead::poll_read
    /// [`poll_fill_buf`]: AsyncBufRead::poll_fill_buf
    fn consume(self: Pin<&mut Self>, amt: usize);
}
/// Write bytes asynchronously.
///
/// This trait is analogous to the `std::io::Write` trait, but integrates
/// with the asynchronous task system. In particular, the `poll_write`
/// method, unlike `Write::write`, will automatically queue the current task
/// for wakeup and return if the writer cannot take more data, rather than blocking
/// the calling thread.
pub trait AsyncWrite {
    /// Attempt to write bytes from `buf` into the object.
    ///
    /// On success, returns `Poll::Ready(Ok(num_bytes_written))`.
    ///
    /// If the object is not ready for writing, the method returns
    /// `Poll::Pending` and arranges for the current task (via
    /// `cx.waker().wake_by_ref()`) to receive a notification when the object becomes
    /// writable or is closed.
    ///
    /// # Implementation
    ///
    /// This function may not return errors of kind `WouldBlock` or
    /// `Interrupted`. Implementations must convert `WouldBlock` into
    /// `Poll::Pending` and either internally retry or convert
    /// `Interrupted` into another error kind.
    ///
    /// `poll_write` must try to make progress by flushing the underlying object if
    /// that is the only way the underlying object can become writable again.
    fn poll_write(self: Pin<&mut Self>, cx: &mut Context<'_>, buf: &[u8]) -> Poll<Result<usize>>;
}
// Forwards `AsyncBufRead` through one level of `Deref`/`DerefMut` (used for
// `Box<T>` and `&mut T` below; both require `T: Unpin` so `Pin::new` is safe).
//
// The two methods deliberately project differently: `poll_fill_buf` consumes
// the pin via `get_mut` so the returned buffer can borrow `*self` for the
// full call lifetime, while `consume` returns nothing and can use the shorter
// reborrow `&mut **self`.
macro_rules! defer_async_read {
    () => {
        fn poll_fill_buf(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Result<&[u8]>> {
            Pin::new(&mut **self.get_mut()).poll_fill_buf(cx)
        }
        fn consume(mut self: Pin<&mut Self>, amt: usize) {
            Pin::new(&mut **self).consume(amt)
        }
    };
}
// Boxed and mutably borrowed readers defer to the inner reader.
#[cfg(feature = "alloc")]
impl<T: ?Sized + AsyncBufRead + Unpin> AsyncBufRead for Box<T> {
    defer_async_read!();
}
impl<T: ?Sized + AsyncBufRead + Unpin> AsyncBufRead for &mut T {
    defer_async_read!();
}
// A pinned pointer whose target is readable is itself readable; `as_mut`
// reborrows the inner pin without ever unpinning the target.
impl<P> AsyncBufRead for Pin<P>
where
    P: DerefMut + Unpin,
    P::Target: AsyncBufRead,
{
    fn poll_fill_buf(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Result<&[u8]>> {
        self.get_mut().as_mut().poll_fill_buf(cx)
    }
    fn consume(self: Pin<&mut Self>, amt: usize) {
        self.get_mut().as_mut().consume(amt)
    }
}
// Forwards `AsyncWrite` through one level of `Deref`/`DerefMut` (used for
// `Box<T>` and `&mut T` below; both require `T: Unpin` so `Pin::new` is safe).
macro_rules! deref_async_write {
    () => {
        fn poll_write(
            mut self: Pin<&mut Self>,
            cx: &mut Context<'_>,
            buf: &[u8],
        ) -> Poll<Result<usize>> {
            Pin::new(&mut **self).poll_write(cx, buf)
        }
    };
}
// Boxed and mutably borrowed writers defer to the inner writer.
#[cfg(feature = "alloc")]
impl<T: ?Sized + AsyncWrite + Unpin> AsyncWrite for Box<T> {
    deref_async_write!();
}
impl<T: ?Sized + AsyncWrite + Unpin> AsyncWrite for &mut T {
    deref_async_write!();
}
// A pinned pointer whose target is writable is itself writable.
impl<P> AsyncWrite for Pin<P>
where
    P: DerefMut + Unpin,
    P::Target: AsyncWrite,
{
    fn poll_write(self: Pin<&mut Self>, cx: &mut Context<'_>, buf: &[u8]) -> Poll<Result<usize>> {
        self.get_mut().as_mut().poll_write(cx, buf)
    }
}
/// Adapter implementing this crate's I/O traits for types implementing the
/// `futures` (`std_io`) equivalents, converting errors with `Into`.
#[cfg(feature = "std")]
pub struct FromStdIo<T>(T);
#[cfg(feature = "std")]
impl<T> FromStdIo<T> {
    /// Wraps a `futures`-flavoured reader or writer.
    pub fn new(inner: T) -> Self {
        Self(inner)
    }
}
#[cfg(feature = "std")]
impl<T: std_io::AsyncBufRead> AsyncBufRead for FromStdIo<T> {
    fn poll_fill_buf(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Result<&[u8]>> {
        // SAFETY: structural pin projection onto the only field. Within this
        // impl `inner` is only ever projected (here and in `consume`), never
        // moved out, so re-pinning it is sound.
        let Self(inner) = unsafe { self.get_unchecked_mut() };
        unsafe { Pin::new_unchecked(inner) }
            .poll_fill_buf(cx)
            .map_err(|e| e.into())
    }
    fn consume(self: Pin<&mut Self>, amt: usize) {
        // SAFETY: same structural projection as in `poll_fill_buf`.
        let Self(inner) = unsafe { self.get_unchecked_mut() };
        unsafe { Pin::new_unchecked(inner) }.consume(amt)
    }
}
#[cfg(feature = "std")]
impl<T: std_io::AsyncWrite> AsyncWrite for FromStdIo<T> {
    fn poll_write(self: Pin<&mut Self>, cx: &mut Context<'_>, buf: &[u8]) -> Poll<Result<usize>> {
        // SAFETY: structural pin projection onto the only field; `inner` is
        // never moved out of the pinned wrapper, so re-pinning it is sound.
        let Self(inner) = unsafe { self.get_unchecked_mut() };
        unsafe { Pin::new_unchecked(inner) }
            .poll_write(cx, buf)
            .map_err(|e| e.into())
    }
}
|
use hyper::{body, header, Body, Method, Request, Uri};
use hyper_rustls::HttpsConnector;
const ENDPOINT: &str = "https://getpocket.com/v3";
const REDIRECT_URL: &str = "https://getpocket.com";
/// Builds the absolute API `Uri` for the given `method` path.
///
/// Panics if the concatenated URL fails to parse.
pub fn url(method: &str) -> Uri {
    let full = [ENDPOINT, method].concat();
    full.parse()
        .unwrap_or_else(|_| panic!("Could not parse url: {}", full))
}
/// Fully authorized API client credentials.
pub struct Client {
    pub consumer_key: String,
    pub authorization_code: String,
}
/// First step of the OAuth flow: only the consumer key is known.
pub struct BeginAuthentication {
    pub consumer_key: String,
}
/// Second step: a request token has been obtained and awaits user approval.
pub struct AuthorizationRequest {
    consumer_key: String,
    request_code: String,
}
/// Builds a hyper client backed by a TLS connector using the platform's
/// native root certificates.
pub fn https_client() -> hyper::Client<HttpsConnector<hyper::client::HttpConnector>> {
    let connector = HttpsConnector::with_native_roots();
    hyper::Client::builder().build::<_, hyper::Body>(connector)
}
impl BeginAuthentication {
    /// Requests an OAuth request token from Pocket and returns the next step
    /// of the flow.
    ///
    /// Panics when the response body does not contain a `code=...` pair.
    pub async fn request_authorization_code(self) -> AuthorizationRequest {
        let body = self.request().await;
        // The body has the form `code=<token>`; keep what follows the `=`.
        let code = body
            .split('=')
            .nth(1)
            .expect("Could not retrieve the authorization code from the authentication request");
        AuthorizationRequest {
            consumer_key: self.consumer_key,
            request_code: code.to_owned(),
        }
    }
    /// POSTs the consumer key to `/v3/oauth/request` and returns the raw
    /// response body. Panics on network, HTTP, or encoding failures.
    async fn request(&self) -> String {
        let client = https_client();
        let req = Request::builder()
            .method(Method::POST)
            .uri(url("/oauth/request"))
            .header(header::CONTENT_TYPE, "application/x-www-form-urlencoded")
            .header(header::CONNECTION, "close")
            .body(Body::from(format!(
                "consumer_key={}&redirect_uri={}",
                &self.consumer_key, REDIRECT_URL
            )))
            .unwrap();
        let res = client
            .request(req)
            .await
            .expect("Could not request OAuth authorization");
        let body_bytes = body::to_bytes(res.into_body())
            .await
            .expect("Could not read OAuth response body");
        String::from_utf8(body_bytes.to_vec()).expect("Response was not valid UTF-8")
    }
}
impl AuthorizationRequest {
    /// URL the user must open in a browser to approve the request token.
    pub fn authorization_url(&self) -> String {
        format!(
            "https://getpocket.com/auth/authorize?request_token={}&redirect_uri={}",
            &self.request_code, REDIRECT_URL
        )
    }
    /// Exchanges the (user-approved) request token for an authorized client.
    ///
    /// Panics when the response does not look like `key=value&...`.
    pub async fn request_authorized_code(self) -> Client {
        let body = self.request().await;
        // Presumably `access_token=<token>&username=...` — take what follows
        // the first `=` ...
        let first_value = body
            .split('=')
            .nth(1)
            .expect("Could not extract authorization line from response")
        // ... and cut it off at the next `&`.
            ;
        let code = first_value
            .split('&')
            .next()
            .expect("Could not extract authorization code from response")
            .to_owned();
        Client {
            consumer_key: self.consumer_key,
            authorization_code: code,
        }
    }
    /// POSTs consumer key + request token to `/v3/oauth/authorize` and
    /// returns the raw response body. Panics on network, HTTP, or encoding
    /// failures.
    async fn request(&self) -> String {
        let client = https_client();
        let req = Request::builder()
            .method(Method::POST)
            .uri(url("/oauth/authorize"))
            .header(header::CONTENT_TYPE, "application/x-www-form-urlencoded")
            .header(header::CONNECTION, "close")
            .body(Body::from(format!(
                "consumer_key={}&code={}",
                &self.consumer_key, &self.request_code
            )))
            .unwrap();
        let res = client
            .request(req)
            .await
            .expect("Could not make authorization request");
        let body_bytes = body::to_bytes(res.into_body())
            .await
            .expect("Could not read authorization response body");
        String::from_utf8(body_bytes.to_vec()).expect("Response was not valid UTF-8")
    }
}
|
use iron::prelude::*;
use router::Router;
use urlencoded::UrlEncodedBody;
use urlencoded::UrlEncodedQuery;
// Extracts a string parameter from the path
/// Returns the value of the route parameter `parameter`, or `""` when it is
/// absent from the matched route.
///
/// Panics when the `Router` extension is missing from the request.
pub fn extract_param_from_path(req: &Request, parameter: &str) -> String {
    req.extensions.get::<Router>().unwrap().find(parameter).unwrap_or("").to_string()
}
/// Looks up `parameter` in the url-encoded request body and returns its
/// first value, or `None` when the key is absent.
///
/// Panics when the body cannot be parsed as url-encoded data.
pub fn extract_param_value_from_encoded_body(req: &mut Request, parameter: &str) -> Option<String> {
    let encoded_body = req.get_ref::<UrlEncodedBody>().unwrap();
    // Each key maps to a Vec of values; take the first one.
    encoded_body.get(parameter).map(|values| values[0].to_string())
}
/// Looks up `parameter` in the url-encoded request body and parses it as an
/// `i32`.
///
/// Returns `None` when the key is absent or the value is not a valid `i32`
/// (the previous version panicked via `unwrap` on unparsable user input).
pub fn extract_i32_param_value_from_encoded_body(req: &mut Request, parameter: &str) -> Option<i32> {
    extract_param_value_from_encoded_body(req, parameter).and_then(|v| v.parse::<i32>().ok())
}
pub fn extract_string_param(req: &mut Request, parameter: &str) -> Option<String> {
match req.get_ref::<UrlEncodedQuery>() {
Ok(ref hashmap) => {
let param = hashmap.get(parameter);
match param {
Some(n) => Some(n[0].parse::<String>().unwrap()),
None => None
}
},
Err(_) => None
}
}
|
use std::cmp::Ordering;
use super::{Gui, WidgetNode, text::Text, button::Button};
/// Observer notified whenever the `GuiValue` it is registered with changes.
pub trait GuiValueListener<T> {
    /// Called with the new contents after every effective change.
    fn value_changed(&mut self, gui: &mut Gui, new_value: &T);
}
/// Observable value that notifies registered listeners on change.
///
/// The value starts out unset; `get`/`get_ref` panic until `set` has been
/// called at least once.
pub struct GuiValue<T> where T: Clone + PartialEq {
    value: Option<T>,
    listeners: Vec<Box<dyn GuiValueListener<T>>>,
}
impl<T> GuiValue<T> where T: Clone + PartialEq {
    /// Creates an unset value with no listeners.
    pub fn new() -> GuiValue<T> {
        GuiValue { value: None, listeners: Vec::new() }
    }
    /// Returns a clone of the current value. Panics when unset.
    pub fn get(&self) -> T { self.get_ref().clone() }
    /// Borrows the current value. Panics when unset.
    pub fn get_ref(&self) -> &T { self.value.as_ref().unwrap() }
    /// Stores `value` and notifies every listener — unless it equals the
    /// currently stored value, in which case nothing happens.
    pub fn set(&mut self, gui: &mut Gui, value: T) {
        if self.value.is_some() && self.get_ref().eq(&value) { return; }
        self.value = Some(value);
        for listener in self.listeners.iter_mut() {
            listener.value_changed(gui, self.value.as_ref().unwrap());
        }
    }
    /// Registers `listener`; it is invoked on every subsequent change.
    pub fn add_listener<L>(&mut self, listener: L) where L: GuiValueListener<T> + 'static {
        self.listeners.push(Box::new(listener))
    }
}
/// Listener that copies a `String` value into the referenced text widget.
pub struct SetText(pub WidgetNode<Text>);
impl GuiValueListener<String> for SetText {
    fn value_changed(&mut self, gui: &mut Gui, new_value: &String) {
        // Panics if the widget node no longer exists in the Gui.
        gui.get_mut(self.0).unwrap().set_text(new_value.clone());
    }
}
/// Listener that enables/disables the referenced button from a `bool` value.
pub struct EnableButton(pub WidgetNode<Button>);
impl GuiValueListener<bool> for EnableButton {
    fn value_changed(&mut self, gui: &mut Gui, new_value: &bool) {
        // Panics if the widget node no longer exists in the Gui.
        gui.get_mut(self.0).unwrap().set_enabled(*new_value);
    }
}
/// Adapter: converts any `ToString` value to a `String` before forwarding it
/// to the wrapped `String` listener.
pub struct ConvertString<T>(pub T) where T: GuiValueListener<String>;
impl<T, I> GuiValueListener<I> for ConvertString<T> where T: GuiValueListener<String>, I: ToString {
    fn value_changed(&mut self, gui: &mut Gui, new_value: &I) {
        let value_str = new_value.to_string();
        self.0.value_changed(gui, &value_str);
    }
}
/// The six ways an incoming value can be compared against a reference value.
#[derive(Copy, Clone, Eq, PartialEq, Debug)]
pub enum Comparison {
    Equal,
    NotEqual,
    Greater,
    GreaterEqual,
    Less,
    LessEqual,
}
impl Comparison {
    /// Returns whether `ordering` satisfies this comparison.
    fn matches(self, ordering: Ordering) -> bool {
        // Express each comparison as the set of orderings it accepts.
        let (on_less, on_equal, on_greater) = match self {
            Comparison::Equal => (false, true, false),
            Comparison::NotEqual => (true, false, true),
            Comparison::Greater => (false, false, true),
            Comparison::GreaterEqual => (false, true, true),
            Comparison::Less => (true, false, false),
            Comparison::LessEqual => (true, true, false),
        };
        match ordering {
            Ordering::Less => on_less,
            Ordering::Equal => on_equal,
            Ordering::Greater => on_greater,
        }
    }
}
/// Listener adapter: compares the incoming value against a fixed reference
/// value (`self.1`) using comparison `self.0`, then forwards the boolean
/// outcome to the wrapped listener (`self.2`).
pub struct Compare<V, T>(pub Comparison, pub V, pub T) where V: PartialOrd, T: GuiValueListener<bool>;
impl<V, T> GuiValueListener<V> for Compare<V, T> where V: PartialOrd, T: GuiValueListener<bool> {
    fn value_changed(&mut self, gui: &mut Gui, new_value: &V) {
        // Unordered values (e.g. NaN) yield `None` -> default `false`.
        let value_bool = new_value.partial_cmp(&self.1).map(|ord| self.0.matches(ord)).unwrap_or_default();
        self.2.value_changed(gui, &value_bool);
    }
}
|
use crate::cli::History;
use crate::data::config;
use crate::data::{Dictionary, Value};
use crate::errors::ShellError;
use crate::prelude::*;
use crate::TaggedDictBuilder;
use crate::commands::WholeStreamCommand;
use crate::parser::registry::Signature;
use indexmap::IndexMap;
/// The `env` command: reports the current environment as a single row.
pub struct Env;
impl WholeStreamCommand for Env {
    fn name(&self) -> &str {
        "env"
    }
    fn signature(&self) -> Signature {
        // No flags or positional arguments.
        Signature::build("env")
    }
    fn usage(&self) -> &str {
        "Get the current environment."
    }
    fn run(
        &self,
        args: CommandArgs,
        registry: &CommandRegistry,
    ) -> Result<OutputStream, ShellError> {
        env(args, registry)
    }
}
/// Collects environment information — cwd, home, config/history paths, temp
/// dir, and all environment variables — into one tagged row value.
pub fn get_environment(tag: Tag) -> Result<Tagged<Value>, Box<dyn std::error::Error>> {
    let mut indexmap = IndexMap::new();
    let path = std::env::current_dir()?;
    indexmap.insert("cwd".to_string(), Value::path(path).tagged(tag));
    // `home` is omitted when the platform cannot determine a home directory.
    if let Some(home) = dirs::home_dir() {
        indexmap.insert("home".to_string(), Value::path(home).tagged(tag));
    }
    let config = config::default_path()?;
    indexmap.insert("config".to_string(), Value::path(config).tagged(tag));
    let history = History::path();
    indexmap.insert("history".to_string(), Value::path(history).tagged(tag));
    let temp = std::env::temp_dir();
    indexmap.insert("temp".to_string(), Value::path(temp).tagged(tag));
    let mut dict = TaggedDictBuilder::new(tag);
    for v in std::env::vars() {
        dict.insert(v.0, Value::string(v.1));
    }
    // Only add a `vars` column when at least one variable exists.
    if !dict.is_empty() {
        indexmap.insert("vars".to_string(), dict.into_tagged_value());
    }
    Ok(Value::Row(Dictionary::from(indexmap)).tagged(tag))
}
/// Runner for the `env` command: emits exactly one row describing the
/// current environment (see `get_environment`).
pub fn env(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream, ShellError> {
    let args = args.evaluate_once(registry)?;
    let mut env_out = VecDeque::new();
    let tag = args.call_info.name_tag;
    let value = get_environment(tag)?;
    env_out.push_back(value);
    Ok(env_out.to_output_stream())
}
|
extern crate libc;
mod magic;
mod admin;
mod tests;
|
#[macro_use]
extern crate rocket;
pub mod apple;
pub mod config;
pub mod microsoft;
pub mod mozilla;
/// Rocket entry point: loads the mail configuration and mounts all routes.
#[launch]
fn rocket() -> _ {
    // Prefer the system-wide config; fall back to a local `config.toml`.
    // `or_else` keeps the fallback read lazy — the previous `or(...)` read
    // the local file eagerly even when the system config loaded fine.
    let config = config::MailConfig::read("/etc/automail/config.toml")
        .or_else(|_| config::MailConfig::read("config.toml"))
        .expect("error: unable to load the configuration file");
    rocket::build()
        .manage(config)
        .mount("/", routes![apple::mobileconfig, mozilla::autoconfig, mozilla::autoconfig_wellknown, microsoft::autodiscover])
}
|
extern crate num;
use std::collections::VecDeque;
/// Ant-book 2.4 "Expedition" (POJ 2431): minimum number of refueling stops
/// to travel distance `l` with `p` starting fuel. `a` holds the distances of
/// the `n` fuel stops from the start (ascending) and `b` their fuel amounts.
///
/// Returns `Some(stops)`, or `None` (after printing "-1") when the goal is
/// unreachable.
///
/// Fixes over the previous version: the goal sentinel now pushes `0` fuel
/// into the fuel list (it was pushed into the distance list), the final leg
/// to `l` is actually checked, and refueling greedily takes the LARGEST
/// passed station via a max-heap instead of the most recent one (LIFO).
fn act2_4_1(n: usize, l: usize, p: usize, a: &Vec<usize>, b: &Vec<usize>) -> Option<usize> {
    use std::collections::BinaryHeap;
    // Treat the goal as one extra "station" with no fuel so the final leg
    // is checked like any other.
    let mut stops = a[..n].to_vec();
    stops.push(l);
    let mut fuel = b[..n].to_vec();
    fuel.push(0);
    // Max-heap of fuel amounts from stations already driven past.
    let mut passed: BinaryHeap<usize> = BinaryHeap::new();
    let mut ans = 0;
    let mut pos = 0;
    let mut tank = p as i64;
    for i in 0..=n {
        let d = (stops[i] - pos) as i64;
        // Refuel greedily with the largest passed station until this leg
        // becomes coverable.
        while tank < d {
            match passed.pop() {
                Some(f) => {
                    tank += f as i64;
                    ans += 1;
                }
                None => {
                    println!("-1");
                    return None;
                }
            }
        }
        tank -= d;
        pos = stops[i];
        passed.push(fuel[i]);
    }
    Some(ans)
}
/// Returns a copy of `vd` sorted ascending.
///
/// Fix: the previous version sorted only the first of the deque's two
/// internal slices (`as_slices().0`), silently dropping every element in the
/// second slice whenever the ring buffer had wrapped.
fn vecDequeSort(vd: &VecDeque<usize>) -> VecDeque<usize> {
    // Iterate the whole deque, regardless of how it is split internally.
    let mut v: Vec<usize> = vd.iter().copied().collect();
    v.sort();
    v.into_iter().collect()
}
/// Ant-book 2.4 "Fence Repair" (POJ 3253): minimum total cost of combining
/// the first `n` board lengths in `l`, where combining two boards costs the
/// sum of their lengths.
///
/// Fix: the optimal greedy must always combine the two SMALLEST remaining
/// boards. The previous version never sorted the initial deque and popped
/// from the back of the (later-)sorted deque, i.e. combined the two LARGEST
/// boards — wrong for inputs such as `[1, 2, 3]` (returned 11 instead of 9).
fn act2_4_2(n: usize, l: &Vec<usize>) -> i64 {
    use std::cmp::Reverse;
    use std::collections::BinaryHeap;
    // Min-heap over board lengths via `Reverse`.
    let mut heap: BinaryHeap<Reverse<usize>> = l.iter().take(n).map(|&x| Reverse(x)).collect();
    let mut ans: i64 = 0;
    while heap.len() > 1 {
        let Reverse(l1) = heap.pop().unwrap();
        let Reverse(l2) = heap.pop().unwrap();
        ans += (l1 + l2) as i64;
        heap.push(Reverse(l1 + l2));
    }
    ans
}
// Source:
// http://sntea.hatenablog.com/entry/2017/06/07/091246
// (slightly modified from the above)
/// Disjoint-set (union-find) with path compression and union by rank.
struct UnionFind {
    par: Vec<usize>,
    rank: Vec<usize>,
}
impl UnionFind {
    /// Creates `n` singleton sets: every element starts as its own root.
    fn new(n: usize) -> UnionFind {
        UnionFind {
            par: (0..n).collect(),
            rank: vec![0; n],
        }
    }
    /// Returns the root of `x`'s set, compressing the path along the way.
    fn find(&mut self, x: usize) -> usize {
        // First pass: walk up to the root.
        let mut root = x;
        while self.par[root] != root {
            root = self.par[root];
        }
        // Second pass: point every node on the path straight at the root.
        let mut node = x;
        while self.par[node] != root {
            let parent = self.par[node];
            self.par[node] = root;
            node = parent;
        }
        root
    }
    /// True when `a` and `b` belong to the same set.
    fn same(&mut self, a: usize, b: usize) -> bool {
        self.find(a) == self.find(b)
    }
    /// Merges the sets containing `a` and `b`, attaching the lower-rank root
    /// beneath the higher-rank one.
    fn unite(&mut self, a: usize, b: usize) {
        let root_a = self.find(a);
        let root_b = self.find(b);
        if root_a == root_b {
            return;
        }
        if self.rank[root_a] < self.rank[root_b] {
            self.par[root_a] = root_b;
        } else {
            self.par[root_b] = root_a;
            if self.rank[root_a] == self.rank[root_b] {
                self.rank[root_a] += 1;
            }
        }
    }
    /// Dumps the raw parent/rank arrays for debugging.
    fn debug(&mut self) {
        println!("par:{:?}, rank:{:?}", self.par, self.rank);
    }
}
/// Ant-book 2.4 food-chain-style problem: counts contradictory statements.
///
/// Each row of `inputList` is `[t, x, y]`: `t == 1` claims x and y are the
/// same kind, otherwise x preys on y. A union-find over `3 * n` nodes
/// (same / prey / predator groups per animal) detects inconsistencies.
///
/// NOTE(review): the debug `println!` calls make this unsuitable for large
/// inputs.
fn act2_4_3(n: usize, k: usize, inputList: &Vec<Vec<usize>>) -> usize {
    let mut xList = Vec::new();
    let mut yList = Vec::new();
    let mut tList = Vec::new();
    // Split the [t, x, y] rows into parallel columns.
    for x in inputList {
        tList.push(x[0]);
        xList.push(x[1]);
        yList.push(x[2]);
    }
    println!("xList:{:?}", xList);
    println!("yList:{:?}", yList);
    println!("tList:{:?}", tList);
    let mut uf = UnionFind::new(n * 3);
    let mut ans = 0;
    for i in 0..k {
        println!("{}つ目", i + 1);
        let t = tList[i];
        // Inputs are 1-based; convert to 0-based indices.
        let x = xList[i] - 1;
        let y = yList[i] - 1;
        // NOTE(review): `x < 0` / `y < 0` are always false for `usize`;
        // only the upper-bound checks are effective here.
        if x < 0 || n <= x || y < 0 || n <= y {
            ans += 1;
            continue
        }
        if t == 1 {
            // "x and y are the same kind" contradicts x preying on y (either way).
            if uf.same(x, y + n) || uf.same(x, y + 2 * n) {
                ans += 1;
            }else{
                uf.unite(x, y);
                uf.unite(x + n, y + n);
                uf.unite(x + n * 2, y + n * 2);
            }
        }else{
            // "x preys on y" contradicts them being the same kind or y preying on x.
            if uf.same(x, y) || uf.same(x, y + 2 * n) {
                ans += 1;
            }else{
                uf.unite(x, y + n);
                uf.unite(x + n, y + 2 * n);
                uf.unite(x + 2 * n, y);
            }
        }
    }
    return ans
}
#[cfg(test)]
mod tests {
    use super::*;
    // Expedition: 4 stops on a 25-unit trip, 10 starting fuel -> 2 refuels.
    #[test]
    fn act2_4_1_test(){
        assert_eq!(Some(2), act2_4_1(4, 25, 10, &vec![10, 14, 20, 21], &vec![10, 5, 2, 4]));
    }
    // Fence repair: boards [8, 5, 8] -> minimum total cost 34.
    #[test]
    fn act2_4_2_test(){
        assert_eq!(34, act2_4_2(3, &vec![8, 5, 8]));
    }
    // Food chain: 7 statements over 100 animals, 3 of them contradictory.
    #[test]
    fn act2_4_3_test(){
        let inputList = vec![vec![1,101,1],
        vec![2,1,2],
        vec![2,2,3],
        vec![2,3,3],
        vec![1,1,3],
        vec![2,3,1],
        vec![1,5,5]];
        assert_eq!(3, act2_4_3(100, 7, &inputList));
    }
}
|
use core::fmt::Debug;
use halo2::arithmetic::FieldExt;
use halo2::pasta::{pallas, vesta};
use std::cell::UnsafeCell;
use std::marker::PhantomData;
use std::mem::transmute;
use std::slice;
use std::sync::atomic::{AtomicUsize, Ordering};
use std::sync::Arc;
/// Fixed RNG seed so tests and benchmarks are reproducible.
pub const TEST_SEED: [u8; 16] = [42; 16];
// Question: Should the naming of `PallasVDF` and `VestaVDF` be reversed?
/// Strategy used to evaluate the VDF's slow (forward) direction.
///
/// LTR/RTL refer to the bit order of the square-and-multiply exponentiation;
/// "AddChain" variants use an optimized addition chain, and "Parallel"
/// variants pipeline the squarings on a second thread (see `eval_rtl` /
/// `eval_rtl_addition_chain`).
#[derive(Debug, Clone, Copy)]
pub enum EvalMode {
    LTRSequential,
    LTRAddChainSequential,
    RTLSequential,
    RTLParallel,
    RTLAddChainSequential,
    RTLAddChainParallel,
}
impl EvalMode {
    /// All evaluation modes, e.g. for exhaustive testing.
    pub fn all() -> Vec<EvalMode> {
        vec![
            Self::LTRSequential,
            Self::LTRAddChainSequential,
            Self::RTLSequential,
            Self::RTLParallel,
            Self::RTLAddChainSequential,
            Self::RTLAddChainParallel,
        ]
    }
}
/// Shared scratch buffer of field-element limbs used to hand successive
/// squarings from the producer thread to the consumer in the parallel eval
/// modes.
#[derive(Debug)]
struct Sq(Arc<UnsafeCell<Box<[[u64; 4]]>>>);
// SAFETY: `Sq` is shared between exactly two threads that coordinate all
// access through an `AtomicUsize` "ready" index (see `eval_rtl` and
// `forward_step_with_squares`); the cell itself provides no synchronization.
unsafe impl Send for Sq {}
unsafe impl Sync for Sq {}
/// Modulus is that of `Fq`, which is the base field of `Vesta` and scalar field of `Pallas`.
#[derive(Debug)]
pub struct PallasVDF {
    // Strategy used by `eval` / `forward_step`.
    eval_mode: EvalMode,
}
impl RaguVDF<pallas::Scalar> for PallasVDF {
    fn new_with_mode(eval_mode: EvalMode) -> Self {
        PallasVDF { eval_mode }
    }
    // To bench with this on 3970x:
    // RUSTFLAG="-C target-cpu=native -g" taskset -c 0,40 cargo bench
    /// Dispatches to the evaluation loop selected by `eval_mode`; only the
    /// two parallel modes need specialized loops, everything else goes
    /// through `simple_eval`.
    fn eval(&mut self, x: RoundValue<pallas::Scalar>, t: u64) -> RoundValue<pallas::Scalar> {
        match self.eval_mode {
            EvalMode::LTRSequential
            | EvalMode::LTRAddChainSequential
            | EvalMode::RTLAddChainSequential
            | EvalMode::RTLSequential => self.simple_eval(x, t),
            EvalMode::RTLAddChainParallel => self.eval_rtl_addition_chain(x, t),
            EvalMode::RTLParallel => self.eval_rtl(x, t),
        }
    }
    fn element(n: u64) -> pallas::Scalar {
        pallas::Scalar::from(n)
    }
    /// Pallas' inverse_exponent is 5, so we can hardcode this.
    fn inverse_step(x: pallas::Scalar) -> pallas::Scalar {
        // x^5 = x * x^4.
        x.mul(&x.square().square())
    }
    /// Per-mode choice of the sequential forward-step implementation.
    fn forward_step(&mut self, x: pallas::Scalar) -> pallas::Scalar {
        match self.eval_mode {
            EvalMode::LTRSequential => self.forward_step_ltr_sequential(x),
            EvalMode::RTLSequential => self.forward_step_rtl_sequential(x),
            EvalMode::RTLAddChainSequential => self.forward_step_sequential_rtl_addition_chain(x),
            EvalMode::LTRAddChainSequential => self.forward_step_ltr_addition_chain(x),
            // Parallel modes never reach here; they go through `eval`.
            _ => unreachable!(),
        }
    }
}
impl PallasVDF {
    /// Number of bits in exponent.
    fn bit_count() -> usize {
        254
    }
    // To bench with this on 3970x:
    // RUSTFLAG="-C target-cpu=native -g" taskset -c 0,40 cargo bench
    /// Parallel RTL square-and-multiply: a producer thread repeatedly squares
    /// the round input and publishes each power into the shared `Sq` buffer,
    /// while this thread (the consumer, in `round_with_squares`) multiplies
    /// published powers into its accumulator. The `ready` atomic carries the
    /// index of the most recently published square; writing 0 doubles as the
    /// "start the next round" signal.
    fn eval_rtl(&mut self, x: RoundValue<pallas::Scalar>, t: u64) -> RoundValue<pallas::Scalar> {
        let bit_count = Self::bit_count();
        let squares1 = Arc::new(UnsafeCell::new(vec![[0u64; 4]; 254].into_boxed_slice()));
        let sq = Sq(squares1);
        let ready = Arc::new(AtomicUsize::new(1)); // Importantly, not zero.
        let ready_clone = Arc::clone(&ready);
        crossbeam::scope(|s| {
            s.spawn(|_| {
                // SAFETY: assumes `pallas::Scalar` is layout-compatible with
                // `[u64; 4]` (its four limbs) — TODO confirm this is
                // guaranteed by the halo2 version in use.
                let squares = unsafe {
                    transmute::<&mut [[u64; 4]], &mut [pallas::Scalar]>(slice::from_raw_parts_mut(
                        (*sq.0.get()).as_mut_ptr(),
                        bit_count,
                    ))
                };
                // Publish a square, then advertise its index to the consumer.
                macro_rules! store {
                    ($index:ident, $val:ident) => {
                        squares[$index] = $val;
                        ready.store($index, Ordering::SeqCst)
                    };
                };
                for _ in 0..t {
                    // Spin until the consumer seeds slot 0 with this round's base.
                    while ready.load(Ordering::SeqCst) != 0 {}
                    let mut next_square = squares[0];
                    for i in 0..Self::bit_count() {
                        if i > 0 {
                            next_square = next_square.square();
                        };
                        store!(i, next_square);
                    }
                }
            });
            (0..t).fold(x, |acc, _| self.round_with_squares(acc, &sq, &ready_clone))
        })
        .unwrap()
    }
    // To bench with this on 3970x:
    // RUSTFLAG="-C target-cpu=native -g" taskset -c 0,40 cargo bench
    /// Like `eval_rtl`, but after the first 128 bits the producer switches to
    /// an addition-chain schedule and publishes only every 8th square,
    /// matching the consumer in `forward_step_with_squares`.
    fn eval_rtl_addition_chain(
        &mut self,
        x: RoundValue<pallas::Scalar>,
        t: u64,
    ) -> RoundValue<pallas::Scalar> {
        let bit_count = Self::bit_count();
        let squares1 = Arc::new(UnsafeCell::new(vec![[0u64; 4]; 254].into_boxed_slice()));
        let sq = Sq(squares1);
        let ready = Arc::new(AtomicUsize::new(1)); // Importantly, not zero.
        let ready_clone = Arc::clone(&ready);
        crossbeam::scope(|s| {
            s.spawn(|_| {
                // SAFETY: same layout assumption as in `eval_rtl`.
                let squares = unsafe {
                    transmute::<&mut [[u64; 4]], &mut [pallas::Scalar]>(slice::from_raw_parts_mut(
                        (*sq.0.get()).as_mut_ptr(),
                        bit_count,
                    ))
                };
                macro_rules! store {
                    ($index:ident, $val:ident) => {
                        squares[$index] = $val;
                        ready.store($index, Ordering::SeqCst)
                    };
                };
                for _ in 0..t {
                    while ready.load(Ordering::SeqCst) != 0 {}
                    let mut next_square = squares[0];
                    let first_section_bit_count = 128;
                    // Dense section: publish every square.
                    for i in 0..first_section_bit_count {
                        if i > 0 {
                            next_square = next_square.square();
                        };
                        store!(i, next_square);
                    }
                    let mut k = first_section_bit_count;
                    // Addition-chain hop before the sparse section.
                    next_square = {
                        let mut x = next_square;
                        x = x.mul(&x.square());
                        x.mul(&x.square().square().square().square())
                    };
                    // Sparse section: publish only every 8th square.
                    for j in 1..=(8 * 15 + 1) {
                        next_square = next_square.square();
                        if j % 8 == 1 {
                            store!(k, next_square);
                            k += 1;
                        }
                    }
                }
            });
            (0..t).fold(x, |acc, _| self.round_with_squares(acc, &sq, &ready_clone))
        })
        .unwrap()
    }
    /// one round in the slow/forward direction.
    #[inline]
    fn round_with_squares(
        &mut self,
        x: RoundValue<pallas::Scalar>,
        squares: &Sq,
        ready: &Arc<AtomicUsize>,
    ) -> RoundValue<pallas::Scalar> {
        RoundValue {
            // Increment the value by the round number so problematic values
            // like 0 and 1 don't consistently defeat the asymmetry.
            value: match self.eval_mode {
                EvalMode::RTLParallel => self.forward_step_with_squares_naive_rtl(
                    pallas::Scalar::add(&x.value, &x.round),
                    squares,
                    ready,
                ),
                EvalMode::RTLAddChainParallel => self.forward_step_with_squares(
                    pallas::Scalar::add(&x.value, &x.round),
                    squares,
                    ready,
                ),
                _ => panic!("fell through in round_with_squares"),
            },
            // Increment the round.
            round: pallas::Scalar::add(&x.round, &pallas::Scalar::one()),
        }
    }
    /// Consumer half of `eval_rtl_addition_chain`: seeds slot 0 with `x`,
    /// signals the producer via `ready = 0`, then multiplies published
    /// squares into `acc` — one slot per exponent bit for the first 128 bits,
    /// then one published slot per 8 bits.
    #[inline]
    fn forward_step_with_squares(
        &mut self,
        x: pallas::Scalar,
        squares: &Sq,
        ready: &Arc<AtomicUsize>,
    ) -> pallas::Scalar {
        let sq = squares.0.get();
        // SAFETY: the producer is spinning on `ready != 0`, so slot 0 is
        // exclusively ours until the store below; same layout assumption as
        // `eval_rtl`.
        unsafe { (**sq)[0] = transmute::<pallas::Scalar, [u64; 4]>(x) };
        ready.store(0, Ordering::SeqCst);
        let mut remaining = Self::exponent();
        let mut acc = pallas::Scalar::one();
        let bit_count = Self::bit_count();
        let first_section_bit_count = 128;
        let second_section_bit_count = bit_count - first_section_bit_count;
        let n = first_section_bit_count + (second_section_bit_count / 8) + 1;
        for next_index in 1..=n {
            let current_index = next_index - 1;
            let limb_index = current_index / 64;
            let limb = remaining[limb_index];
            let one = (limb & 1) == 1;
            let in_second_section = next_index > first_section_bit_count;
            if in_second_section || one {
                // Spin until the producer has published the square we need.
                while ready.load(Ordering::SeqCst)
                    < if next_index > 1 {
                        current_index
                    } else {
                        next_index
                    }
                {}
                // SAFETY: slots at or below the advertised `ready` index are
                // no longer written by the producer this round.
                let squares =
                    unsafe { transmute::<&[[u64; 4]], &[pallas::Scalar]>(&**(squares.0.get())) };
                let elt = squares[current_index];
                acc = acc.mul(&elt);
            };
            remaining[limb_index] = limb >> 1;
        }
        acc
    }
    /// Consumer half of `eval_rtl` (dense schedule).
    /// NOTE(review): `first_section_bit_count` is `bit_count - 1` here, so
    /// effectively every bit is handled densely; the "second section"
    /// bookkeeping mirrors `forward_step_with_squares` but is nearly inert.
    #[inline]
    fn forward_step_with_squares_naive_rtl(
        &mut self,
        x: pallas::Scalar,
        squares: &Sq,
        ready: &Arc<AtomicUsize>,
    ) -> pallas::Scalar {
        let sq = squares.0.get();
        // SAFETY: see `forward_step_with_squares`.
        unsafe { (**sq)[0] = transmute::<pallas::Scalar, [u64; 4]>(x) };
        ready.store(0, Ordering::SeqCst);
        let mut remaining = Self::exponent();
        let mut acc = pallas::Scalar::one();
        let bit_count = Self::bit_count();
        let first_section_bit_count = bit_count - 1;
        let second_section_bit_count = bit_count - first_section_bit_count;
        let n = first_section_bit_count + (second_section_bit_count / 8) + 1;
        for next_index in 1..=n {
            let current_index = next_index - 1;
            let limb_index = current_index / 64;
            let limb = remaining[limb_index];
            let one = (limb & 1) == 1;
            let in_second_section = next_index > first_section_bit_count;
            if in_second_section || one {
                while ready.load(Ordering::SeqCst)
                    < if next_index > 1 {
                        current_index
                    } else {
                        next_index
                    }
                {}
                // SAFETY: see `forward_step_with_squares`.
                let squares =
                    unsafe { transmute::<&[[u64; 4]], &[pallas::Scalar]>(&**(squares.0.get())) };
                let elt = squares[current_index];
                acc = acc.mul(&elt);
            };
            remaining[limb_index] = limb >> 1;
        }
        acc
    }
    /// Sequential LTR exponentiation by a fixed addition chain for Pallas'
    /// forward exponent (`Self::exponent()`).
    fn forward_step_ltr_addition_chain(&mut self, x: pallas::Scalar) -> pallas::Scalar {
        // Helpers: repeated squaring, multiplication, square-then-multiply.
        let sqr = |x: pallas::Scalar, i: u32| (0..i).fold(x, |x, _| x.square());
        let mul = |x: pallas::Scalar, y| x.mul(y);
        let sqr_mul = |x, n, y: pallas::Scalar| y.mul(&sqr(x, n));
        // `qABC` holds x^(0bABC); the chain below is mechanical.
        let q1 = x;
        let q10 = sqr(q1, 1);
        let q11 = mul(q10, &q1);
        let q101 = mul(q10, &q11);
        let q110 = sqr(q11, 1);
        let q111 = mul(q110, &q1);
        let q1001 = mul(q111, &q10);
        let q1111 = mul(q1001, &q110);
        let qr2 = sqr_mul(q110, 3, q11);
        let qr4 = sqr_mul(qr2, 8, qr2);
        let qr8 = sqr_mul(qr4, 16, qr4);
        let qr16 = sqr_mul(qr8, 32, qr8);
        let qr32 = sqr_mul(qr16, 64, qr16);
        let qr32a = sqr_mul(qr32, 5, q1001);
        let qr32b = sqr_mul(qr32a, 8, q111);
        let qr32c = sqr_mul(qr32b, 4, q1);
        let qr32d = sqr_mul(qr32c, 2, qr4);
        let qr32e = sqr_mul(qr32d, 7, q11);
        let qr32f = sqr_mul(qr32e, 6, q1001);
        let qr32g = sqr_mul(qr32f, 3, q101);
        let qr32h = sqr_mul(qr32g, 7, q101);
        let qr32i = sqr_mul(qr32h, 7, q111);
        let qr32j = sqr_mul(qr32i, 4, q111);
        let qr32k = sqr_mul(qr32j, 5, q1001);
        let qr32l = sqr_mul(qr32k, 5, q101);
        let qr32m = sqr_mul(qr32l, 3, q11);
        let qr32n = sqr_mul(qr32m, 4, q101);
        let qr32o = sqr_mul(qr32n, 3, q101);
        let qr32p = sqr_mul(qr32o, 6, q1111);
        let qr32q = sqr_mul(qr32p, 4, q1001);
        let qr32r = sqr_mul(qr32q, 6, q101);
        let qr32s = sqr_mul(qr32r, 37, qr8);
        let qr32t = sqr_mul(qr32s, 2, q1);
        qr32t
    }
    // Sequential RTL square-and-multiply.
    fn forward_step_rtl_sequential(&mut self, x: pallas::Scalar) -> pallas::Scalar {
        // `scan` yields x^(2^i); `fold` multiplies in those powers whose
        // exponent bit is set, consuming `remaining` one bit per step.
        (0..254)
            .scan(x, |state, _| {
                let ret = *state;
                *state = (*state).square();
                Some(ret)
            })
            .fold(
                (Self::exponent(), pallas::Scalar::one(), 0),
                |(mut remaining, acc, count), elt| {
                    let limb_index = count / 64;
                    let limb = remaining[limb_index];
                    let one = (limb & 1) == 1;
                    let acc = if one { acc.mul(&elt) } else { acc };
                    remaining[limb_index] = limb >> 1;
                    (remaining, acc, count + 1)
                },
            )
            .1
    }
    // Sequential RTL square-and-multiply with optimized addition chain.
    fn forward_step_sequential_rtl_addition_chain(&mut self, x: pallas::Scalar) -> pallas::Scalar {
        let first_section_bit_count = 128;
        let acc = pallas::Scalar::one();
        // First section is same as rtl without addition chain.
        let (_, acc, _, square_acc) = (0..first_section_bit_count)
            .scan(x, |state, _| {
                let ret = *state;
                *state = (*state).square();
                Some(ret)
            })
            .fold(
                (Self::exponent(), acc, 0, pallas::Scalar::zero()),
                |(mut remaining, acc, count, _previous_elt), elt| {
                    let limb_index = count / 64;
                    let limb = remaining[limb_index];
                    let one = (limb & 1) == 1;
                    let acc = if one { acc.mul(&elt) } else { acc };
                    remaining[limb_index] = limb >> 1;
                    (remaining, acc, count + 1, elt)
                },
            );
        // Addition-chain hop, mirroring the parallel producer's schedule.
        let square_acc = square_acc.mul(&square_acc.square());
        let square_acc = square_acc.mul(&square_acc.square().square().square().square());
        // Second section: multiply in every 8th square.
        let acc = (0..122)
            .scan(square_acc, |state, _| {
                *state = (*state).square();
                Some(*state)
            })
            .fold((acc, 1), |(acc, count), elt| {
                if count % 8 == 1 {
                    (acc.mul(&elt), count + 1)
                } else {
                    (acc, count + 1)
                }
            })
            .0;
        acc
    }
}
/// Modulus is that of `Fp`, which is the base field of `Pallas and scalar field of Vesta.
#[derive(Debug)]
pub struct VestaVDF {}
impl RaguVDF<vesta::Scalar> for VestaVDF {
    fn new_with_mode(_eval_mode: EvalMode) -> Self {
        // Vesta has no alternative eval strategies; the mode is ignored.
        VestaVDF {}
    }
    fn element(n: u64) -> vesta::Scalar {
        vesta::Scalar::from(n)
    }
    /// Vesta's inverse_exponent is 5, so we can hardcode this.
    fn inverse_step(x: vesta::Scalar) -> vesta::Scalar {
        // x^5 = x * x^4.
        x.mul(&x.square().square())
    }
    /// LTR exponentiation by a fixed addition chain for Vesta's forward
    /// exponent (`Self::exponent()`).
    fn forward_step(&mut self, x: vesta::Scalar) -> vesta::Scalar {
        // Helpers: repeated squaring, multiplication, square-then-multiply.
        let sqr = |x: vesta::Scalar, i: u32| (0..i).fold(x, |x, _| x.square());
        let mul = |x: vesta::Scalar, y| x.mul(y);
        let sqr_mul = |x, n, y: vesta::Scalar| y.mul(&sqr(x, n));
        // `pABC` holds x^(0bABC); the chain below is mechanical.
        let p1 = x;
        let p10 = sqr(p1, 1);
        let p11 = mul(p10, &p1);
        let p101 = mul(p10, &p11);
        let p110 = sqr(p11, 1);
        let p111 = mul(p110, &p1);
        let p1001 = mul(p111, &p10);
        let p1111 = mul(p1001, &p110);
        let pr2 = sqr_mul(p110, 3, p11);
        let pr4 = sqr_mul(pr2, 8, pr2);
        let pr8 = sqr_mul(pr4, 16, pr4);
        let pr16 = sqr_mul(pr8, 32, pr8);
        let pr32 = sqr_mul(pr16, 64, pr16);
        let pr32a = sqr_mul(pr32, 5, p1001);
        let pr32b = sqr_mul(pr32a, 8, p111);
        let pr32c = sqr_mul(pr32b, 4, p1);
        let pr32d = sqr_mul(pr32c, 2, pr4);
        let pr32e = sqr_mul(pr32d, 7, p11);
        let pr32f = sqr_mul(pr32e, 6, p1001);
        let pr32g = sqr_mul(pr32f, 3, p101);
        let pr32h = sqr_mul(pr32g, 5, p1);
        let pr32i = sqr_mul(pr32h, 7, p101);
        let pr32j = sqr_mul(pr32i, 4, p11);
        let pr32k = sqr_mul(pr32j, 8, p111);
        let pr32l = sqr_mul(pr32k, 4, p1);
        let pr32m = sqr_mul(pr32l, 4, p111);
        let pr32n = sqr_mul(pr32m, 9, p1111);
        let pr32o = sqr_mul(pr32n, 8, p1111);
        let pr32p = sqr_mul(pr32o, 6, p1111);
        let pr32q = sqr_mul(pr32p, 2, p11);
        let pr32r = sqr_mul(pr32q, 34, pr8);
        let pr32s = sqr_mul(pr32r, 2, p1);
        pr32s
    }
}
// Question: Is this right, or is it the reverse? Which scalar fields' modulus do we want to target?
/// Default VDF used by consumers of this module.
/// NOTE(review): the `'a` lifetime parameter is unused; consider removing it
/// in a breaking release.
pub type TargetVDF<'a> = PallasVDF;
/// A VDF state: the current `value` plus the `round` counter. Both are field
/// elements so they can be mixed together each round (see `RaguVDF::round`).
#[derive(std::cmp::PartialEq, Debug, Clone, Copy)]
pub struct RoundValue<T> {
    pub value: T,
    pub round: T,
}
/// Common interface of the asymmetric (slow-forward / fast-inverse) VDFs.
pub trait RaguVDF<F>: Debug
where
    F: FieldExt,
{
    /// Constructs the VDF with the default evaluation mode.
    fn new() -> Self
    where
        Self: Sized,
    {
        Self::new_with_mode(Self::default_mode())
    }
    fn new_with_mode(eval_mode: EvalMode) -> Self;
    fn default_mode() -> EvalMode {
        EvalMode::LTRSequential
    }
    #[inline]
    /// Exponent used to take a root in the 'slow' direction.
    fn exponent() -> [u64; 4] {
        F::RESCUE_INVALPHA
    }
    #[inline]
    /// Exponent used in the 'fast' direction.
    fn inverse_exponent() -> u64 {
        F::RESCUE_ALPHA
    }
    #[inline]
    /// The building block of a round in the slow, 'forward' direction.
    fn forward_step_ltr_sequential(&mut self, x: F) -> F {
        x.pow_vartime(Self::exponent())
    }
    #[inline]
    /// The building block of a round in the slow, 'forward' direction.
    fn forward_step(&mut self, x: F) -> F {
        self.forward_step_ltr_sequential(x)
    }
    #[inline]
    /// The building block of a round in the fast, 'inverse' direction.
    fn inverse_step(x: F) -> F {
        x.pow_vartime([Self::inverse_exponent(), 0, 0, 0])
    }
    /// one round in the slow/forward direction.
    fn round(&mut self, x: RoundValue<F>) -> RoundValue<F> {
        RoundValue {
            // Increment the value by the round number so problematic values
            // like 0 and 1 don't consistently defeat the asymmetry.
            value: self.forward_step(F::add(x.value, x.round)),
            // Increment the round.
            round: F::add(x.round, F::one()),
        }
    }
    /// One round in the fast/inverse direction.
    fn inverse_round(x: RoundValue<F>) -> RoundValue<F> {
        RoundValue {
            // Undo `round`: invert the step, then subtract the round number
            // (already decremented to `round - 1`) that was mixed in.
            value: F::add(F::sub(Self::inverse_step(x.value), x.round), F::one()),
            round: F::sub(x.round, F::one()),
        }
    }
    /// Evaluate input `x` with time/difficulty parameter, `t` in the
    /// slow/forward direction.
    fn eval(&mut self, x: RoundValue<F>, t: u64) -> RoundValue<F> {
        self.simple_eval(x, t)
    }
    fn simple_eval(&mut self, x: RoundValue<F>, t: u64) -> RoundValue<F> {
        (0..t).fold(x, |acc, _| self.round(acc))
    }
    /// Invert evaluation of output `x` with time/difficulty parameter, `t` in
    /// the fast/inverse direction.
    fn inverse_eval(x: RoundValue<F>, t: u64) -> RoundValue<F> {
        (0..t).fold(x, |acc, _| Self::inverse_round(acc))
    }
    /// Quickly check that `result` is the result of having slowly evaluated
    /// `original` with time/difficulty parameter `t`.
    fn check(result: RoundValue<F>, t: u64, original: RoundValue<F>) -> bool {
        original == Self::inverse_eval(result, t)
    }
    /// Embeds `n` into the field.
    fn element(n: u64) -> F;
}
/// Proof-by-result of a VDF run: verification re-runs the fast inverse
/// direction, so it is cheaper than evaluation but not succinct.
#[derive(Debug)]
pub struct VanillaVDFProof<V: RaguVDF<F> + Debug, F: FieldExt> {
    result: RoundValue<F>,
    t: u64,
    _v: PhantomData<V>,
}
impl<V: RaguVDF<F>, F: FieldExt> VanillaVDFProof<V, F> {
    /// Evaluates the VDF on `x` for `t` rounds using the default mode and
    /// records the result as a proof.
    pub fn eval_and_prove(x: RoundValue<F>, t: u64) -> Self {
        let mut vdf = V::new();
        let result = vdf.eval(x, t);
        Self {
            result,
            t,
            _v: PhantomData::<V>,
        }
    }
    /// Same as `eval_and_prove`, but with an explicit evaluation mode.
    pub fn eval_and_prove_with_mode(eval_mode: EvalMode, x: RoundValue<F>, t: u64) -> Self {
        let mut vdf = V::new_with_mode(eval_mode);
        let result = vdf.eval(x, t);
        Self {
            result,
            t,
            _v: PhantomData::<V>,
        }
    }
    pub fn result(&self) -> RoundValue<F> {
        self.result
    }
    /// Verifies that this proof's result equals `original` evaluated for
    /// `self.t` rounds.
    pub fn verify(&self, original: RoundValue<F>) -> bool {
        V::check(self.result, self.t, original)
    }
    /// Chains two proofs: valid when `other` starts where `self` ended; the
    /// combined proof covers `self.t + other.t` rounds. Returns `None` when
    /// `other` does not verify against `self`'s result.
    pub fn append(&self, other: Self) -> Option<Self> {
        if other.verify(self.result) {
            Some(Self {
                result: other.result,
                t: self.t + other.t,
                _v: PhantomData::<V>,
            })
        } else {
            None
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use rand::SeedableRng;
    use rand_xorshift::XorShiftRng;
    #[test]
    fn test_exponents() {
        test_exponents_aux::<PallasVDF, pallas::Scalar>();
        test_exponents_aux::<VestaVDF, vesta::Scalar>();
    }
    fn test_exponents_aux<V: RaguVDF<F>, F: FieldExt>() {
        // NOTE(review): both assertions are identical; the second was likely
        // meant to check something else.
        assert_eq!(V::inverse_exponent(), 5);
        assert_eq!(V::inverse_exponent(), 5);
    }
    #[test]
    fn test_steps() {
        test_steps_aux::<PallasVDF, pallas::Scalar>();
        test_steps_aux::<VestaVDF, vesta::Scalar>();
    }
    // A forward step followed by an inverse step must round-trip.
    fn test_steps_aux<V: RaguVDF<F>, F: FieldExt>() {
        let mut rng = XorShiftRng::from_seed(TEST_SEED);
        let mut vdf = V::new();
        for _ in 0..100 {
            let x = F::random(&mut rng);
            let y = vdf.forward_step(x);
            let z = V::inverse_step(y);
            assert_eq!(x, z);
        }
    }
    #[test]
    fn test_eval() {
        println!("top");
        test_eval_aux::<PallasVDF, pallas::Scalar>();
    }
    // Every eval mode must agree with the inverse direction and pass `check`.
    fn test_eval_aux<V: RaguVDF<F>, F: FieldExt>() {
        for mode in EvalMode::all().iter() {
            test_eval_aux2::<V, F>(*mode)
        }
    }
    fn test_eval_aux2<V: RaguVDF<F>, F: FieldExt>(eval_mode: EvalMode) {
        let mut rng = XorShiftRng::from_seed(TEST_SEED);
        let mut vdf = V::new_with_mode(eval_mode);
        for _ in 0..1 {
            let t = 10;
            let value = F::random(&mut rng);
            let round = F::random(&mut rng);
            let x = RoundValue { value, round };
            let y = vdf.eval(x, t);
            let z = V::inverse_eval(y, t);
            assert_eq!(x, z);
            assert!(V::check(y, t, x));
        }
    }
    #[test]
    fn test_vanilla_proof() {
        test_vanilla_proof_aux::<PallasVDF, pallas::Scalar>();
        test_vanilla_proof_aux::<VestaVDF, vesta::Scalar>();
    }
    fn test_vanilla_proof_aux<V: RaguVDF<F>, F: FieldExt>() {
        let mut rng = XorShiftRng::from_seed(TEST_SEED);
        let value = F::random(&mut rng);
        // Start at round 0 so the final round count equals the total t.
        let round = F::zero();
        let x = RoundValue { value, round };
        let t = 12;
        let n = 11;
        let first_proof = VanillaVDFProof::<V, F>::eval_and_prove(x, t);
        // NOTE(review): the literal 11 duplicates `n`; keep them in sync.
        let final_proof = (1..11).fold(first_proof, |acc, _| {
            let new_proof = VanillaVDFProof::<V, F>::eval_and_prove(acc.result, t);
            acc.append(new_proof).expect("failed to append proof")
        });
        assert_eq!(V::element(final_proof.t), final_proof.result.round);
        assert_eq!(n * t, final_proof.t);
        assert!(final_proof.verify(x));
    }
}
|
// Generated by `scripts/generate.js`
/// Alias exposing the Intel performance-query create-info struct under the
/// core query-pool name.
pub type VkQueryPoolCreateInfo = super::super::intel::VkQueryPoolPerformanceQueryCreateInfo;
#[doc(hidden)]
pub type RawVkQueryPoolCreateInfo = super::super::intel::RawVkQueryPoolPerformanceQueryCreateInfo;
#![allow(non_snake_case)]
use std::error::Error;
use crate::{
math::{FromCSV, Matrix, Vector},
regressor::{
config::Config,
regressor::Regressor,
},
};
pub mod regressor;
pub mod math;
#[cfg(test)]
pub mod test;
/// Trains an SGD regressor on the training CSVs and reports MSE / R² score
/// on the held-out test split.
fn main() -> Result<(), Box<dyn Error>> {
    let X = Matrix::read("./data/train/X.csv")?;
    let y = Vector::read("./data/train/y.csv")?;
    // Hyper-parameters tuned for this data set; see `Config` for defaults.
    let regressor = Config::default()
        .alpha(2e-6)
        .eta(1e-2)
        .iterations(1000)
        .stumble(12)
        .tolerance(1e-3)
        .verbose(true)
        .to_SGD()
        .fit(X, y);
    // Shadow X/y with the test split for evaluation.
    let X = Matrix::read("./data/test/X.csv")?;
    let y = Vector::read("./data/test/y.csv")?;
    println!("MSE: {:.05}", regressor.mse(&X, &y));
    println!("R2 Score: {:.05}", regressor.score(&X, &y));
    Ok(())
}
|
use glium::texture::Texture2d;
/// State shared by every widget: placement, size, tint, and backing texture.
pub struct Widget_Base {
    // (x, y) position — units/origin depend on the renderer; not shown here.
    pub position: (f32, f32),
    // (width, height).
    pub size: (f32, f32),
    // RGB color, one byte per channel.
    pub color: (u8, u8, u8),
    pub texture: Texture2d
}
/// A button widget: the common base plus its label text.
pub struct Button{
    pub base: Widget_Base,
    pub text: String
}
|
//! [RefCell<T>] and the Interior Mutability Pattern, part 2
//!
//! [refcell<t>]: https://doc.rust-lang.org/book/ch15-05-interior-mutability.html
use std::{cell::RefCell, fmt::Debug, rc::Rc};
/// A cons list whose element cells are shared (`Rc`) and interiorly mutable
/// (`RefCell`), demonstrating the interior-mutability pattern.
#[derive(Debug)]
pub enum List<T: Debug> {
    Cons(Rc<RefCell<T>>, Rc<List<T>>),
    Nil,
}
impl<T: Debug> List<T> {
    /// Builds a single-element list: one `Cons` cell holding `x`,
    /// terminated by `Nil`.
    pub fn new(x: T) -> Self {
        let head = Rc::new(RefCell::new(x));
        let tail = Rc::new(Self::Nil);
        Self::Cons(head, tail)
    }
}
#[cfg(test)]
mod tests {
    use super::List::{self, Cons, Nil};
    use std::{cell::RefCell, rc::Rc};
    // A fresh list is a single Cons holding the value, followed by Nil.
    #[test]
    fn new() {
        match List::new(5) {
            Cons(node, _) => assert_eq!(5, *node.borrow()),
            Nil => panic!("unexpected nil"),
        }
    }
    // Mutating the shared head value through `RefCell` is visible from every
    // list that holds a clone of the same `Rc`.
    #[test]
    fn update_value() {
        let value = Rc::new(RefCell::new(5));
        let a = Rc::new(Cons(Rc::clone(&value), Rc::new(Nil)));
        let _b = Rc::new(Cons(Rc::new(RefCell::new(99)), Rc::clone(&a)));
        let _c = Rc::new(Cons(Rc::new(RefCell::new(100)), Rc::clone(&a)));
        *value.borrow_mut() += 10;
        match &*a {
            Cons(node, _) => assert_eq!(15, *node.borrow()),
            Nil => panic!("unexpected nil"),
        }
    }
}
|
use crate::dirgraphsvg::edges::EdgeType;
use crate::dirgraphsvg::{escape_node_id, escape_text, nodes::Node};
use crate::file_utils::{get_filename, get_relative_path, set_extension, translate_to_output_path};
use crate::gsn::{get_levels, GsnNode, Module};
use anyhow::{Context, Result};
use chrono::Utc;
use clap::ArgMatches;
use std::collections::{BTreeMap, HashMap};
use std::io::Write;
use std::path::PathBuf;
/// How much legend (meta information) to put into a rendered diagram.
#[derive(Default, Eq, PartialEq)]
pub enum RenderLegend {
    /// No legend at all.
    No,
    /// Module name and brief description only (the default).
    #[default]
    Short,
    /// Short legend plus all additional module meta data.
    Full,
}
/// All options controlling diagram rendering, usually built from CLI flags
/// via `From<&ArgMatches>`.
pub struct RenderOptions {
    /// Stylesheet references to attach to the SVGs (URLs or paths).
    pub stylesheets: Vec<String>,
    /// Additional GSN layers to include in node texts.
    pub layers: Vec<String>,
    pub legend: RenderLegend,
    /// Embed stylesheet contents instead of referencing them.
    pub embed_stylesheets: bool,
    /// `None` suppresses the architecture view.
    pub architecture_filename: Option<String>,
    /// `None` suppresses the evidences list.
    pub evidences_filename: Option<String>,
    /// `None` suppresses the complete view.
    pub complete_filename: Option<String>,
    pub output_directory: String,
    /// Skip rendering the per-module argument views.
    pub skip_argument: bool,
}
impl From<&ArgMatches> for RenderOptions {
    /// Builds render options from parsed CLI arguments.
    ///
    /// Flag ids (NO_LEGEND, FULL_LEGEND, LAYERS, EMBED_CSS, STYLESHEETS,
    /// NO_*_VIEW, OUTPUT_DIRECTORY, NO_ARGUMENT_VIEW) must match the clap
    /// argument definition elsewhere in the crate.
    fn from(matches: &ArgMatches) -> Self {
        // Legend verbosity: NO_LEGEND wins over FULL_LEGEND; default Short.
        let legend = if matches.get_flag("NO_LEGEND") {
            RenderLegend::No
        } else if matches.get_flag("FULL_LEGEND") {
            RenderLegend::Full
        } else {
            RenderLegend::Short
        };
        let layers = matches
            .get_many::<String>("LAYERS")
            .into_iter()
            .flatten()
            .cloned()
            .collect::<Vec<_>>();
        let embed_stylesheets = matches.get_flag("EMBED_CSS");
        let stylesheets = matches
            .get_many::<String>("STYLESHEETS")
            .into_iter()
            .flatten()
            .map(|css| {
                // If stylesheets are not embedded transform their path.
                if embed_stylesheets {
                    css.to_owned()
                } else {
                    let path_css = PathBuf::from(css);
                    if css.starts_with("http://")
                        || css.starts_with("https://")
                        || css.starts_with("file://")
                    {
                        // URLs are referenced via CSS url(...).
                        format!("url({css})")
                    } else if path_css.is_relative() {
                        // Relative paths are made absolute.
                        // NOTE(review): `canonicalize().unwrap()` panics with
                        // the context message if the stylesheet is missing.
                        let path = path_css
                            .canonicalize()
                            .with_context(|| {
                                format!("Stylesheet {} is not found.", path_css.display())
                            })
                            .unwrap()
                            .to_string_lossy()
                            .into_owned();
                        format!("\"{path}\"")
                    } else {
                        format!("\"{css}\"")
                    }
                }
            })
            .collect::<Vec<_>>();
        RenderOptions {
            stylesheets,
            layers,
            legend,
            embed_stylesheets,
            // Each view filename becomes None when the matching NO_* flag
            // suppresses that view.
            architecture_filename: match matches.get_flag("NO_ARCHITECTURE_VIEW") {
                true => None,
                false => matches
                    .get_one::<String>("ARCHITECTURE_VIEW")
                    .and_then(|p| get_filename(p))
                    .map(|f| f.to_owned()),
            },
            evidences_filename: match matches.get_flag("NO_EVIDENCES") {
                true => None,
                false => matches
                    .get_one::<String>("EVIDENCES")
                    .and_then(|p| get_filename(p))
                    .map(|f| f.to_owned()),
            },
            complete_filename: match matches.get_flag("NO_COMPLETE_VIEW") {
                true => None,
                false => matches
                    .get_one::<String>("COMPLETE_VIEW")
                    .and_then(|p| get_filename(p))
                    .map(|f| f.to_owned()),
            },
            output_directory: matches
                .get_one::<String>("OUTPUT_DIRECTORY")
                .unwrap()
                .to_owned(), // Default value is used.
            skip_argument: matches.get_flag("NO_ARGUMENT_VIEW"),
        }
    }
}
///
/// Create the SVG node for a GSN element of the current module.
///
/// The node type is selected by the id prefix: G = goal, Sn = solution,
/// S = strategy, C = context, A = assumption, J = justification.
/// Panics (`unreachable!`) for any other prefix.
///
pub fn svg_from_gsn_node(id: &str, gsn_node: &GsnNode, layers: &[String]) -> Node {
    let classes = node_classes_from_node(gsn_node);
    // Add layer to node output
    let node_text = node_text_from_node_and_layers(gsn_node, layers);
    // Create node
    match id {
        id if id.starts_with('G') => Node::new_goal(
            id,
            &node_text,
            gsn_node.undeveloped.unwrap_or(false),
            gsn_node.url.to_owned(),
            classes,
        ),
        // "Sn" must be tested before the generic 'S' arm.
        id if id.starts_with("Sn") => {
            Node::new_solution(id, &node_text, gsn_node.url.to_owned(), classes)
        }
        id if id.starts_with('S') => Node::new_strategy(
            id,
            &node_text,
            gsn_node.undeveloped.unwrap_or(false),
            gsn_node.url.to_owned(),
            classes,
        ),
        id if id.starts_with('C') => {
            Node::new_context(id, &node_text, gsn_node.url.to_owned(), classes)
        }
        id if id.starts_with('A') => {
            Node::new_assumption(id, &node_text, gsn_node.url.to_owned(), classes)
        }
        id if id.starts_with('J') => {
            Node::new_justification(id, &node_text, gsn_node.url.to_owned(), classes)
        }
        _ => unreachable!(),
    }
}
///
/// Create SVG node text from GsnNode and layer information.
///
/// The node's own text comes first; for every requested layer the node has
/// additional text for, a `LAYER: text` line is appended (newlines inside
/// the layer text are flattened to spaces).
///
fn node_text_from_node_and_layers(gsn_node: &GsnNode, layers: &[String]) -> String {
    let mut node_text = gsn_node.text.to_owned();
    let additional_text: Vec<String> = layers
        .iter()
        .filter_map(|layer| {
            gsn_node.additional.get(layer).map(|layer_text| {
                format!(
                    "\n{}: {}",
                    layer.to_ascii_uppercase(),
                    layer_text.replace('\n', " ")
                )
            })
        })
        .collect();
    if !additional_text.is_empty() {
        node_text.push_str("\n\n");
        node_text.push_str(&additional_text.join("\n"));
    }
    node_text
}
///
/// Compute the CSS classes for a node: the node's own classes, one
/// `gsn_<layer>` class per additional layer, and a trailing
/// `gsn_module_<module>` class.
///
/// Cleanup: the old version cloned an already-owned `String`
/// (`t.to_owned()`) and collected through a needless `Option<Vec<_>>`
/// even though every mapped element was `Some`.
///
fn node_classes_from_node(gsn_node: &GsnNode) -> Vec<String> {
    // One "gsn_<layer>" class per additional layer key.
    let layer_classes = gsn_node
        .additional
        .keys()
        .map(|k| format!("gsn_{}", escape_text(&k.to_ascii_lowercase())));
    let mod_class = format!("gsn_module_{}", gsn_node.module);
    // Order preserved: node classes, then layer classes, then module class.
    gsn_node
        .classes
        .iter()
        .flatten()
        .cloned()
        .chain(layer_classes)
        .chain(std::iter::once(mod_class))
        .collect()
}
///
/// Create an "away" SVG node for a GSN element defined in another module.
///
/// Like `svg_from_gsn_node`, the node type is chosen by the id prefix; the
/// node additionally links back to its anchor in the defining module's SVG.
/// Fails if no relative path between the modules can be computed.
///
pub fn away_svg_from_gsn_node(
    id: &str,
    gsn_node: &GsnNode,
    module: &Module,
    source_module: &Module,
    layers: &[String],
) -> Result<Node> {
    let classes = node_classes_from_node(gsn_node);
    // URL of this node inside the other module's SVG, relative to the
    // source module's SVG: "<relative path>.svg#<escaped id>".
    let mut module_url = get_relative_path(
        &module.relative_module_path,
        &source_module.relative_module_path,
        Some("svg"),
    )?;
    module_url.push('#');
    module_url.push_str(&escape_node_id(id));
    // Add layer to node output
    let node_text = node_text_from_node_and_layers(gsn_node, layers);
    // Create node
    Ok(match id {
        id if id.starts_with('G') => Node::new_away_goal(
            id,
            &node_text,
            &gsn_node.module,
            Some(module_url),
            gsn_node.url.to_owned(),
            classes,
        ),
        // "Sn" must be tested before the generic 'S' arm.
        id if id.starts_with("Sn") => Node::new_away_solution(
            id,
            &node_text,
            &gsn_node.module,
            Some(module_url),
            gsn_node.url.to_owned(),
            classes,
        ),
        id if id.starts_with('S') => Node::new_strategy(
            id,
            &node_text,
            gsn_node.undeveloped.unwrap_or(false),
            Some(module_url), // Use module_url if Strategy is not defined in current module.
            classes,
        ),
        id if id.starts_with('C') => Node::new_away_context(
            id,
            &node_text,
            &gsn_node.module,
            Some(module_url),
            gsn_node.url.to_owned(),
            classes,
        ),
        id if id.starts_with('A') => Node::new_away_assumption(
            id,
            &node_text,
            &gsn_node.module,
            Some(module_url),
            gsn_node.url.to_owned(),
            classes,
        ),
        id if id.starts_with('J') => Node::new_away_justification(
            id,
            &node_text,
            &gsn_node.module,
            Some(module_url),
            gsn_node.url.to_owned(),
            classes,
        ),
        _ => unreachable!(), // Prefixes are checked during validation.
    })
}
///
/// Render the architecture view: one node per module, edges taken from the
/// inter-module `dependencies`. Only modules that appear as a dependency
/// source are drawn.
///
pub fn render_architecture(
    output: &mut impl Write,
    modules: &HashMap<String, Module>,
    dependencies: BTreeMap<String, BTreeMap<String, EdgeType>>,
    render_options: &RenderOptions,
    architecture_path: &str,
    output_path: &str,
) -> Result<()> {
    let mut dg = crate::dirgraphsvg::DirGraph::default();
    let svg_nodes: BTreeMap<String, Node> = modules
        .iter()
        .filter(|(k, _)| dependencies.contains_key(k.to_owned()))
        .map(|(k, module)| {
            (
                k.to_owned(),
                Node::new_module(
                    k,
                    // Brief description, or empty string if none is given.
                    module
                        .meta
                        .brief
                        .as_ref()
                        .map(|m| m.to_owned())
                        .unwrap_or_else(|| "".to_owned())
                        .as_str(),
                    {
                        // Link the module node to that module's rendered SVG,
                        // relative to where the architecture view lives.
                        let target_svg = set_extension(&module.relative_module_path, "svg");
                        let target_path = translate_to_output_path(output_path, &target_svg, None);
                        get_relative_path(
                            &target_path.unwrap(), // TODO remove unwraps
                            architecture_path,
                            None, // is already made "svg" above
                        )
                        .ok()
                    },
                    vec![],
                ),
            )
        })
        .collect();
    // Flatten the nested dependency map into per-source edge lists.
    let mut edges: BTreeMap<String, Vec<(String, EdgeType)>> = dependencies
        .into_iter()
        .map(|(k, v)| (k, Vec::from_iter(v.into_iter())))
        .collect();
    dg = dg
        .add_nodes(svg_nodes)
        .add_edges(&mut edges)
        .embed_stylesheets(render_options.embed_stylesheets)
        .add_css_stylesheets(
            &mut render_options
                .stylesheets
                .iter()
                .map(AsRef::as_ref)
                .collect(),
        );
    // NOTE(review): only this view passes `true` here, the other views pass
    // `false` — confirm the flag's meaning against DirGraph::write.
    dg.write(output, true)?;
    Ok(())
}
///
/// Render all nodes in one diagram
///
/// TODO mask modules MASK_MODULE
///
pub fn render_complete(
    output: &mut impl Write,
    nodes: &BTreeMap<String, GsnNode>,
    render_options: &RenderOptions,
) -> Result<()> {
    // let masked_modules_opt = matches
    //     .values_of("MASK_MODULE")
    //     .map(|x| x.map(|y| y.to_owned()).collect::<Vec<String>>());
    // let masked_modules = masked_modules_opt.iter().flatten().collect::<Vec<_>>();
    let mut dg = crate::dirgraphsvg::DirGraph::default();
    // One outgoing edge list per node id.
    let mut edges: BTreeMap<String, Vec<(String, EdgeType)>> = nodes
        .iter()
        // .filter(|(_, node)| !masked_modules.contains(&&node.module))
        // TODO continue masking here
        .map(|(id, node)| (id.to_owned(), node.get_edges()))
        .collect();
    // Every node is rendered as a regular (non-away) node.
    let svg_nodes: BTreeMap<String, Node> = nodes
        .iter()
        .map(|(id, node)| {
            (
                id.to_owned(),
                svg_from_gsn_node(id, node, &render_options.layers),
            )
        })
        .collect();
    dg = dg
        .add_nodes(svg_nodes)
        .add_edges(&mut edges)
        .add_levels(&get_levels(nodes))
        .embed_stylesheets(render_options.embed_stylesheets)
        .add_css_stylesheets(
            &mut render_options
                .stylesheets
                .iter()
                .map(AsRef::as_ref)
                .collect(),
        );
    dg.write(output, false)?;
    Ok(())
}
///
/// Render all nodes in one diagram
///
/// 1) Map gsn nodes to svg nodes
///    foreign module nodes will be mapped to the away svg node
/// 2) Replace the edges with the right ones
/// 3) filter all foreign modules that have no edge to this module
///
pub fn render_argument(
    output: &mut impl Write,
    module_name: &str,
    modules: &HashMap<String, Module>,
    nodes: &BTreeMap<String, GsnNode>,
    render_options: &RenderOptions,
) -> Result<()> {
    let mut dg = crate::dirgraphsvg::DirGraph::default();
    // Nodes of this module are rendered as regular nodes...
    let mut svg_nodes: BTreeMap<String, Node> = nodes
        .iter()
        .filter(|(_, node)| node.module == module_name)
        .map(|(id, node)| {
            (
                id.to_owned(),
                svg_from_gsn_node(id, node, &render_options.layers),
            )
        })
        .collect();
    // ...nodes of other modules as "away" nodes linking to their module.
    svg_nodes.append(
        &mut nodes
            .iter()
            .filter(|(_, node)| node.module != module_name)
            .map(|(id, node)| {
                Ok((
                    id.to_owned(),
                    away_svg_from_gsn_node(
                        id,
                        node,
                        // unwraps are ok, since node.module and modules are consistently created
                        modules.get(&node.module).unwrap(),
                        modules.get(module_name).unwrap(),
                        &render_options.layers,
                    )?,
                ))
            })
            .collect::<Result<BTreeMap<_, _>>>()?,
    );
    // Keep only edges that touch this module on at least one end.
    let mut edges: BTreeMap<String, Vec<(String, EdgeType)>> = nodes
        .iter()
        .map(|(id, node)| {
            (
                id.to_owned(),
                node.get_edges()
                    .into_iter()
                    .filter(|(target, _)| {
                        !(node.module != module_name
                            // unwrap is ok, since all references are checked at the beginning
                            && nodes.get(target).unwrap().module != module_name)
                    })
                    .collect::<Vec<(String, EdgeType)>>(),
            )
        })
        .filter(|(_, targets)| !targets.is_empty())
        .collect();
    // Drop foreign nodes that ended up unconnected to this module.
    svg_nodes.retain(|id, _| {
        edges.contains_key(id)
            || edges.values().flatten().any(|(x, _)| x == id)
            // unwrap is ok, since all references are checked at the beginning
            || nodes.get(id).unwrap().module == module_name
    });
    dg = dg
        .add_nodes(svg_nodes)
        .add_edges(&mut edges)
        .add_levels(&get_levels(nodes))
        .embed_stylesheets(render_options.embed_stylesheets)
        .add_css_stylesheets(
            &mut render_options
                .stylesheets
                .iter()
                .map(AsRef::as_ref)
                .collect(),
        );
    // Add meta information if requested
    if render_options.legend != RenderLegend::No {
        let mut meta_info = vec![format!("Generated on: {}", Utc::now())];
        if let Some(meta) = &modules.get(module_name).map(|x| &x.meta) {
            meta_info.insert(0, format!("Module: {}", meta.name));
            if let Some(brief) = &meta.brief {
                meta_info.insert(1, brief.to_owned());
            }
            if render_options.legend == RenderLegend::Full {
                // Full legend: dump the additional meta data line by line.
                let add = format!("{:?}", meta.additional);
                meta_info.append(&mut add.lines().map(|x| x.to_owned()).collect::<Vec<String>>());
            }
        }
        dg = dg.add_meta_information(&mut meta_info);
    }
    dg.write(output, false)?;
    Ok(())
}
///
/// Output list of evidences.
///
/// No template engine is used in order to keep dependencies to a minimum.
///
/// Solutions ("Sn…" ids) are the evidences; they are listed sorted by id,
/// each with its module, optional URL, and any requested layer texts.
///
pub(crate) fn render_evidences(
    output: &mut impl Write,
    nodes: &BTreeMap<String, GsnNode>,
    render_options: &RenderOptions,
) -> Result<()> {
    writeln!(output)?;
    writeln!(output, "List of Evidences")?;
    writeln!(output)?;
    let mut solutions: Vec<(&String, &GsnNode)> = nodes
        .iter()
        .filter(|(id, _)| id.starts_with("Sn"))
        .collect();
    solutions.sort_by_key(|(k, _)| *k);
    if solutions.is_empty() {
        writeln!(output, "No evidences found.")?;
    }
    // Width of the widest 1-based index. The decimal string length is exact;
    // the previous `log10().ceil()` was off by one for exact powers of ten
    // (e.g. 10 solutions produced width 1 instead of 2) and 0 for a single
    // solution.
    let width = solutions.len().to_string().len();
    for (i, (id, node)) in solutions.into_iter().enumerate() {
        // Continuation lines of multi-line texts are indented to line up
        // after "<index>. <id>: ".
        writeln!(
            output,
            "{:>width$}. {}: {}",
            i + 1,
            id,
            node.text
                .replace('\n', &format!("\n{: >w$}", ' ', w = width + 4 + id.len()))
        )?;
        // Detail lines are indented two spaces past the index column.
        let width = width + 2;
        writeln!(output)?;
        writeln!(output, "{: >width$}{}", ' ', node.module)?;
        writeln!(output)?;
        if let Some(url) = &node.url {
            writeln!(output, "{: >width$}{}", ' ', url)?;
            writeln!(output)?;
        }
        // Only layers the caller asked for are printed.
        for (layer, text) in node
            .additional
            .iter()
            .filter(|(l, _)| render_options.layers.iter().any(|x| &x == l))
        {
            writeln!(
                output,
                "{: >width$}{}: {}",
                ' ',
                layer.to_ascii_uppercase(),
                text.replace(
                    '\n',
                    &format!("\n{: >w$}", ' ', w = width + 2 + layer.len())
                )
            )?;
            writeln!(output)?;
        }
    }
    Ok(())
}
#[cfg(test)]
mod test {
    use std::collections::BTreeMap;
    use crate::gsn::GsnNode;
    use super::svg_from_gsn_node;
    // The `unreachable!` arm must fire for an id with an unknown prefix
    // ("X2" matches none of G/Sn/S/C/A/J).
    #[test]
    #[should_panic]
    fn cover_unreachable() {
        let gsn_node = GsnNode {
            text: "".to_owned(),
            in_context_of: None,
            supported_by: None,
            undeveloped: None,
            classes: None,
            url: None,
            level: None,
            additional: BTreeMap::new(),
            module: "".to_owned(),
        };
        svg_from_gsn_node("X2", &gsn_node, &[]);
    }
}
|
//! Contains structures for managing runtime configuration data.
use std::error::Error;
use std::fs::File;
use std::io::Read;
use std::path::Path;
use toml;
use iron::typemap::Key;
/// API key used to programmatically upload files.
#[derive(Deserialize)]
pub struct APIKey {
    /// The secret key value clients present.
    pub key: String,
    /// Optional note describing who/what uses this key.
    pub comment: Option<String>,
}
/// A single element of a user's credentials.
#[derive(Deserialize)]
pub struct UserCredentials {
    /// The user's username
    pub username: String,
    /// The user's password, hashed using SHA256
    pub password: String,
}
/// The config file contains configurable runtime properties, as well as user credentials.
#[derive(Deserialize)]
pub struct Config {
    /// Address the HTTP server binds to.
    pub bind_addr: String,
    /// Externally visible base URL of this service.
    pub external_url: String,
    /// Base path for stored files — TODO confirm against usage.
    pub base_path: String,
    /// Keys accepted for programmatic uploads.
    pub api_keys: Vec<APIKey>,
    /// Accounts allowed to log in.
    pub users: Vec<UserCredentials>,
    /// Purpose not evident from this file — presumably a server-wide
    /// secret; confirm against callers.
    pub key: String,
}
impl Config {
    /// Loads a config file from the specified file.
    ///
    /// # Errors
    /// Returns a human-readable message if the file cannot be opened, read,
    /// or parsed as TOML.
    ///
    /// Modernized: `Error::description()` is deprecated (and often returns a
    /// generic placeholder); the `Display` form via `to_string()` carries the
    /// actual error detail. The `match` ladders are replaced by `map_err` + `?`.
    pub fn from_file(path: &str) -> Result<Config, String> {
        let path = Path::new(path);
        let mut config_file = File::open(path).map_err(|err| err.to_string())?;
        let mut config_contents = String::new();
        config_file
            .read_to_string(&mut config_contents)
            .map_err(|err| err.to_string())?;
        toml::from_str(&config_contents).map_err(|err| err.to_string())
    }
}
/// Container used when shipping around the configuration in the web application.
#[derive(Copy, Clone)]
pub struct ConfigContainer;
// Keys the loaded `Config` in iron's typemap under this marker type.
impl Key for ConfigContainer {
    type Value = Config;
}
|
use structopt::StructOpt;
// CLI argument definition parsed by structopt.
// NOTE(review): plain `//` comments are used deliberately — structopt turns
// `///` doc comments into CLI help text, which would change program output.
#[derive(StructOpt)]
pub struct Cli {
    // Weight value to convert (units depend on the caller — TODO confirm).
    #[structopt(short, long)]
    pub weight: f32,
    // Target planet name.
    #[structopt(short, long)]
    pub planet: String,
}
// Parses the process arguments into a `Cli`; structopt exits with a usage
// message on invalid input.
pub fn get_args() -> Cli {
    Cli::from_args()
}
|
use crate::entities::{Submit, Testcase};
use crate::{db::DbPool, entities::Problem};
use anyhow::Result;
use async_trait::async_trait;
use chrono::prelude::*;
use sqlx::{MySql, Transaction};
#[async_trait]
pub trait SubmitRepository {
    /// Fetches the oldest pending submit and row-locks it.
    /// NOTE(review): despite the plural name, exactly one `Submit` is
    /// returned, and `fetch_one` errors when none is pending.
    async fn get_submits(&mut self) -> Result<Submit>;
    /// Sets the status of submit `id`; returns the affected row count.
    async fn update_status(&mut self, id: i64, status: &str) -> Result<u64>;
}
#[async_trait]
impl SubmitRepository for Transaction<'_, MySql> {
    async fn get_submits(&mut self) -> Result<Submit> {
        // FOR UPDATE keeps the row locked for this transaction's lifetime so
        // concurrent workers cannot pick the same submit.
        let submits = sqlx::query_as(
            r#"
            SELECT
                id
                , user_id
                , problem_id
                , path
                , status
                , point
                , execution_time
                , execution_memory
                , compile_error
                , lang
                , created_at
                , updated_at
                , deleted_at
            FROM
                submits
            WHERE
                (status = 'WJ' OR status = 'WR')
                AND deleted_at IS NULL
            ORDER BY
                updated_at ASC
            LIMIT
                1
            FOR UPDATE
            "#,
        )
        .fetch_one(self)
        .await?;
        Ok(submits)
    }
    async fn update_status(&mut self, id: i64, status: &str) -> Result<u64> {
        let result = sqlx::query!(
            r#"
            UPDATE submits
            SET
                status = ?
            WHERE
                id = ?
            "#,
            status,
            id,
        )
        .execute(self)
        .await?;
        Ok(result.rows_affected())
    }
}
#[async_trait]
pub trait ProblemsRepository {
    /// Loads the (non-soft-deleted) problem with the given id.
    async fn fetch_problem(&self, problem_id: i64) -> Result<Problem>;
}
#[async_trait]
impl ProblemsRepository for DbPool {
    async fn fetch_problem(&self, problem_id: i64) -> Result<Problem> {
        // `statement` and `constraints` are backtick-quoted because they
        // collide with SQL keywords.
        let problems = sqlx::query_as!(
            Problem,
            r#"
            SELECT
                id
                , slug
                , name
                , contest_id
                , writer_user_id
                , position
                , uuid
                , difficulty
                , `statement`
                , `constraints`
                , input_format
                , output_format
                , created_at
                , updated_at
                , deleted_at
                , checker_path
                , execution_time_limit
            FROM
                problems
            WHERE
                id = ?
                AND deleted_at IS NULL
            "#,
            problem_id,
        )
        .fetch_one(self)
        .await?;
        Ok(problems)
    }
}
#[async_trait]
pub trait TestcasesRepository {
    /// Loads all (non-soft-deleted) testcases of a problem.
    async fn fetch_testcases(&self, problem_id: i64) -> Result<Vec<Testcase>>;
}
#[async_trait]
impl TestcasesRepository for DbPool {
    async fn fetch_testcases(&self, problem_id: i64) -> Result<Vec<Testcase>> {
        let testcases = sqlx::query_as!(
            Testcase,
            r#"
            SELECT
                id
                , problem_id
                , name
                , input
                , output
                , explanation
                , created_at
                , updated_at
                , deleted_at
            FROM
                testcases
            WHERE
                problem_id = ?
                AND deleted_at IS NULL
            "#,
            problem_id,
        )
        .fetch_all(self)
        .await?;
        Ok(testcases)
    }
}
#[async_trait]
pub trait TestcaseResultsRepository {
    /// Soft-deletes all testcase results belonging to `submit_id`.
    async fn delete_testcase_results(&self, submit_id: i64) -> Result<()>;
}
#[async_trait]
impl TestcaseResultsRepository for DbPool {
    async fn delete_testcase_results(&self, submit_id: i64) -> Result<()> {
        // Soft delete: rows are kept, only `deleted_at` is stamped with the
        // current local time.
        sqlx::query(
            r#"
            UPDATE
                testcase_results
            SET
                deleted_at = ?
            WHERE
                submit_id = ?
                AND deleted_at IS NULL
            "#,
        )
        .bind(Local::now().naive_local())
        .bind(submit_id)
        .execute(self)
        .await?;
        Ok(())
    }
}
|
// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::io::net::ip::IpAddr;
use std::mem;
use std::rt::task::BlockedTask;
use libc::c_int;
use libc;
use {uvll, raw, UvResult, UvError, EventLoop};
use raw::Request;
// NOTE(review): pre-1.0 Rust (uses std::rt, `try!`, `uint`); documented
// as-is, no modernization attempted.
// Per-request state shared between the requesting task and the libuv
// callback.
struct Data {
    blocker: Option<BlockedTask>, // task parked until the callback fires
    status: libc::c_int,          // libuv status code; negative on error
    addrinfo: Option<AddrInfo>,   // result list, freed when dropped
}
// Owns a C `addrinfo` list; released via `uv_freeaddrinfo` in Drop.
struct AddrInfo {
    handle: *const libc::addrinfo,
}
// Owns the raw uv_getaddrinfo request; released via `free()` in Drop.
struct GetAddrInfo {
    handle: raw::GetAddrInfo,
}
/// Synchronous DNS resolution
///
/// See [`std::io::net::get_host_addresses`][1]
///
/// [1]: http://doc.rust-lang.org/std/io/net/addrinfo/fn.get_host_addresses.html
pub fn get_host_addresses(host: &str) -> UvResult<Vec<IpAddr>> {
    // Borrows the task-local event loop and delegates to the *_on variant.
    let mut eloop = try!(EventLoop::borrow());
    get_host_addresses_on(&mut *eloop, host)
}
/// Same as `get_host_addresses`, but specifies what event loop to run on.
///
/// Blocks the current task until the libuv getaddrinfo callback fires, then
/// converts the returned C `addrinfo` chain into a vector of addresses.
pub fn get_host_addresses_on(eloop: &mut EventLoop,
                             host: &str) -> UvResult<Vec<IpAddr>> {
    let mut req = unsafe { GetAddrInfo { handle: Request::alloc() } };
    let mut data = Data {
        blocker: None,
        status: 0,
        addrinfo: None,
    };
    // The callback recovers this pointer via req.get_data(); `data` lives on
    // this stack frame and outlives the blocked wait below.
    req.handle.set_data(&mut data as *mut _ as *mut _);
    unsafe {
        try!(req.handle.send(&eloop.uv_loop(), Some(host), None, callback));
        // Park this task; `callback` wakes it via ::wakeup.
        ::block(eloop.uv_loop(), |task| {
            data.blocker = Some(task);
        });
    }
    if data.status < 0 { return Err(UvError(data.status)) }
    let addrinfo = data.addrinfo.unwrap();
    unsafe {
        // Walk the C linked list of addrinfo results.
        let mut addr = addrinfo.handle;
        let mut addrs = Vec::new();
        loop {
            let rustaddr = raw::sockaddr_to_addr(mem::transmute((*addr).ai_addr),
                                                 (*addr).ai_addrlen as uint);
            addrs.push(rustaddr.ip);
            if (*addr).ai_next.is_not_null() {
                addr = (*addr).ai_next as *const _;
            } else {
                break;
            }
        }
        Ok(addrs)
    }
}
// libuv completion callback: records status and result into the request's
// shared `Data`, then wakes the task parked in `get_host_addresses_on`.
extern fn callback(req: *mut uvll::uv_getaddrinfo_t,
                   status: libc::c_int,
                   res: *const libc::addrinfo) {
    // This module never cancels a request, so ECANCELED is a bug.
    assert!(status != uvll::ECANCELED);
    let req: raw::GetAddrInfo = unsafe { Request::from_raw(req) };
    // Pointer set via set_data() in get_host_addresses_on; still valid
    // because that frame is blocked until we wake it.
    let data: &mut Data = unsafe { mem::transmute(req.get_data()) };
    data.status = status;
    data.addrinfo = Some(AddrInfo { handle: res });
    ::wakeup(&mut data.blocker);
}
// Frees the C addrinfo list exactly once, when the owner goes away.
impl Drop for AddrInfo {
    fn drop(&mut self) {
        unsafe { uvll::uv_freeaddrinfo(self.handle as *mut _) }
    }
}
// Releases the raw libuv request allocation.
impl Drop for GetAddrInfo {
    fn drop(&mut self) {
        unsafe { self.handle.free() }
    }
}
|
use std::borrow::Cow;
use std::cell::RefCell;
use std::path::{Path, PathBuf};
use std::rc::Rc;
use anyhow::Result;
use serde::{Deserialize, Serialize};
use crate::cfg::local::setup_vars::Vars;
use crate::cfg::local::ArrayVars;
use crate::cfg::setup::SetupCfg;
use crate::cfg::CfgError;
pub type SetupName = String;
/// Serialized (YAML) configuration of a single local setup.
///
/// `array_vars` and `vars` are shared interior-mutable cells so other parts
/// of the configuration can hold live handles to them; both are omitted
/// from the serialized form when `None`.
#[derive(Debug, Serialize, Deserialize)]
pub struct LocalSetupCfg {
    // Not serialized — presumably restored by the owner after load;
    // TODO confirm.
    #[serde(skip)]
    name: SetupName,
    #[serde(skip_serializing_if = "Option::is_none")]
    public_env_dir: Option<PathBuf>,
    // Script file this setup runs.
    file: PathBuf,
    #[serde(skip_serializing_if = "Option::is_none")]
    array_vars: Option<Rc<RefCell<ArrayVars>>>,
    #[serde(skip_serializing_if = "Option::is_none")]
    vars: Option<Rc<RefCell<Vars>>>,
}
impl Clone for LocalSetupCfg {
fn clone(&self) -> Self {
let array_vars = self.array_vars.as_ref().map(|array_vars| {
let array_vars = Rc::clone(array_vars);
let array_vars = (&*array_vars).clone();
Rc::new(array_vars)
});
let vars = self.vars.as_ref().map(|vars| {
let vars = Rc::clone(vars);
let vars = (&*vars).clone();
Rc::new(vars)
});
Self {
name: self.name.clone(),
public_env_dir: self.public_env_dir.clone(),
file: self.file.clone(),
array_vars,
vars,
}
}
}
impl LocalSetupCfg {
    /// Creates a setup configuration with a fresh (empty) `ArrayVars` section.
    pub fn new(name: String, file: PathBuf) -> Self {
        let mut local_setup = Self {
            name,
            public_env_dir: None,
            file,
            array_vars: None,
            vars: None,
        };
        local_setup.new_array_vars();
        local_setup
    }
    /// The script file this setup runs.
    pub fn file(&self) -> &PathBuf {
        &self.file
    }
    pub fn set_file(&mut self, file: PathBuf) {
        self.file = file;
    }
    /// Replaces the current `ArrayVars` with a fresh one and returns a shared
    /// handle to it.
    pub fn new_array_vars(&mut self) -> Rc<RefCell<ArrayVars>> {
        let array_vars = Rc::new(RefCell::new(ArrayVars::new()));
        self.array_vars = Some(Rc::clone(&array_vars));
        array_vars
    }
    /// A shared handle to the array vars, if any.
    /// (Idiom: `.map(Rc::clone)` replaces the redundant closure.)
    pub fn array_vars(&self) -> Option<Rc<RefCell<ArrayVars>>> {
        self.array_vars.as_ref().map(Rc::clone)
    }
    /// A shared handle to the vars, if any.
    pub fn vars(&self) -> Option<Rc<RefCell<Vars>>> {
        self.vars.as_ref().map(Rc::clone)
    }
    /// The public env directory, or an empty path when unset.
    pub fn public_env_dir(&self) -> Cow<Path> {
        match &self.public_env_dir {
            Some(dir) => Cow::Borrowed(dir),
            None => Cow::Owned(PathBuf::new()),
        }
    }
    pub fn set_public_env_dir(&mut self, dir: PathBuf) {
        self.public_env_dir = Some(dir)
    }
    /// Clears the public env directory.
    ///
    /// # Errors
    /// Fails with `CfgError::PublicEnvAlreadyUnset` if it was already unset.
    /// (Idiom: `is_none()` replaces `if let None = …`.)
    pub fn unset_public_env_dir(&mut self) -> Result<()> {
        if self.public_env_dir.is_none() {
            bail!(CfgError::PublicEnvAlreadyUnset(self.name.clone()))
        } else {
            self.public_env_dir = None;
            Ok(())
        }
    }
}
impl SetupCfg for LocalSetupCfg {
    /// The setup's name (kept out of the serialized form).
    fn name(&self) -> &String {
        &self.name
    }
    fn set_name(&mut self, name: SetupName) {
        self.name = name;
    }
}
#[cfg(test)]
mod tests {
    use crate::cfg::{ArrayVar, LocalSetupCfg};
    // Round-trip: serialize -> deserialize -> serialize must be stable and
    // must preserve insertion order of the array vars.
    #[test]
    fn local_cfg_yaml() {
        let setup_cfg = LocalSetupCfg::new("setup".into(), "run.sh".into());
        let expect = r#"---
file: run.sh
array_vars:
  all: ".*"
  var2: "*_SUFFIX"
  var1: PREFIX_*"#;
        let array_vars = setup_cfg.array_vars().unwrap();
        let mut array_vars = array_vars.borrow_mut();
        array_vars.add(ArrayVar::new("all".into(), ".*".into()));
        array_vars.add(ArrayVar::new("var2".into(), "*_SUFFIX".into()));
        array_vars.add(ArrayVar::new("var1".into(), "PREFIX_*".into()));
        // Release the mutable borrow before serialization reads the cell.
        drop(array_vars);
        let content = serde_yaml::to_string(&setup_cfg).unwrap();
        assert_eq!(expect, content.as_str());
        let setup_cfg: LocalSetupCfg = serde_yaml::from_str(content.as_str()).unwrap();
        let content = serde_yaml::to_string(&setup_cfg).unwrap();
        assert_eq!(expect, content.as_str());
    }
}
|
use crate::{map::Key, set::iterators::Iter, Segment, SegmentMap, SegmentSet};
impl<T> SegmentSet<T> {
    // TODO: into_union_iter
    /// Iterator over the union of `self` and `other`, yielding coalesced,
    /// non-overlapping segments of borrowed endpoints.
    pub fn union_iter<'a>(&'a self, other: &'a Self) -> Union<'a, T> {
        Union {
            iter_a: self.iter(),
            prev_a: None,
            iter_b: other.iter(),
            prev_b: None,
        }
    }
    // TODO: into_union
    /// The union of `self` and `other` as a new set of borrowed endpoints.
    pub fn union<'a>(&'a self, other: &'a Self) -> SegmentSet<&'a T>
    where
        T: Ord,
    {
        // Don't need to insert, since we know ranges produced by the iterator
        // aren't overlapping
        SegmentSet {
            map: SegmentMap {
                map: self.union_iter(other).map(|r| (Key(r), ())).collect(),
                store: alloc::vec::Vec::new(),
            },
        }
    }
}
/// Set Union
impl<T: Ord + Clone> core::ops::BitOr<&SegmentSet<T>> for &SegmentSet<T> {
    type Output = SegmentSet<T>;
    /// Returns the union of `self` and `rhs` as a new `SegmentSet<T>`
    /// (endpoints cloned).
    ///
    /// # Examples
    ///
    /// The behavior mirrors the std `BTreeSet` union operator:
    ///
    /// ```
    /// use std::collections::BTreeSet;
    ///
    /// let a: BTreeSet<_> = vec![1, 2, 3].into_iter().collect();
    /// let b: BTreeSet<_> = vec![3, 4, 5].into_iter().collect();
    ///
    /// let result = &a | &b;
    /// let result_vec: Vec<_> = result.into_iter().collect();
    /// assert_eq!(result_vec, [1, 2, 3, 4, 5]);
    /// ```
    fn bitor(self, rhs: &SegmentSet<T>) -> SegmentSet<T> {
        self.union(rhs).cloned()
    }
}
/// Set Union
impl<T: Ord + Clone> core::ops::Add<&SegmentSet<T>> for &SegmentSet<T> {
    type Output = SegmentSet<T>;
    /// `+` is an alias for `|`: the union of `self` and `rhs` with cloned
    /// endpoints.
    fn add(self, rhs: &SegmentSet<T>) -> SegmentSet<T> {
        self.union(rhs).cloned()
    }
}
// TODO: Set in-place union (AddAssign and BitOrAssign)
/// Merging iterator over two segment iterators, yielding the coalesced
/// union in order.
pub struct Union<'a, T> {
    iter_a: Iter<'a, T>,
    /// Segment pulled from `iter_a` but not yet emitted.
    prev_a: Option<Segment<&'a T>>,
    iter_b: Iter<'a, T>,
    /// Segment pulled from `iter_b` but not yet emitted.
    prev_b: Option<Segment<&'a T>>,
}
impl<'a, T: Ord> Iterator for Union<'a, T> {
    type Item = Segment<&'a T>;
    /// Emits the next segment of the union, merging all touching/overlapping
    /// segments from either input into a single segment.
    fn next(&mut self) -> Option<Self::Item> {
        // Pull the next candidate from each side, preferring a segment
        // stashed by a previous call.
        let next_a = self
            .prev_a
            .take()
            .or_else(|| self.iter_a.next().map(|x| x.as_ref()));
        let next_b = self
            .prev_b
            .take()
            .or_else(|| self.iter_b.next().map(|x| x.as_ref()));
        // If one ran out, use the other
        let next_a = match next_a {
            Some(a) => a,
            None => return next_b,
        };
        let next_b = match next_b {
            Some(b) => b,
            None => return Some(next_a),
        };
        // If `a` is fully before `b` return it and hold on to `b`
        if next_a.end.cmp_start(&next_b.start).is_gt() {
            // (Option::insert is used for its side effect of stashing.)
            self.prev_b.insert(next_b);
            return Some(next_a);
        }
        // Likewise the other way around
        if next_a.start.cmp_end(&next_b.end).is_gt() {
            self.prev_a.insert(next_a);
            return Some(next_b);
        }
        // Otherwise, `a` must overlap `b`. Store the outer bounds
        let mut outer = Segment {
            start: core::cmp::min(next_a.start, next_b.start),
            end: core::cmp::max(next_a.end, next_b.end),
        };
        // Increase the outer end bound until we have no overlap
        loop {
            // Get the next end_bound for a touching `a` (holding on to it if
            // not touching)
            let next_a_end = if let Some(r) = self
                .prev_a
                .take()
                .or_else(|| self.iter_a.next().map(|r| r.as_ref()))
            {
                if outer.touches(&r) {
                    Some(r.end)
                } else {
                    self.prev_a.insert(r);
                    None
                }
            } else {
                None
            };
            // likewise for `b`
            let next_b_end = if let Some(r) = self
                .prev_b
                .take()
                .or_else(|| self.iter_b.next().map(|r| r.as_ref()))
            {
                if outer.touches(&r) {
                    Some(r.end)
                } else {
                    self.prev_b.insert(r);
                    None
                }
            } else {
                None
            };
            match (next_a_end, next_b_end) {
                // If no extensions to make, return
                (None, None) => return Some(outer),
                // If we only have one, apply it and loop
                (Some(end), None) | (None, Some(end)) => outer.end = end,
                // If we have both, use the greater (and loop)
                (Some(a), Some(b)) => outer.end = core::cmp::max(a, b),
            }
        }
    }
}
|
pub mod warp;
|
#![feature(io)]
extern crate inkwell;
mod lexer;
mod ast;
mod parser;
use std::io::*;
use lexer::{Token, Lexer};
use parser::{Error, Parser};
// REPL driver: lexes stdin and dispatches on the leading token until EOF.
// (Relies on the unstable `io` feature for `stdin().chars()`.)
fn main() {
    // TODO: the closure is bad, but the best solution that came up. consider working more on this
    let lexer = Lexer::new(Box::new(|| { stdin().chars() }));
    let mut parser = Parser {
        lexer: Box::new(lexer.peekable()),
    };
    loop {
        match parser.peek() {
            None => break,
            Some(&Token::Def) => parser.def(),
            Some(&Token::Extern) => parser.ext(),
            // Some(&Token::Char(';')) => parser.skip(),
            _ => parser.expr(),
        };
    }
}
use common::*;
use ncollide2d::transformation::convex_hull_idx;
// use ncollide2d::query::closest_points_line_line_parameters;
use rand::prelude::*;
use specs::prelude::*;
use specs_derive::Component;
use voronois::destruction;
/// Geometric tolerance used for grid wrapping and tangent tests.
pub const EPS: f32 = 1E-3;
pub const SHADOW_LENGTH: f32 = 100f32;
// NOTE(review): "SHADR" looks like a typo for "SHADER", but the constant is
// public API — renaming would break callers.
pub const DECONSTRUCT_SHADR_MIN_R: f32 = 0.1f32;
/// Collision/render geometry of an entity: a circle or a convex polygon.
#[derive(Component, Debug, Clone)]
pub enum Geometry {
    Circle { radius: f32 },
    Polygon(Polygon),
}
/// A single line segment between two points — coordinate space presumably
/// world units; TODO confirm against callers.
#[derive(Component, Debug, Clone, Copy)]
pub struct BlockSegment {
    pub point1: Point2,
    pub point2: Point2,
}
/// Occupancy grid — presumably used to place nebulae; see [`Grid`].
pub struct NebulaGrid {
    pub grid: Grid<bool>,
}
impl NebulaGrid {
    /// Parameters are forwarded to `Grid::new`.
    pub fn new(n: usize, rw: f32, rh: f32, rbw: f32, rbh: f32) -> Self {
        // Field-init shorthand replaces the redundant `grid: grid`.
        NebulaGrid {
            grid: Grid::new(n, rw, rh, rbw, rbh),
        }
    }
}
/// Occupancy grid — presumably used to place planets; see [`Grid`].
pub struct PlanetGrid {
    pub grid: Grid<bool>,
}
impl PlanetGrid {
    /// Parameters are forwarded to `Grid::new`.
    pub fn new(n: usize, rw: f32, rh: f32, rbw: f32, rbh: f32) -> Self {
        // Field-init shorthand replaces the redundant `grid: grid`.
        PlanetGrid {
            grid: Grid::new(n, rw, rh, rbw, rbh),
        }
    }
}
/// Occupancy grid — presumably used to place stars; see [`Grid`].
pub struct StarsGrid {
    pub grid: Grid<bool>,
}
impl StarsGrid {
    /// Parameters are forwarded to `Grid::new`.
    pub fn new(n: usize, rw: f32, rh: f32, rbw: f32, rbh: f32) -> Self {
        // Field-init shorthand replaces the redundant `grid: grid`.
        StarsGrid {
            grid: Grid::new(n, rw, rh, rbw, rbh),
        }
    }
}
/// Occupancy grid — presumably used to place fog; see [`Grid`].
pub struct FogGrid {
    pub grid: Grid<bool>,
}
impl FogGrid {
    /// Parameters are forwarded to `Grid::new`.
    pub fn new(n: usize, rw: f32, rh: f32, rbw: f32, rbh: f32) -> Self {
        // Field-init shorthand replaces the redundant `grid: grid`.
        FogGrid {
            grid: Grid::new(n, rw, rh, rbw, rbh),
        }
    }
}
/// A square `(2n+1) × (2n+1)` grid of cells centered near the origin.
pub struct Grid<T> {
    bricks: Vec<T>, // row-major cell storage, length `size * size`
    x: f32,         // origin offset, wrapped within one cell by `shift`
    y: f32,
    rw: f32,        // cell half-width (get_column divides by 2*rw)
    rh: f32,        // cell half-height
    rbw: f32,       // border inset used by get_rectangle — TODO confirm meaning
    rbh: f32,
    pub max_w: f32, // half-extent of the whole grid in x
    pub max_h: f32, // half-extent of the whole grid in y
    pub n: usize,   // cells per side = 2n + 1
    pub size: usize, // cached 2n + 1
}
impl<T> Grid<T>
where
    T: Default + Clone,
{
    /// Creates a `(2n+1) × (2n+1)` grid of default-valued cells.
    /// (Field-init shorthand replaces the redundant `bricks: bricks` etc.)
    pub fn new(n: usize, rw: f32, rh: f32, rbw: f32, rbh: f32) -> Self {
        let size = 2 * n + 1;
        let bricks = vec![T::default(); size * size];
        Self {
            bricks,
            x: 0.0,
            y: 0.0,
            rw,
            rh,
            rbw,
            rbh,
            max_w: rw + 2.0 * n as f32 * rw,
            max_h: rh + 2.0 * n as f32 * rh,
            n,
            size,
        }
    }
    /// Moves the grid origin by `(x, y)` and clears all cell values.
    pub fn shift(&mut self, x: f32, y: f32) {
        self.x += x;
        self.y += y;
        // shift grid by one cell if necessary
        if self.x > self.rw + EPS {
            self.x = -self.rw
        }
        if self.x < -self.rw - EPS {
            self.x = self.rw
        }
        if self.y > self.rh + EPS {
            self.y = -self.rh
        }
        if self.y < -self.rh - EPS {
            self.y = self.rh
        }
        // The move invalidates all cached cell values; this previously
        // duplicated the body of reset() inline.
        self.reset();
    }
    /// Column index for world x (0-based, row-major).
    pub fn get_column(&self, x: f32) -> usize {
        (self.n as i32
            + ((self.rw + x - self.x) / (2.0 * self.rw)).floor() as i32)
            as usize
    }
    /// Row index for world y.
    pub fn get_row(&self, y: f32) -> usize {
        (self.n as i32
            + ((self.rh + y - self.y) / (2.0 * self.rh)).floor() as i32)
            as usize
    }
    /// The cell's rectangle as `((x_min, x_max), (y_min, y_max))`, shrunk by
    /// the border insets `rbw`/`rbh`.
    pub fn get_rectangle(
        &self,
        row: usize,
        col: usize,
    ) -> ((f32, f32), (f32, f32)) {
        let point = self.get_cell_point(row, col);
        return (
            (point.x - self.rw + self.rbw, point.x + self.rw - self.rbw),
            (point.y - self.rh + self.rbh, point.y + self.rh - self.rbh),
        );
    }
    /// World-space center of cell `(row, column)`.
    pub fn get_cell_point(&self, row: usize, column: usize) -> Point2 {
        let (row, column) =
            (row as f32 - self.n as f32, column as f32 - self.n as f32);
        Point2::new(
            self.x + column * 2.0 * self.rw,
            self.y + row * 2.0 * self.rh,
        )
    }
    pub fn get_cell_value(&self, row: usize, column: usize) -> &T {
        &self.bricks[row * self.size + column]
    }
    /// Resets every cell to `T::default()`.
    pub fn reset(&mut self) {
        self.bricks = vec![T::default(); self.bricks.len()];
    }
    /// Writes `value` into the cell containing `point`; `Err(())` when the
    /// point lies outside the grid's extent.
    pub fn update(&mut self, point: Point2, value: T) -> Result<(), ()> {
        if (point.x - self.x).abs() < self.max_w
            && (point.y - self.y).abs() < self.max_h
        {
            let id =
                self.size * self.get_row(point.y) + self.get_column(point.x);
            self.bricks[id] = value;
            return Ok(());
        }
        Err(())
    }
}
/// Samples `samples_num` random points inside a circle of radius `size`
/// (x uniform, then y uniform within the chord) and returns their convex
/// hull as a `Polygon`.
pub fn generate_convex_polygon(samples_num: usize, size: f32) -> Polygon {
    let mut rng = thread_rng();
    let mut points = vec![];
    for _ in 0..samples_num {
        let x = rng.gen_range(-size, size);
        // sample from circle
        let chord = (size * size - x * x).sqrt();
        let y = rng.gen_range(-chord, chord);
        points.push(Point2::new(x, y));
    }
    let ids = convex_hull_idx(&points);
    // Gather the hull points in hull order; the iterator form replaces the
    // manual index-copy loop (TODO opt: still not in-place).
    let points: Vec<_> = ids.iter().map(|&i| points[i]).collect();
    Polygon::new(points)
}
// @vlad TODO refactor (it's copy paste from stack overflow)
/// get tangent to circle from point
///
/// Returns the two tangent contact points on the circle (center `circle`,
/// radius `r`) as seen from `point`, or `(None, None)` when `point` lies
/// inside/on the circle (no tangent exists).
pub fn get_tangent(
    circle: Point2,
    r: f32,
    point: Point2,
) -> (Option<Point2>, Option<Point2>) {
    // Work in units of the radius, relative to the circle center.
    let npoint = (point - circle) / r;
    let xy = npoint.norm_squared();
    if xy - 1.0 <= EPS {
        return (None, None);
    }
    let mut discr = npoint.y * (xy - 1f32).sqrt();
    let tx0 = (npoint.x - discr) / xy;
    let tx1 = (npoint.x + discr) / xy;
    // Degenerate case npoint.y == 0 needs a separate y computation.
    let (yt0, yt1) = if npoint.y != 0f32 {
        (
            circle.y + r * (1f32 - tx0 * npoint.x) / npoint.y,
            circle.y + r * (1f32 - tx1 * npoint.x) / npoint.y,
        )
    } else {
        discr = r * (1f32 - tx0 * tx0).sqrt();
        (circle.y + discr, circle.y - discr)
    };
    let xt0 = circle.x + r * tx0;
    let xt1 = circle.x + r * tx1;
    (Some(Point2::new(xt0, yt0)), Some(Point2::new(xt1, yt1)))
}
/// An indexed triangle mesh: vertex positions plus `u16` triangle indices.
/// NOTE(review): "indicies" is a typo for "indices", but the field is public
/// API — renaming would break callers.
#[derive(Debug)]
pub struct Triangulation {
    pub points: Vec<Point2>,
    pub indicies: Vec<u16>,
}
impl Triangulation {
    /// Creates an empty triangulation.
    pub fn new() -> Self {
        Self {
            points: vec![],
            indicies: vec![],
        }
    }
    /// Applies a rigid transform to every vertex in place.
    pub fn apply(&mut self, isometry: Isometry2) {
        for p in self.points.iter_mut() {
            *p = isometry * *p;
        }
    }
    /// Translates every vertex by `shift` in place.
    pub fn translate(&mut self, shift: Vector2) {
        for p in self.points.iter_mut() {
            *p += shift;
        }
    }
    /// Appends another triangulation, re-basing its indices.
    ///
    /// Bug fix: the index offset must be the point count *before* the
    /// incoming points are appended — the incoming indices refer to the
    /// incoming point list, which lands at that offset. The previous code
    /// computed the shift after `extend`, producing out-of-range indices.
    pub fn extend(&mut self, triangulation: Triangulation) {
        let id_shift = self.points.len() as u16;
        self.points.extend(triangulation.points);
        self.indicies
            .extend(triangulation.indicies.iter().map(|x| *x + id_shift));
    }
}
/// Shapes that can be triangulated as a fan around a center point.
pub trait TriangulateFromCenter {
    /// Boundary vertices, in order.
    fn points(&self) -> &[Point2];
    /// Fan center (vertex 0 of the resulting mesh).
    fn center(&self) -> Point2;
    /// Builds a triangle fan: the center is vertex 0 and each boundary
    /// vertex is joined to its successor (wrapping) and the center.
    fn triangulate(&self) -> Triangulation {
        let boundary = self.points();
        let mut points = Vec::with_capacity(boundary.len() + 1);
        points.push(self.center());
        points.extend(boundary.iter());
        let count = points.len() as u16;
        let mut indicies = Vec::with_capacity(boundary.len() * 3);
        for i in 1..count {
            // Wrap the last triangle back to the first boundary vertex.
            let successor = if i + 1 == count { 1u16 } else { i + 1 };
            indicies.extend_from_slice(&[0u16, i, successor]);
        }
        Triangulation { points, indicies }
    }
}
/// A 2D polygon with cached derived metrics computed in `Polygon::new`.
#[derive(Debug, Component, Clone)]
pub struct Polygon {
    pub points: Vec<Point2>,
    /// Uniform average of all vertices.
    mass_center: Point2,
    /// Distance from the mass center to the closest vertex.
    pub min_r: f32,
    /// Distance from the mass center to the farthest vertex.
    pub max_r: f32,
    /// Bounding-box width.
    pub width: f32,
    /// Bounding-box height.
    pub height: f32,
}
impl Polygon {
    /// Returns a copy of the polygon with every corner replaced by
    /// `smooth_points + 1` points swept along an arc (rounded corners).
    pub fn into_rounded(self, smooth_points: usize) -> Self {
        let mut res = vec![];
        let n = self.points.len();
        for i in 0..n {
            // Previous / current / next vertices, wrapping around.
            let prev = self.points[(i + n - 1) % n];
            let p = self.points[i];
            let next = self.points[(i + 1) % n];
            let edge_vec1 = p.coords - prev.coords;
            let edge_vec2 = next.coords - p.coords;
            let segment1 = Segment::new(prev, prev + edge_vec1);
            // Bisector-like direction pointing into the corner.
            let inside_vec =
                (-edge_vec1.normalize() + edge_vec2.normalize()) / 2.0;
            let d = inside_vec
                .dot(&edge_vec1)
                .abs()
                .min(inside_vec.dot(&edge_vec2).abs());
            let inside_vec = d * inside_vec;
            // Candidate arc center.
            let o = p + inside_vec;
            let mut h1 = Vector2::new(-edge_vec1.y, edge_vec1.x).normalize();
            let mut corner_found = false;
            {
                // Cast the perpendicular in both directions to find where it
                // hits the incoming edge; that distance is the arc radius.
                let ray1 = Ray::new(o, h1);
                let ray2 = Ray::new(o, -h1);
                let toi1 =
                    segment1.toi_with_ray(&Isometry2::identity(), &ray1, true);
                let toi2 =
                    segment1.toi_with_ray(&Isometry2::identity(), &ray2, true);
                match (toi1, toi2) {
                    (Some(toi), _) => {
                        h1 = h1 * toi;
                        corner_found = true;
                    }
                    (_, Some(toi)) => {
                        h1 = -h1 * toi;
                        corner_found = true;
                    }
                    _ => (),
                }
            };
            if corner_found {
                // Sweep the radius vector across the corner angle.
                let angle = edge_vec1.angle(&edge_vec2);
                let rotation = Rotation2::new(-angle / (smooth_points as f32));
                for _ in 0..=smooth_points {
                    res.push(o + h1);
                    h1 = rotation * h1;
                }
            } else {
                // Degenerate corner: keep the original vertex.
                res.push(p)
            }
        }
        Self::new(res)
    }
    /// Builds a polygon from `points` (must contain at least two points),
    /// computing its mass center, bounding box and min/max vertex distance,
    /// and normalizing the winding direction.
    pub fn new(mut points: Vec<Point2>) -> Self {
        let w = 1.0 / (points.len() as f32);
        let mut center = Point2::new(0f32, 0f32);
        // BUG FIX: bounds were seeded with 100.0 (skewing any bounding box
        // not straddling 100) and `max_x` accumulated `p.y` instead of `p.x`.
        let mut min_x = std::f32::INFINITY;
        let mut max_x = std::f32::NEG_INFINITY;
        let mut min_y = std::f32::INFINITY;
        let mut max_y = std::f32::NEG_INFINITY;
        for p in points.iter() {
            center.x += w * p.x;
            center.y += w * p.y;
            min_x = min_x.min(p.x);
            min_y = min_y.min(p.y);
            max_x = max_x.max(p.x);
            max_y = max_y.max(p.y);
        }
        let width = max_x - min_x;
        let height = max_y - min_y;
        // BUG FIX: min_r was previously capped at 10.0 by its seed value.
        let mut min_r = std::f32::INFINITY;
        let mut max_r = 0f32;
        for p in points.iter() {
            min_r = min_r.min((p - center).norm());
            max_r = max_r.max((p - center).norm());
        }
        // Normalize winding via the cross product of the first two vertices
        // relative to the center.
        if (points[0].coords - center.coords)
            .perp(&(points[1].coords - center.coords))
            > 0.0
        {
            points.reverse();
        }
        Polygon {
            points,
            mass_center: center,
            min_r,
            max_r,
            width,
            height,
        }
    }
    /// Rotates every point by `rot` and shifts by the (pre-rotation) mass
    /// center, then resets the stored mass center to the origin.
    ///
    /// NOTE(review): the subtracted center is not rotated, so the result is
    /// exactly centered only when callers rely on that combination — confirm.
    pub fn centralize(&mut self, rot: Rotation2<f32>) {
        for p in self.points.iter_mut() {
            *p = rot * *p;
            p.x -= self.mass_center.x;
            p.y -= self.mass_center.y;
        }
        self.mass_center = Point2::new(0f32, 0f32);
    }
    /// Shatters the polygon around the `bullet` impact point into shard
    /// polygons using `sites` seed points. Returns an empty vec for shapes
    /// too small to shatter.
    pub fn deconstruct(&self, bullet: Point2, sites: usize) -> Vec<Polygon> {
        if self.max_r < 1.2 {
            return vec![];
        }
        // Normalize points into roughly [0, 1] x [0, 1] for the destruction
        // algorithm; the small epsilon keeps values strictly inside.
        let mut transformed_points = self.points.clone();
        let w_div = self.width + 0.05;
        let h_div = self.height + 0.05;
        for p in transformed_points.iter_mut() {
            p.x += self.width / 2.0;
            p.x /= w_div;
            p.y += self.height / 2.0;
            p.y /= h_div;
        }
        let mut bullet =
            bullet + Vector2::new(self.width / 2.0, self.height / 2.0);
        bullet.x /= w_div;
        bullet.y /= h_div;
        // NOTE(review): the 5.0 factor scales the impact point in normalized
        // space — presumably tuned for `destruction`; confirm.
        let bullet = Point2::from(5.0 * bullet.coords);
        let (polys, _, _) = destruction(&transformed_points, bullet, sites);
        let mut res = vec![];
        for poly in polys.iter() {
            // Map each shard back into world space.
            let mut poly = poly.clone();
            for p in poly.iter_mut() {
                p.x *= w_div;
                p.x -= self.width / 2.0;
                p.y *= h_div;
                p.y -= self.height / 2.0;
            }
            let poly = Polygon::new(poly.to_vec());
            // Drop slivers that are too thin to be worth keeping.
            if poly.min_r > DECONSTRUCT_SHADR_MIN_R {
                res.push(poly);
            }
        }
        res
    }
}
// Polygons triangulate as a fan around their mass center.
impl TriangulateFromCenter for Polygon {
    fn points(&self) -> &[Point2] {
        &self.points
    }
    fn center(&self) -> Point2 {
        self.mass_center
    }
}
/// Angle of `vec` relative to the positive x axis, in `(-PI, PI]`.
fn x_angle(vec: Vector2) -> f32 {
    vec.y.atan2(vec.x)
}
/// Collapses `poly` (offset by `position`) to the segment between its two
/// angular extremes as seen from the origin.
pub fn poly_to_segment(poly: Polygon, position: Point2) -> BlockSegment {
    let points = &poly.points;
    // Rotate so the first vertex lands on the x axis; all angles are then
    // measured relative to it, which avoids wrap-around at +/-PI.
    let rotation = Rotation2::rotation_between(
        &(points[0].coords + position.coords),
        &Vector2::x_axis(),
    );
    let start_angle = x_angle(rotation * (points[0].coords + position.coords));
    let mut point1 = points[0];
    let mut point2 = points[0];
    let mut angle1 = start_angle;
    let mut angle2 = start_angle;
    // Track the vertices with the smallest and the largest angle.
    for point in points.iter() {
        let angle = x_angle(rotation * (point.coords + position.coords));
        if angle < angle1 {
            angle1 = angle;
            point1 = *point;
        }
        if angle > angle2 {
            angle2 = angle;
            point2 = *point;
        }
    }
    BlockSegment { point1, point2 }
}
/// Computes the shadow quad cast by `geom` (placed at `position`, rotated by
/// `rotation`) away from the light source at `center`.
///
/// Returns `None` when no silhouette segment can be determined (e.g. the
/// light source lies inside a circle geometry).
pub fn shadow_geometry(
    center: Point2,
    geom: Geometry,
    position: Point2,
    rotation: Rotation2<f32>,
) -> Option<Triangulation> {
    // First reduce the occluder to its silhouette segment as seen from `center`.
    let segment = match geom {
        Geometry::Circle { radius } => {
            // NOTE(review): the tangent points are deliberately swapped
            // (p2 feeds dir1) — presumably to keep a consistent winding;
            // confirm against the renderer.
            let dirs = match get_tangent(position, radius, center) {
                (Some(p1), Some(p2)) => Some((
                    Vector2::new(p2.x - center.x, p2.y - center.y),
                    Vector2::new(p1.x - center.x, p1.y - center.y),
                )),
                _ => None, // TODO handle this or what?
            };
            if let Some((dir1, dir2)) = dirs {
                let shape_point1 = center + dir1;
                let shape_point2 = center + dir2;
                Some(BlockSegment {
                    point1: shape_point1,
                    point2: shape_point2,
                })
            } else {
                None
            }
        }
        Geometry::Polygon(mut block_polygon) => {
            // Rotate the polygon into world orientation, then reduce it to
            // its extremal silhouette segment.
            let points: Vec<Point2> =
                block_polygon.points.iter().map(|x| rotation * x).collect();
            block_polygon.points = points;
            Some(poly_to_segment(block_polygon, position))
        }
    };
    // Extrude the silhouette away from the light to build the shadow quad.
    if let Some(segment) = segment {
        let dir1 = segment.point1.coords + position.coords - center.coords;
        let dir2 = segment.point2.coords + position.coords - center.coords;
        let points = vec![
            segment.point1,
            segment.point2,
            segment.point1 + SHADOW_LENGTH * dir1,
            segment.point2 + SHADOW_LENGTH * dir2,
        ];
        // Two triangles covering the quad: (0,2,3) and (0,3,1).
        let indicies = vec![0, 2, 3, 0, 3, 1];
        Some(Triangulation {
            points: points,
            indicies: indicies,
        })
    } else {
        None
    }
}
|
// The prime factors of 13195 are 5, 7, 13 and 29.
//
// What is the largest prime factor of the number 600851475143 ?
/// Returns the largest prime factor of `n` (`n >= 2`) by trial division.
fn largest_prime_factor(n: usize) -> usize {
    let mut remaining = n;
    let mut largest = 0;
    let mut i = 2;
    while i * i <= remaining {
        // BUG FIX: divide each candidate out *completely*. The original
        // divided only once per candidate, which could leave a composite
        // remainder and report it as prime (e.g. 8 -> 4 instead of 2).
        while remaining % i == 0 {
            remaining /= i;
            largest = i;
        }
        i += 1;
    }
    // Whatever is left above 1 is a prime larger than all factors found.
    if remaining > 1 {
        remaining
    } else {
        largest
    }
}
fn main() {
    println!("Largest prime factor: {}", largest_prime_factor(600851475143));
}
|
use std::collections::HashMap;
/// A symbol is a reference to an entry in SymbolTable
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy)]
pub struct Symbol(u32);
impl Symbol {
    /// Wraps a raw interning index in a `Symbol`.
    pub fn new(name: u32) -> Symbol {
        Self(name)
    }
    /// Returns the index widened to `usize`, suitable for slice lookups.
    pub fn to_usize(&self) -> usize {
        self.0 as usize
    }
}
/// A SymbolTable is a bi-directional mapping between strings and symbols
#[derive(Debug)]
pub struct SymbolTable {
    /// string -> symbol lookup; keys share allocations with `strings`' clones.
    names: HashMap<Box<str>, Symbol>,
    /// symbol index -> string lookup (a symbol is its position here).
    strings: Vec<Box<str>>,
}
impl SymbolTable {
    /// Creates an empty table.
    pub fn new() -> SymbolTable {
        SymbolTable {
            names: HashMap::new(),
            strings: Vec::new(),
        }
    }
    /// Returns the symbol for `string`, interning it on first use.
    pub fn get_symbol_for(&mut self, string: &str) -> Symbol {
        // Fast path: the string has been interned before.
        if let Some(&existing) = self.names.get(string) {
            return existing;
        }
        // Slow path: the next symbol is the next index into `strings`.
        let symbol = Symbol(self.strings.len() as u32);
        let owned: Box<str> = Box::from(string);
        self.strings.push(owned.clone());
        self.names.insert(owned, symbol);
        symbol
    }
    /// Resolves a symbol back to its string.
    /// Panics if `symbol` did not come from this table.
    pub fn get_string(&self, symbol: Symbol) -> &str {
        &self.strings[symbol.to_usize()]
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // Round-trip: interning the same string twice yields the same symbol,
    // different strings yield different symbols, and symbols resolve back
    // to their original strings.
    #[test]
    fn symbol_table() {
        let mut symboltable = SymbolTable::new();
        let sym_a = symboltable.get_symbol_for("a");
        let sym_b = symboltable.get_symbol_for("b");
        assert_ne!(sym_a, sym_b);
        assert_eq!(sym_a, symboltable.get_symbol_for("a"));
        assert_eq!(symboltable.get_string(sym_a), "a");
        assert_eq!(symboltable.get_string(sym_b), "b");
    }
}
|
extern crate raster;
mod elapsed_metrics;
use elapsed_metrics::ElapsedMetrics;
use raster::{editor, ResizeMode};
use std::env;
use std::error::Error;
/// Resizes the image at `file_location` to `width` x `height` and writes it
/// to `raster-out.<ext>`, printing load/resize/save timings.
fn main() -> Result<(), Box<Error>> {
    // Args arrangement
    let mut args = env::args().skip(1);
    assert_eq!(args.len(), 3, "Arguments must be: file_location width height");
    let mut metrics = ElapsedMetrics::new();
    // Reading args
    let file_location = args.next().unwrap();
    let width = args.next().unwrap().parse()?;
    let height = args.next().unwrap().parse()?;
    // Do the job
    metrics.start();
    let mut image = raster::open(file_location.as_str()).unwrap();
    metrics.set_load_time();
    metrics.start();
    editor::resize(&mut image, width, height, ResizeMode::Fill).unwrap();
    metrics.set_resize_time();
    metrics.start();
    // Create output file name from the input's extension.
    // BUG FIX: the original sliced off the last three characters, which is
    // wrong for extensions that are not exactly three letters (e.g. ".jpeg"),
    // panics when there is no dot, and can split a multi-byte character.
    let ext_start = file_location
        .rfind('.')
        .map(|i| i + 1)
        .unwrap_or(file_location.len());
    let mut out_file_name = "raster-out.".to_string();
    out_file_name.push_str(&file_location[ext_start..]);
    // Save resized image
    raster::save(&image, out_file_name.as_str()).unwrap();
    metrics.set_save_time();
    // All was ok
    println!("{:?}", metrics);
    Ok(())
}
|
//! Module contains wifi-related structures, enumerations and functions
use crate::{
ffi::*,
error::*,
system_event::*,
network_adapter,
};
// Disconnect / failure reason codes reported by the wifi driver.
// Values mirror the C SDK's `wifi_err_reason_t` (note: value 12 is unused).
pub type wifi_err_reason_t = u32;
pub const wifi_err_reason_t_WIFI_REASON_UNSPECIFIED: wifi_err_reason_t = 1;
pub const wifi_err_reason_t_WIFI_REASON_AUTH_EXPIRE: wifi_err_reason_t = 2;
pub const wifi_err_reason_t_WIFI_REASON_AUTH_LEAVE: wifi_err_reason_t = 3;
pub const wifi_err_reason_t_WIFI_REASON_ASSOC_EXPIRE: wifi_err_reason_t = 4;
pub const wifi_err_reason_t_WIFI_REASON_ASSOC_TOOMANY: wifi_err_reason_t = 5;
pub const wifi_err_reason_t_WIFI_REASON_NOT_AUTHED: wifi_err_reason_t = 6;
pub const wifi_err_reason_t_WIFI_REASON_NOT_ASSOCED: wifi_err_reason_t = 7;
pub const wifi_err_reason_t_WIFI_REASON_ASSOC_LEAVE: wifi_err_reason_t = 8;
pub const wifi_err_reason_t_WIFI_REASON_ASSOC_NOT_AUTHED: wifi_err_reason_t = 9;
pub const wifi_err_reason_t_WIFI_REASON_DISASSOC_PWRCAP_BAD: wifi_err_reason_t = 10;
pub const wifi_err_reason_t_WIFI_REASON_DISASSOC_SUPCHAN_BAD: wifi_err_reason_t = 11;
pub const wifi_err_reason_t_WIFI_REASON_IE_INVALID: wifi_err_reason_t = 13;
pub const wifi_err_reason_t_WIFI_REASON_MIC_FAILURE: wifi_err_reason_t = 14;
pub const wifi_err_reason_t_WIFI_REASON_4WAY_HANDSHAKE_TIMEOUT: wifi_err_reason_t = 15;
pub const wifi_err_reason_t_WIFI_REASON_GROUP_KEY_UPDATE_TIMEOUT: wifi_err_reason_t = 16;
pub const wifi_err_reason_t_WIFI_REASON_IE_IN_4WAY_DIFFERS: wifi_err_reason_t = 17;
pub const wifi_err_reason_t_WIFI_REASON_GROUP_CIPHER_INVALID: wifi_err_reason_t = 18;
pub const wifi_err_reason_t_WIFI_REASON_PAIRWISE_CIPHER_INVALID: wifi_err_reason_t = 19;
pub const wifi_err_reason_t_WIFI_REASON_AKMP_INVALID: wifi_err_reason_t = 20;
pub const wifi_err_reason_t_WIFI_REASON_UNSUPP_RSN_IE_VERSION: wifi_err_reason_t = 21;
pub const wifi_err_reason_t_WIFI_REASON_INVALID_RSN_IE_CAP: wifi_err_reason_t = 22;
pub const wifi_err_reason_t_WIFI_REASON_802_1X_AUTH_FAILED: wifi_err_reason_t = 23;
pub const wifi_err_reason_t_WIFI_REASON_CIPHER_SUITE_REJECTED: wifi_err_reason_t = 24;
// Reasons >= 200 are driver-local (not 802.11 reason codes).
pub const wifi_err_reason_t_WIFI_REASON_BEACON_TIMEOUT: wifi_err_reason_t = 200;
pub const wifi_err_reason_t_WIFI_REASON_NO_AP_FOUND: wifi_err_reason_t = 201;
pub const wifi_err_reason_t_WIFI_REASON_AUTH_FAIL: wifi_err_reason_t = 202;
pub const wifi_err_reason_t_WIFI_REASON_ASSOC_FAIL: wifi_err_reason_t = 203;
pub const wifi_err_reason_t_WIFI_REASON_HANDSHAKE_TIMEOUT: wifi_err_reason_t = 204;
pub const wifi_err_reason_t_WIFI_REASON_BASIC_RATE_NOT_SUPPORT: wifi_err_reason_t = 205;
// Operating mode of the wifi driver (off / station / AP / both).
pub type wifi_mode_t = u32;
pub const wifi_mode_t_WIFI_MODE_NULL: wifi_mode_t = 0;
pub const wifi_mode_t_WIFI_MODE_STA: wifi_mode_t = 1;
pub const wifi_mode_t_WIFI_MODE_AP: wifi_mode_t = 2;
pub const wifi_mode_t_WIFI_MODE_APSTA: wifi_mode_t = 3;
pub const wifi_mode_t_WIFI_MODE_MAX: wifi_mode_t = 4;
// Authentication modes.
pub type wifi_auth_mode_t = u32;
pub const wifi_auth_mode_t_WIFI_AUTH_OPEN: wifi_auth_mode_t = 0;
pub const wifi_auth_mode_t_WIFI_AUTH_WEP: wifi_auth_mode_t = 1;
pub const wifi_auth_mode_t_WIFI_AUTH_WPA_PSK: wifi_auth_mode_t = 2;
pub const wifi_auth_mode_t_WIFI_AUTH_WPA2_PSK: wifi_auth_mode_t = 3;
pub const wifi_auth_mode_t_WIFI_AUTH_WPA_WPA2_PSK: wifi_auth_mode_t = 4;
pub const wifi_auth_mode_t_WIFI_AUTH_WPA2_ENTERPRISE: wifi_auth_mode_t = 5;
pub const wifi_auth_mode_t_WIFI_AUTH_MAX: wifi_auth_mode_t = 6;
// Scan strategy when connecting.
pub type wifi_scan_method_t = u32;
pub const wifi_scan_method_t_WIFI_FAST_SCAN: wifi_scan_method_t = 0;
pub const wifi_scan_method_t_WIFI_ALL_CHANNEL_SCAN: wifi_scan_method_t = 1;
// AP ordering when multiple candidates match.
pub type wifi_sort_method_t = u32;
pub const wifi_sort_method_t_WIFI_CONNECT_AP_BY_SIGNAL: wifi_sort_method_t = 0;
pub const wifi_sort_method_t_WIFI_CONNECT_AP_BY_SECURITY: wifi_sort_method_t = 1;
// 802.11 protocol bitmap flags (combinable).
pub const WIFI_PROTOCOL_11B: u32 = 1;
pub const WIFI_PROTOCOL_11G: u32 = 2;
pub const WIFI_PROTOCOL_11N: u32 = 4;
// The wifi interface id reuses the network adapter's interface type.
pub use network_adapter::esp_interface_t as wifi_interface_t;
// Wifi driver initialization parameters; layout must match the C SDK struct
// exactly (`#[repr(C)]`), so do not reorder or retype fields.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct wifi_init_config_t {
    pub event_handler: system_event_handler_t,
    pub osi_funcs: *mut xtensa_void,
    pub qos_enable: u8,
    pub ampdu_rx_enable: u8,
    pub rx_ba_win: u8,
    pub rx_ampdu_buf_num: u8,
    pub rx_ampdu_buf_len: u32,
    pub rx_max_single_pkt_len: u32,
    pub rx_buf_len: u32,
    pub amsdu_rx_enable: u8,
    pub rx_buf_num: u8,
    pub rx_pkt_num: u8,
    pub left_continuous_rx_buf_num: u8,
    pub tx_buf_num: u8,
    pub nvs_enable: u8,
    pub nano_enable: u8,
    // Checked by the driver; see WIFI_INIT_CONFIG_DEFAULT.
    pub magic: u32,
}
// Station/AP configuration structs; all `#[repr(C)]` to match the C SDK ABI.
#[repr(C)]
#[derive(Copy, Clone)]
pub struct wifi_scan_threshold_t {
    pub rssi: i8,
    pub authmode: wifi_auth_mode_t,
}
// Station (client) mode configuration.
#[repr(C)]
#[derive(Copy, Clone)]
pub struct wifi_sta_config_t {
    pub ssid: [u8; 32usize],
    pub password: [u8; 64usize],
    pub scan_method: wifi_scan_method_t,
    pub bssid_set: bool,
    pub bssid: [u8; 6usize],
    pub channel: u8,
    pub listen_interval: u16,
    pub sort_method: wifi_sort_method_t,
    pub threshold: wifi_scan_threshold_t,
}
// Access-point mode configuration.
#[repr(C)]
#[derive(Copy, Clone)]
pub struct wifi_ap_config_t {
    pub ssid: [u8; 32usize],
    pub password: [u8; 64usize],
    pub ssid_len: u8,
    pub channel: u8,
    pub authmode: wifi_auth_mode_t,
    pub ssid_hidden: u8,
    pub max_connection: u8,
    pub beacon_interval: u16,
}
// C-style union: interpret as `ap` or `sta` depending on the interface the
// config is applied to (see `esp_wifi_set_config`).
#[repr(C)]
#[derive(Copy, Clone)]
pub union wifi_config_t {
    pub ap: wifi_ap_config_t,
    pub sta: wifi_sta_config_t,
    _bindgen_union_align: [u32; 31usize],
}
// Raw FFI entry points into the C SDK wifi driver; each returns an
// `esp_err_t` status code.
extern "C" {
    pub fn esp_wifi_init(config: *const wifi_init_config_t) -> esp_err_t;
    pub fn esp_wifi_deinit() -> esp_err_t;
    pub fn esp_wifi_set_mode(mode: wifi_mode_t) -> esp_err_t;
    pub fn esp_wifi_set_config(interface: wifi_interface_t, conf: *mut wifi_config_t) -> esp_err_t;
    pub fn esp_wifi_start() -> esp_err_t;
    pub fn esp_wifi_stop() -> esp_err_t;
    pub fn esp_wifi_set_protocol(ifx: wifi_interface_t, protocol_bitmap: u8) -> esp_err_t;
    pub fn esp_wifi_connect() -> esp_err_t;
}
/// Builds the default wifi driver configuration, mirroring the C SDK's
/// `WIFI_INIT_CONFIG_DEFAULT()` macro (including its `magic` check value).
///
/// # Safety
///
/// NOTE(review): the function only constructs a struct that references the
/// raw C callback `esp_event_send`; the `unsafe` marker presumably reflects
/// that the config must only be passed to `esp_wifi_init` once the event
/// subsystem is usable — confirm against the SDK docs.
pub unsafe fn WIFI_INIT_CONFIG_DEFAULT() -> wifi_init_config_t {
    wifi_init_config_t {
        event_handler: Some(esp_event_send),
        osi_funcs: ::core::ptr::null_mut(),
        qos_enable: 0,
        ampdu_rx_enable: 0,
        rx_ampdu_buf_len: 256,
        rx_ampdu_buf_num: 5,
        amsdu_rx_enable: 0,
        rx_ba_win: 0,
        rx_max_single_pkt_len: 1600 - 524,
        rx_buf_len: 524,
        rx_buf_num: 16,
        left_continuous_rx_buf_num: 4,
        rx_pkt_num: 7,
        tx_buf_num: 6,
        nvs_enable: 1,
        nano_enable: 0,
        magic: 0x1F2F3F4F,
    }
}
use crate::config::Config;
use crate::utils;
use std::fs;
use std::path::PathBuf;
/// Adds `path` (resolved against the current directory) to the set of
/// tracked paths and persists the configuration.
pub fn track(path: PathBuf, mut config: Config) -> Result<(), failure::Error> {
    let full_path = utils::normalize_path(&std::env::current_dir()?.join(path));
    // Bail out early unless the path is a readable directory.
    fs::read_dir(&full_path)?;
    let newly_added = config.paths.insert(full_path.clone());
    let verb = if newly_added {
        "Now tracking"
    } else {
        "Already tracking"
    };
    println!("{} {:?}", verb, full_path);
    config.store()?;
    Ok(())
}
|
// Generated (svd2rust-style) register API for the Timerx Deadtime (DTCR)
// register; see DTCR_SPEC at the bottom of this file.
#[doc = "Register `DTCR` reader"]
pub type R = crate::R<DTCR_SPEC>;
#[doc = "Register `DTCR` writer"]
pub type W = crate::W<DTCR_SPEC>;
#[doc = "Field `DTRx` reader - Deadtime Rising value"]
pub type DTRX_R = crate::FieldReader<u16>;
#[doc = "Field `DTRx` writer - Deadtime Rising value"]
pub type DTRX_W<'a, REG, const O: u8> = crate::FieldWriterSafe<'a, REG, 9, O, u16>;
// SDTRx: sign of the rising-edge deadtime (bit 9 of DTCR).
#[doc = "Field `SDTRx` reader - Sign Deadtime Rising value"]
pub type SDTRX_R = crate::BitReader<SDTRX_A>;
#[doc = "Sign Deadtime Rising value\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum SDTRX_A {
    #[doc = "0: Positive deadtime on rising edge"]
    Positive = 0,
    #[doc = "1: Negative deadtime on rising edge"]
    Negative = 1,
}
impl From<SDTRX_A> for bool {
    #[inline(always)]
    fn from(variant: SDTRX_A) -> Self {
        variant as u8 != 0
    }
}
impl SDTRX_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> SDTRX_A {
        match self.bits {
            false => SDTRX_A::Positive,
            true => SDTRX_A::Negative,
        }
    }
    #[doc = "Positive deadtime on rising edge"]
    #[inline(always)]
    pub fn is_positive(&self) -> bool {
        *self == SDTRX_A::Positive
    }
    #[doc = "Negative deadtime on rising edge"]
    #[inline(always)]
    pub fn is_negative(&self) -> bool {
        *self == SDTRX_A::Negative
    }
}
#[doc = "Field `SDTRx` writer - Sign Deadtime Rising value"]
pub type SDTRX_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, SDTRX_A>;
impl<'a, REG, const O: u8> SDTRX_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "Positive deadtime on rising edge"]
    #[inline(always)]
    pub fn positive(self) -> &'a mut crate::W<REG> {
        self.variant(SDTRX_A::Positive)
    }
    #[doc = "Negative deadtime on rising edge"]
    #[inline(always)]
    pub fn negative(self) -> &'a mut crate::W<REG> {
        self.variant(SDTRX_A::Negative)
    }
}
// DTPRSC: 3-bit deadtime clock prescaler (bits 10:12 of DTCR).
#[doc = "Field `DTPRSC` reader - Deadtime Prescaler"]
pub type DTPRSC_R = crate::FieldReader;
#[doc = "Field `DTPRSC` writer - Deadtime Prescaler"]
pub type DTPRSC_W<'a, REG, const O: u8> = crate::FieldWriterSafe<'a, REG, 3, O>;
// DTRSLKx: write-lock for the rising deadtime *sign* only (bit 14).
#[doc = "Field `DTRSLKx` reader - Deadtime Rising Sign Lock"]
pub type DTRSLKX_R = crate::BitReader<DTRSLKX_A>;
#[doc = "Deadtime Rising Sign Lock\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum DTRSLKX_A {
    #[doc = "0: Deadtime rising sign is writable"]
    Unlocked = 0,
    #[doc = "1: Deadtime rising sign is read-only"]
    Locked = 1,
}
impl From<DTRSLKX_A> for bool {
    #[inline(always)]
    fn from(variant: DTRSLKX_A) -> Self {
        variant as u8 != 0
    }
}
impl DTRSLKX_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> DTRSLKX_A {
        match self.bits {
            false => DTRSLKX_A::Unlocked,
            true => DTRSLKX_A::Locked,
        }
    }
    #[doc = "Deadtime rising sign is writable"]
    #[inline(always)]
    pub fn is_unlocked(&self) -> bool {
        *self == DTRSLKX_A::Unlocked
    }
    #[doc = "Deadtime rising sign is read-only"]
    #[inline(always)]
    pub fn is_locked(&self) -> bool {
        *self == DTRSLKX_A::Locked
    }
}
#[doc = "Field `DTRSLKx` writer - Deadtime Rising Sign Lock"]
pub type DTRSLKX_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, DTRSLKX_A>;
impl<'a, REG, const O: u8> DTRSLKX_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "Deadtime rising sign is writable"]
    #[inline(always)]
    pub fn unlocked(self) -> &'a mut crate::W<REG> {
        self.variant(DTRSLKX_A::Unlocked)
    }
    #[doc = "Deadtime rising sign is read-only"]
    #[inline(always)]
    pub fn locked(self) -> &'a mut crate::W<REG> {
        self.variant(DTRSLKX_A::Locked)
    }
}
// DTRLKx: write-lock for the rising deadtime value *and* sign (bit 15).
#[doc = "Field `DTRLKx` reader - Deadtime Rising Lock"]
pub type DTRLKX_R = crate::BitReader<DTRLKX_A>;
#[doc = "Deadtime Rising Lock\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum DTRLKX_A {
    #[doc = "0: Deadtime rising value and sign is writable"]
    Unlocked = 0,
    #[doc = "1: Deadtime rising value and sign is read-only"]
    Locked = 1,
}
impl From<DTRLKX_A> for bool {
    #[inline(always)]
    fn from(variant: DTRLKX_A) -> Self {
        variant as u8 != 0
    }
}
impl DTRLKX_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> DTRLKX_A {
        match self.bits {
            false => DTRLKX_A::Unlocked,
            true => DTRLKX_A::Locked,
        }
    }
    #[doc = "Deadtime rising value and sign is writable"]
    #[inline(always)]
    pub fn is_unlocked(&self) -> bool {
        *self == DTRLKX_A::Unlocked
    }
    #[doc = "Deadtime rising value and sign is read-only"]
    #[inline(always)]
    pub fn is_locked(&self) -> bool {
        *self == DTRLKX_A::Locked
    }
}
#[doc = "Field `DTRLKx` writer - Deadtime Rising Lock"]
pub type DTRLKX_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, DTRLKX_A>;
impl<'a, REG, const O: u8> DTRLKX_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "Deadtime rising value and sign is writable"]
    #[inline(always)]
    pub fn unlocked(self) -> &'a mut crate::W<REG> {
        self.variant(DTRLKX_A::Unlocked)
    }
    #[doc = "Deadtime rising value and sign is read-only"]
    #[inline(always)]
    pub fn locked(self) -> &'a mut crate::W<REG> {
        self.variant(DTRLKX_A::Locked)
    }
}
// DTFx: 9-bit falling-edge deadtime value (bits 16:24 of DTCR).
#[doc = "Field `DTFx` reader - Deadtime Falling value"]
pub type DTFX_R = crate::FieldReader<u16>;
#[doc = "Field `DTFx` writer - Deadtime Falling value"]
pub type DTFX_W<'a, REG, const O: u8> = crate::FieldWriterSafe<'a, REG, 9, O, u16>;
// SDTFx: sign of the falling-edge deadtime (bit 25).
#[doc = "Field `SDTFx` reader - Sign Deadtime Falling value"]
pub type SDTFX_R = crate::BitReader<SDTFX_A>;
#[doc = "Sign Deadtime Falling value\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum SDTFX_A {
    #[doc = "0: Positive deadtime on falling edge"]
    Positive = 0,
    #[doc = "1: Negative deadtime on falling edge"]
    Negative = 1,
}
impl From<SDTFX_A> for bool {
    #[inline(always)]
    fn from(variant: SDTFX_A) -> Self {
        variant as u8 != 0
    }
}
impl SDTFX_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> SDTFX_A {
        match self.bits {
            false => SDTFX_A::Positive,
            true => SDTFX_A::Negative,
        }
    }
    #[doc = "Positive deadtime on falling edge"]
    #[inline(always)]
    pub fn is_positive(&self) -> bool {
        *self == SDTFX_A::Positive
    }
    #[doc = "Negative deadtime on falling edge"]
    #[inline(always)]
    pub fn is_negative(&self) -> bool {
        *self == SDTFX_A::Negative
    }
}
#[doc = "Field `SDTFx` writer - Sign Deadtime Falling value"]
pub type SDTFX_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, SDTFX_A>;
impl<'a, REG, const O: u8> SDTFX_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "Positive deadtime on falling edge"]
    #[inline(always)]
    pub fn positive(self) -> &'a mut crate::W<REG> {
        self.variant(SDTFX_A::Positive)
    }
    #[doc = "Negative deadtime on falling edge"]
    #[inline(always)]
    pub fn negative(self) -> &'a mut crate::W<REG> {
        self.variant(SDTFX_A::Negative)
    }
}
// DTFSLKx: write-lock for the falling deadtime *sign* only (bit 30).
#[doc = "Field `DTFSLKx` reader - Deadtime Falling Sign Lock"]
pub type DTFSLKX_R = crate::BitReader<DTFSLKX_A>;
#[doc = "Deadtime Falling Sign Lock\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum DTFSLKX_A {
    #[doc = "0: Deadtime falling sign is writable"]
    Unlocked = 0,
    #[doc = "1: Deadtime falling sign is read-only"]
    Locked = 1,
}
impl From<DTFSLKX_A> for bool {
    #[inline(always)]
    fn from(variant: DTFSLKX_A) -> Self {
        variant as u8 != 0
    }
}
impl DTFSLKX_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> DTFSLKX_A {
        match self.bits {
            false => DTFSLKX_A::Unlocked,
            true => DTFSLKX_A::Locked,
        }
    }
    #[doc = "Deadtime falling sign is writable"]
    #[inline(always)]
    pub fn is_unlocked(&self) -> bool {
        *self == DTFSLKX_A::Unlocked
    }
    #[doc = "Deadtime falling sign is read-only"]
    #[inline(always)]
    pub fn is_locked(&self) -> bool {
        *self == DTFSLKX_A::Locked
    }
}
#[doc = "Field `DTFSLKx` writer - Deadtime Falling Sign Lock"]
pub type DTFSLKX_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, DTFSLKX_A>;
impl<'a, REG, const O: u8> DTFSLKX_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "Deadtime falling sign is writable"]
    #[inline(always)]
    pub fn unlocked(self) -> &'a mut crate::W<REG> {
        self.variant(DTFSLKX_A::Unlocked)
    }
    #[doc = "Deadtime falling sign is read-only"]
    #[inline(always)]
    pub fn locked(self) -> &'a mut crate::W<REG> {
        self.variant(DTFSLKX_A::Locked)
    }
}
// DTFLKx: write-lock for the falling deadtime value *and* sign (bit 31).
#[doc = "Field `DTFLKx` reader - Deadtime Falling Lock"]
pub type DTFLKX_R = crate::BitReader<DTFLKX_A>;
#[doc = "Deadtime Falling Lock\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum DTFLKX_A {
    #[doc = "0: Deadtime falling value and sign is writable"]
    Unlocked = 0,
    #[doc = "1: Deadtime falling value and sign is read-only"]
    Locked = 1,
}
impl From<DTFLKX_A> for bool {
    #[inline(always)]
    fn from(variant: DTFLKX_A) -> Self {
        variant as u8 != 0
    }
}
impl DTFLKX_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> DTFLKX_A {
        match self.bits {
            false => DTFLKX_A::Unlocked,
            true => DTFLKX_A::Locked,
        }
    }
    #[doc = "Deadtime falling value and sign is writable"]
    #[inline(always)]
    pub fn is_unlocked(&self) -> bool {
        *self == DTFLKX_A::Unlocked
    }
    #[doc = "Deadtime falling value and sign is read-only"]
    #[inline(always)]
    pub fn is_locked(&self) -> bool {
        *self == DTFLKX_A::Locked
    }
}
#[doc = "Field `DTFLKx` writer - Deadtime Falling Lock"]
pub type DTFLKX_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, DTFLKX_A>;
impl<'a, REG, const O: u8> DTFLKX_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "Deadtime falling value and sign is writable"]
    #[inline(always)]
    pub fn unlocked(self) -> &'a mut crate::W<REG> {
        self.variant(DTFLKX_A::Unlocked)
    }
    #[doc = "Deadtime falling value and sign is read-only"]
    #[inline(always)]
    pub fn locked(self) -> &'a mut crate::W<REG> {
        self.variant(DTFLKX_A::Locked)
    }
}
// Field accessors on the register reader; each extracts its bit range from
// the raw 32-bit value (note bit 13 and bits 26:29 are reserved/unmapped).
impl R {
    #[doc = "Bits 0:8 - Deadtime Rising value"]
    #[inline(always)]
    pub fn dtrx(&self) -> DTRX_R {
        DTRX_R::new((self.bits & 0x01ff) as u16)
    }
    #[doc = "Bit 9 - Sign Deadtime Rising value"]
    #[inline(always)]
    pub fn sdtrx(&self) -> SDTRX_R {
        SDTRX_R::new(((self.bits >> 9) & 1) != 0)
    }
    #[doc = "Bits 10:12 - Deadtime Prescaler"]
    #[inline(always)]
    pub fn dtprsc(&self) -> DTPRSC_R {
        DTPRSC_R::new(((self.bits >> 10) & 7) as u8)
    }
    #[doc = "Bit 14 - Deadtime Rising Sign Lock"]
    #[inline(always)]
    pub fn dtrslkx(&self) -> DTRSLKX_R {
        DTRSLKX_R::new(((self.bits >> 14) & 1) != 0)
    }
    #[doc = "Bit 15 - Deadtime Rising Lock"]
    #[inline(always)]
    pub fn dtrlkx(&self) -> DTRLKX_R {
        DTRLKX_R::new(((self.bits >> 15) & 1) != 0)
    }
    #[doc = "Bits 16:24 - Deadtime Falling value"]
    #[inline(always)]
    pub fn dtfx(&self) -> DTFX_R {
        DTFX_R::new(((self.bits >> 16) & 0x01ff) as u16)
    }
    #[doc = "Bit 25 - Sign Deadtime Falling value"]
    #[inline(always)]
    pub fn sdtfx(&self) -> SDTFX_R {
        SDTFX_R::new(((self.bits >> 25) & 1) != 0)
    }
    #[doc = "Bit 30 - Deadtime Falling Sign Lock"]
    #[inline(always)]
    pub fn dtfslkx(&self) -> DTFSLKX_R {
        DTFSLKX_R::new(((self.bits >> 30) & 1) != 0)
    }
    #[doc = "Bit 31 - Deadtime Falling Lock"]
    #[inline(always)]
    pub fn dtflkx(&self) -> DTFLKX_R {
        DTFLKX_R::new(((self.bits >> 31) & 1) != 0)
    }
}
// Field writer proxies on the register writer; the const generic parameter
// is the field's bit offset within DTCR.
impl W {
    #[doc = "Bits 0:8 - Deadtime Rising value"]
    #[inline(always)]
    #[must_use]
    pub fn dtrx(&mut self) -> DTRX_W<DTCR_SPEC, 0> {
        DTRX_W::new(self)
    }
    #[doc = "Bit 9 - Sign Deadtime Rising value"]
    #[inline(always)]
    #[must_use]
    pub fn sdtrx(&mut self) -> SDTRX_W<DTCR_SPEC, 9> {
        SDTRX_W::new(self)
    }
    #[doc = "Bits 10:12 - Deadtime Prescaler"]
    #[inline(always)]
    #[must_use]
    pub fn dtprsc(&mut self) -> DTPRSC_W<DTCR_SPEC, 10> {
        DTPRSC_W::new(self)
    }
    #[doc = "Bit 14 - Deadtime Rising Sign Lock"]
    #[inline(always)]
    #[must_use]
    pub fn dtrslkx(&mut self) -> DTRSLKX_W<DTCR_SPEC, 14> {
        DTRSLKX_W::new(self)
    }
    #[doc = "Bit 15 - Deadtime Rising Lock"]
    #[inline(always)]
    #[must_use]
    pub fn dtrlkx(&mut self) -> DTRLKX_W<DTCR_SPEC, 15> {
        DTRLKX_W::new(self)
    }
    #[doc = "Bits 16:24 - Deadtime Falling value"]
    #[inline(always)]
    #[must_use]
    pub fn dtfx(&mut self) -> DTFX_W<DTCR_SPEC, 16> {
        DTFX_W::new(self)
    }
    #[doc = "Bit 25 - Sign Deadtime Falling value"]
    #[inline(always)]
    #[must_use]
    pub fn sdtfx(&mut self) -> SDTFX_W<DTCR_SPEC, 25> {
        SDTFX_W::new(self)
    }
    #[doc = "Bit 30 - Deadtime Falling Sign Lock"]
    #[inline(always)]
    #[must_use]
    pub fn dtfslkx(&mut self) -> DTFSLKX_W<DTCR_SPEC, 30> {
        DTFSLKX_W::new(self)
    }
    #[doc = "Bit 31 - Deadtime Falling Lock"]
    #[inline(always)]
    #[must_use]
    pub fn dtflkx(&mut self) -> DTFLKX_W<DTCR_SPEC, 31> {
        DTFLKX_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "Timerx Deadtime Register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`dtcr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`dtcr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct DTCR_SPEC;
// Marker type describing the DTCR register: 32 bits wide, readable,
// writable, reset value 0.
impl crate::RegisterSpec for DTCR_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`dtcr::R`](R) reader structure"]
impl crate::Readable for DTCR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`dtcr::W`](W) writer structure"]
impl crate::Writable for DTCR_SPEC {
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets DTCR to value 0"]
impl crate::Resettable for DTCR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
// Copyright (c) Facebook, Inc. and its affiliates.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the root directory of this source tree.
use criterion::{criterion_group, criterion_main, BenchmarkId, Criterion};
use math::{fields::f128::BaseElement, get_power_series, log2, polynom, FieldElement, StarkField};
use utils::group_vector_elements;
use winter_fri::folding;
static BATCH_SIZES: [usize; 3] = [65536, 131072, 262144];
/// Benchmarks `polynom::interpolate_batch` over each configured batch size.
pub fn interpolate_batch(c: &mut Criterion) {
    let mut group = c.benchmark_group("interpolate batch");
    for &size in BATCH_SIZES.iter() {
        let (xs, ys) = build_coordinate_batches(size);
        group.bench_function(BenchmarkId::new("generic", size), |b| {
            b.iter(|| polynom::interpolate_batch(&xs, &ys))
        });
    }
}
/// Benchmarks `folding::apply_drp` over each configured batch size.
pub fn apply_drp(c: &mut Criterion) {
    let mut group = c.benchmark_group("drp");
    for &size in BATCH_SIZES.iter() {
        let (_, ys) = build_coordinate_batches(size);
        let alpha = BaseElement::rand();
        group.bench_function(BenchmarkId::new("base field", size), |b| {
            b.iter(|| folding::apply_drp(&ys, BaseElement::GENERATOR, alpha))
        });
    }
}
// Register both benchmarks under a single criterion group and entry point.
criterion_group!(quartic_group, interpolate_batch, apply_drp);
criterion_main!(quartic_group);
// HELPER FUNCTIONS
// ================================================================================================
/// Builds x/y batches of 4-element arrays for benchmarking; the y values use
/// a fixed PRNG seed so runs are reproducible.
fn build_coordinate_batches(batch_size: usize) -> (Vec<[BaseElement; 4]>, Vec<[BaseElement; 4]>) {
    let r = BaseElement::get_root_of_unity(log2(batch_size));
    let xs = group_vector_elements(get_power_series(r, batch_size));
    let ys = group_vector_elements(BaseElement::prng_vector([1; 32], batch_size));
    (xs, ys)
}
|
use std::{io, net::TcpStream, sync::mpsc, thread};
use anyhow::Error;
use io::BufRead;
use nmea::Nmea;
use termion::{event::Key, input::MouseTerminal, raw::IntoRawMode, screen::AlternateScreen};
use tui::{
backend::TermionBackend,
layout::{Constraint, Direction, Layout},
style::{Color, Style},
text::{Span, Spans},
widgets::{Block, Borders, Paragraph, Wrap},
Terminal,
};
#[allow(dead_code)]
mod util {
    use std::io;
    use std::sync::mpsc;
    use std::sync::{
        atomic::{AtomicBool, Ordering},
        Arc,
    };
    use std::thread;
    use std::time::Duration;
    use termion::event::Key;
    use termion::input::TermRead;
    /// Either a key press or a timer tick.
    pub enum Event<I> {
        Input(I),
        Tick,
    }
    /// A small event handler that wrap termion input and tick events. Each event
    /// type is handled in its own thread and returned to a common `Receiver`
    pub struct Events {
        rx: mpsc::Receiver<Event<Key>>,
        input_handle: thread::JoinHandle<()>,
        ignore_exit_key: Arc<AtomicBool>,
        tick_handle: thread::JoinHandle<()>,
    }
    #[derive(Debug, Clone, Copy)]
    pub struct Config {
        /// Key that terminates the input thread (unless ignored).
        pub exit_key: Key,
        /// Interval between `Event::Tick`s.
        pub tick_rate: Duration,
    }
    impl Default for Config {
        fn default() -> Config {
            Config {
                exit_key: Key::Char('q'),
                tick_rate: Duration::from_millis(50),
            }
        }
    }
    impl Events {
        pub fn new() -> Events {
            Events::with_config(Config::default())
        }
        pub fn with_config(config: Config) -> Events {
            let (tx, rx) = mpsc::channel();
            let ignore_exit_key = Arc::new(AtomicBool::new(false));
            // Input thread: forwards every key press and returns on the
            // configured exit key unless `ignore_exit_key` is set.
            let input_handle = {
                let tx = tx.clone();
                let ignore_exit_key = ignore_exit_key.clone();
                thread::spawn(move || {
                    let stdin = io::stdin();
                    for evt in stdin.keys() {
                        if let Ok(key) = evt {
                            if let Err(err) = tx.send(Event::Input(key)) {
                                eprintln!("{}", err);
                                return;
                            }
                            if !ignore_exit_key.load(Ordering::Relaxed) && key == config.exit_key {
                                return;
                            }
                        }
                    }
                })
            };
            // Tick thread: emits `Event::Tick` at `tick_rate` forever.
            let tick_handle = {
                thread::spawn(move || loop {
                    tx.send(Event::Tick).unwrap();
                    thread::sleep(config.tick_rate);
                })
            };
            Events {
                rx,
                ignore_exit_key,
                input_handle,
                tick_handle,
            }
        }
        /// Non-blocking poll for the next event.
        pub fn next(&self) -> Result<Event<Key>, mpsc::TryRecvError> {
            self.rx.try_recv()
        }
        pub fn disable_exit_key(&mut self) {
            self.ignore_exit_key.store(true, Ordering::Relaxed);
        }
        pub fn enable_exit_key(&mut self) {
            self.ignore_exit_key.store(false, Ordering::Relaxed);
        }
    }
}
use chrono::{DateTime, Datelike, NaiveDate, NaiveDateTime, Timelike};
use chrono::{Local, SecondsFormat, Utc};
use util::*;
/// Formats the GPS fix date/time, its offset from `rmc_datetime`, and the
/// sub-second component. Returns `None` until both fix date and time exist.
fn datetime_str(nmea: &Nmea, rmc_datetime: NaiveDateTime) -> Option<String> {
    let fix_date = nmea.fix_date?;
    // NOTE(review): assumes `fix_date` carries a year relative to 2000 —
    // confirm against the nmea crate's `fix_date` semantics. Also,
    // `NaiveDate::from_ymd` panics on out-of-range components.
    let date = NaiveDate::from_ymd(fix_date.year() + 2000, fix_date.month(), fix_date.day());
    let datetime = date.and_time(nmea.fix_time?);
    let diff = rmc_datetime - datetime;
    Some(format!(
        "{} / diff={} / {}",
        datetime,
        diff,
        datetime.nanosecond()
    ))
}
/// "lat / lon / alt" with 6 decimals; `None` until a position fix exists.
fn latlonalt_str(nmea: &Nmea) -> Option<String> {
    Some(format!(
        "{:.6} / {:.6} / {:.6}",
        nmea.latitude?, nmea.longitude?, nmea.altitude?
    ))
}
/// Horizontal / vertical / positional dilution of precision, 2 decimals;
/// `None` until all three values are available.
fn dop_str(nmea: &Nmea) -> Option<String> {
    Some(format!(
        "{:.2} / {:.2} / {:.2}",
        nmea.hdop?, nmea.vdop?, nmea.pdop?
    ))
}
/// Unwraps an optional display string, substituting a placeholder when the
/// value is not (yet) available from the NMEA parser.
fn option_str(s: Option<String>) -> String {
    // `unwrap_or_else` replaces the manual Some/None match and only
    // allocates the placeholder when `s` is `None`.
    s.unwrap_or_else(|| "<not available>".to_owned())
}
/// TUI entry point: reads NMEA sentences from TCP (`NMEACLI_ADDR`) or a
/// device file (`NMEACLI_DEV`), parses them, and renders status /
/// satellites / recent-messages panes with tui + termion.
fn main() -> Result<(), Error> {
    // Terminal initialization
    let stdout = io::stdout().into_raw_mode()?;
    let stdout = MouseTerminal::from(stdout);
    let stdout = AlternateScreen::from(stdout);
    let backend = TermionBackend::new(stdout);
    let mut terminal = Terminal::new(backend)?;
    terminal.hide_cursor()?;
    terminal.clear()?;
    let events = Events::new();
    // Channel carrying (arrival time, raw sentence) from the reader thread.
    let (tx, rx) = mpsc::channel();
    let bufread: io::BufReader<Box<dyn io::Read + Send>> =
        match (std::env::var("NMEACLI_ADDR"), std::env::var("NMEACLI_DEV")) {
            (Ok(addr), _) => {
                let stream = TcpStream::connect(addr)?;
                io::BufReader::new(Box::new(stream))
            }
            (_, Ok(dev)) => {
                let file = std::fs::File::open(dev)?;
                io::BufReader::new(Box::new(file))
            }
            _ => {
                panic!("NMEACLI_ADDR or NMEACLI_DEV should be specified");
            }
        };
    let _thread = thread::spawn(move || {
        let tx = tx.clone();
        let mut lines = bufread.lines();
        // Drop the first line: it may be a partial sentence when attaching
        // to an already-running stream.
        lines.next();
        for line in lines {
            let local: DateTime<Local> = Local::now();
            // NOTE(review): `unwrap` aborts the reader thread on I/O errors;
            // the UI keeps running but stops receiving data.
            let line = line.unwrap();
            tx.send((local, line)).ok();
        }
    });
    let mut nmea = Nmea::new();
    let mut messages = Vec::new();
    let mut rmc_datetime = Utc::now().naive_utc();
    'outer: loop {
        // Drain all sentences queued since the last frame before redrawing.
        while let Ok((local, line)) = rx.try_recv() {
            if let Ok(msg) = nmea.parse(&line) {
                let time_str = local.to_rfc3339_opts(SecondsFormat::Secs, true);
                // Newest message first; the log is capped at 100 entries.
                messages.insert(
                    0,
                    Spans::from(vec![
                        Span::styled(time_str, Style::default().fg(Color::DarkGray)),
                        Span::raw(format!(" {}\n", line.trim())),
                    ]),
                );
                while messages.len() > 100 {
                    messages.pop();
                }
                match msg {
                    nmea::SentenceType::RMC => {
                        // Remember when the last RMC arrived so the status
                        // pane can show the clock offset.
                        rmc_datetime = local.naive_utc();
                    }
                    _ => {}
                }
            }
        }
        // Non-blocking: `next()` uses `try_recv`, so this loop also ends when
        // the queue is empty or a Tick event is at the front.
        while let Ok(Event::Input(input)) = events.next() {
            if let Key::Char('q') = input {
                break 'outer;
            }
        }
        terminal.draw(|f| {
            // Three vertical panes: fixed-height status, flexible satellite
            // list, fixed-height message log.
            let chunks = Layout::default()
                .direction(Direction::Vertical)
                .constraints(
                    [
                        Constraint::Length(5),
                        Constraint::Min(15),
                        Constraint::Length(20),
                    ]
                    .as_ref(),
                )
                .split(f.size());
            {
                let chunk = chunks[0];
                let block = Block::default().title("Status").borders(Borders::TOP);
                let mut msgs = Vec::new();
                msgs.push(Spans::from(format!(
                    "datetime : {}\n",
                    option_str(datetime_str(&nmea, rmc_datetime)),
                )));
                msgs.push(Spans::from(format!(
                    "latlonalt : {}\n",
                    option_str(latlonalt_str(&nmea)),
                )));
                msgs.push(Spans::from(format!(
                    "dop (h/v/p): {}\n",
                    option_str(dop_str(&nmea)),
                )));
                let body_rect = block.inner(chunk);
                let paragraph = Paragraph::new(msgs).wrap(Wrap { trim: false });
                f.render_widget(block, chunk);
                f.render_widget(paragraph, body_rect);
            }
            {
                let chunk = chunks[1];
                let title = Span::raw(format!(
                    "Satellites (fixed={}, total={})",
                    option_str(nmea.num_of_fix_satellites.map(|v| v.to_string())),
                    nmea.satellites.len(),
                ));
                let block = Block::default().title(title).borders(Borders::TOP);
                let mut msgs = Vec::new();
                for sat in &nmea.satellites {
                    msgs.push(Spans::from(format!("{}\n", sat)));
                }
                let body_rect = block.inner(chunk);
                let paragraph = Paragraph::new(msgs).wrap(Wrap { trim: false });
                f.render_widget(block, chunk);
                f.render_widget(paragraph, body_rect);
            }
            {
                let chunk = chunks[2];
                let block = Block::default().title("Messages").borders(Borders::TOP);
                let body_rect = block.inner(chunk);
                let paragraph = Paragraph::new(messages.clone()).wrap(Wrap { trim: false });
                f.render_widget(block, chunk);
                f.render_widget(paragraph, body_rect);
            }
        })?;
        // ~5 fps refresh; input is polled, not blocked on.
        thread::sleep(std::time::Duration::from_millis(200));
    }
    terminal.clear()?;
    Ok(())
}
|
use super::base::*;
use super::error::HackError;
use crate::hack_report;
use std::collections::HashMap;
use std::vec::Vec;
/**
* Recursive Descent Parser
*
* COMMAND: ACOMMAND
* | CCOMMAND
* | LCOMMAND
* ACOMMAND: AT VALUE
* VALUE: NUMBER | VARIABLE
* CCOMMAND: DEST COMP JUMP
* DEST: EMPTY | REGS
* REGS: M|D|MD|A|AM|AD|AMD
*
*/
/// Mutable state threaded through every `expect_*` parsing step: the token
/// stream, the cursor into it, and a back-reference to the owning `Parser`.
/// Fields are `Option`-wrapped so pieces can be moved in and out separately.
#[derive(Debug)]
pub struct ParserArg<'a> {
    pub parser: Option<&'a mut Parser>,
    pub tokens: Option<Box<Vec<Token>>>,
    // Cursor of the token currently being examined.
    pub index: Option<Box<usize>>,
    // Raw source text of the line being parsed (used in diagnostics).
    pub content: String,
    pub line_num: Option<Box<usize>>,
}
impl<'a> std::fmt::Display for ParserArg<'a> {
    // Debug-friendly dump: "[line]: source -> tokens: [...]".
    // NOTE(review): panics if `tokens` or `line_num` is `None`.
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        writeln!(
            f,
            "[{}]: {} -> tokens: {:?}",
            self.line_num(),
            self.content,
            self.tokens.as_ref().unwrap()
        )
    }
}
/// Payload of a parsed L-command (`(LABEL)`).
#[derive(Debug)]
pub struct LCmdResult {
    pub label: String,
}
/// Payload of a parsed C-command (`dest=comp;jump`; every part optional).
#[derive(Debug)]
pub struct CCmdResult {
    pub dest: Option<String>,
    pub comp: Option<String>,
    pub jump: Option<String>,
}
/// Payload of a parsed A-command (`@value`).
#[derive(Debug)]
pub struct ACmdResult {
    pub value: String,
}
/// Result of the most recent parse: the detected command type plus at most
/// one of the three per-command payloads.
#[derive(Debug)]
pub struct ParserResult {
    pub t: Option<CommandType>,
    pub lr: Option<LCmdResult>,
    pub cr: Option<CCmdResult>,
    pub ar: Option<ACmdResult>,
}
impl ParserResult {
    /// Drops the per-command payloads before parsing a new command.
    /// NOTE(review): `t` is deliberately left untouched — every call site
    /// overwrites it immediately after `clear()`.
    pub fn clear(&mut self) {
        self.lr = None;
        self.cr = None;
        self.ar = None;
    }
}
impl<'a> ParserArg<'a> {
    /// Moves the token cursor one step forward.
    ///
    /// Panics if `index` is `None` — the cursor must be installed before
    /// parsing starts.
    pub fn advance(&mut self) {
        // Direct deref-assign through the Box replaces the legacy
        // `let ref mut` binding the original used.
        **self.index.as_mut().unwrap() += 1;
    }
    /// Returns the source line number recorded for the command being parsed.
    ///
    /// Panics if `line_num` was never populated.
    pub fn line_num(&self) -> usize {
        **self.line_num.as_ref().unwrap()
    }
}
/// Assembler front-end state: the symbol table, the next free variable
/// address, and the result slot filled by the most recent parse.
#[derive(Debug)]
pub struct Parser {
    pub map: Option<HashMap<String, usize>>,
    pub varmem: Option<usize>, // variable memory
    pub result: Option<ParserResult>,
}
// NOTE(review): `warn` is the default lint level, so this attribute has no
// effect; `#[allow(unused_macros)]` was probably intended.
#[warn(unused_macros)]
// Generates `pub fn $func_name(parg)` that accepts exactly one token whose
// type matches pattern `$x`, advancing the cursor on success and reporting a
// HackError otherwise.
macro_rules! create_expect {
    ($func_name:ident, $x:pat) => {
        pub fn $func_name<'a>(
            parg: &'a mut ParserArg<'a>,
        ) -> Result<&'a mut ParserArg<'a>, Box<HackError>> {
            match &parg.tokens.as_ref().unwrap()[**(parg.index.as_ref().unwrap())].token_type {
                $x => {
                    parg.advance();
                    Ok(parg)
                }
                t => hack_report!(parg, format!("Expect {}, but got {:?}", stringify!($x), t)),
            }
        }
    };
}
// NOTE(review): `warn` is the default lint level, so this attribute has no
// effect; `#[allow(unused_macros)]` was probably intended.
#[warn(unused_macros)]
// Generates `pub fn $func_name(parg)` accepting a token whose type matches
// `$y` (or any extra pattern) AND whose text is a key of lookup table `$x`
// (e.g. DEST/COMP/JUMP); advances the cursor on success.
macro_rules! create_expect_predefined {
    ($func_name: ident, $x:ident, $y: pat $(, $extra: pat)*) => {
        pub fn $func_name<'a>(
            parg: &'a mut ParserArg<'a>,
        ) -> Result<&'a mut ParserArg<'a>, Box<HackError>> {
            let tokens = parg.tokens.as_ref().unwrap();
            //let parser = parg.parser.as_ref().unwrap();
            let curr = **parg.index.as_ref().unwrap();
            match tokens[curr].token_type {
                $y => {
                    if $x.contains_key(&tokens[curr].repr) {
                        parg.advance();
                        return Ok(parg);
                    }
                    hack_report!(
                        parg,
                        format!(
                            "{} is not defined in table {}!",
                            &tokens[curr].repr,
                            stringify!($x)
                        )
                    )
                }
                // The identical table check, repeated for every additional
                // accepted token-type pattern.
                $(
                    $extra => {
                        if $x.contains_key(&tokens[curr].repr) {
                            parg.advance();
                            return Ok(parg);
                        }
                        hack_report!(
                            parg,
                            format!(
                                "{} is not defined in table {}!",
                                &tokens[curr].repr,
                                stringify!($x)
                            )
                        )
                    }
                )*
                _ => hack_report!(
                    parg,
                    format!(
                        "Expected {} but found {:?}",
                        stringify!($y),
                        tokens[curr].token_type
                    )
                ),
            }
        }
    };
}
// Parses one batch of tokens at a time and
// hands the result back to the HPU.
impl Parser {
    /// Creates a parser with an empty symbol table; variable allocation
    /// starts at RAM address 16 (Hack convention: 0-15 are predefined).
    pub fn new() -> Parser {
        Parser {
            map: Some(HashMap::new()),
            varmem: Some(16),
            result: Some(ParserResult {
                t: None,
                ar: None,
                cr: None,
                lr: None,
            }),
        }
    }
    /// Entry point: dispatches on the first token to the A-, L- or
    /// C-command sub-parser.
    pub fn parse_command<'a>(
        parg: &'a mut ParserArg<'a>,
    ) -> Result<&'a mut ParserArg, Box<HackError>> {
        println!("{}", parg);
        let tokens = parg.tokens.as_mut().unwrap();
        match tokens[0].token_type {
            TOKENTYPE::AT => {
                return Parser::expect_a_command(parg);
            }
            TOKENTYPE::LEFTBRACE => Parser::expect_l_command(parg),
            _ => {
                return Parser::expect_c_command(parg);
            }
        }
    }
    // Single-token acceptors generated by `create_expect!`.
    create_expect!(expect_leftbrace, TOKENTYPE::LEFTBRACE);
    create_expect!(expect_rightbrace, TOKENTYPE::RIGHTBRACE);
    create_expect!(expect_equal, TOKENTYPE::EQUAL);
    create_expect!(expect_semicolon, TOKENTYPE::SEMICOLON);
    // Table-checked acceptors generated by `create_expect_predefined!`.
    create_expect_predefined!(expect_ccmd_dest, DEST, TOKENTYPE::SYMBOL);
    create_expect_predefined!(
        expect_ccmd_comp,
        COMP,
        TOKENTYPE::EXPRESSION,
        TOKENTYPE::SYMBOL,
        TOKENTYPE::NUMBER
    );
    create_expect_predefined!(expect_ccmd_jump, JUMP, TOKENTYPE::SYMBOL);
    /// L-command: `(LABEL)` — records the label into the parse result.
    pub fn expect_l_command<'a>(
        parg: &'a mut ParserArg<'a>,
    ) -> Result<&'a mut ParserArg<'a>, Box<HackError>> {
        match Parser::expect_leftbrace(parg)
            .and_then(Parser::expect_symbol_vl)
            .and_then(Parser::expect_rightbrace)
        {
            Ok(arg) => {
                // Token 1 is the label symbol between the braces.
                let label = arg.tokens.as_ref().unwrap()[1].repr.clone();
                let parser = arg.parser.as_mut().unwrap();
                let result = parser.result.as_mut().unwrap();
                result.clear();
                result.t = Some(CommandType::LCommand);
                result.lr = Some(LCmdResult {
                    label: label.clone(),
                });
                Ok(arg)
            }
            Err(e) => Err(e),
        }
    }
    /// C-command: initializes an empty `CCmdResult`, then fills it by
    /// recursive descent over `dest=comp;jump`.
    pub fn expect_c_command<'a>(
        parg: &'a mut ParserArg<'a>,
    ) -> Result<&'a mut ParserArg<'a>, Box<HackError>> {
        let parser = parg.parser.as_mut().unwrap();
        let result = parser.result.as_mut().unwrap();
        result.clear();
        result.t = Some(CommandType::CCommand);
        result.cr = Some(CCmdResult {
            dest: None,
            comp: None,
            jump: None,
        });
        Parser::expect_c_command_rec(parg)
    }
    /// Consumes the rest of a C-command using one token of lookahead:
    /// `X=` (dest), `;JMP` (jump) or a comp term. Note that fields are
    /// recorded BEFORE the expect_* validators run (see defect note below).
    pub fn expect_c_command_rec<'a>(
        parg: &'a mut ParserArg<'a>,
    ) -> Result<&'a mut ParserArg<'a>, Box<HackError>> {
        let parser = parg.parser.as_mut().unwrap();
        let result = parser.result.as_mut().unwrap();
        let tokens = parg.tokens.as_ref().unwrap();
        let curr = **parg.index.as_ref().unwrap();
        let len = tokens.len();
        if curr == len {
            return Ok(parg);
        } else if curr + 1 < len && tokens[curr + 1].token_type == TOKENTYPE::EQUAL {
            // this is a defect, but I have no time to fix the expect_* macro
            result.cr.as_mut().unwrap().dest = Some(tokens[curr].repr.clone());
            return Parser::expect_ccmd_dest(parg)
                .and_then(Parser::expect_equal)
                .and_then(Parser::expect_c_command_rec);
        } else if tokens[curr].token_type == TOKENTYPE::SEMICOLON {
            // NOTE(review): `curr + 1` is read unchecked — a trailing `;`
            // with no jump token would panic here.
            result.cr.as_mut().unwrap().jump = Some(tokens[curr + 1].repr.clone());
            return Parser::expect_semicolon(parg).and_then(Parser::expect_ccmd_jump);
        } else {
            result.cr.as_mut().unwrap().comp = Some(tokens[curr].repr.clone());
            return Parser::expect_ccmd_comp(parg).and_then(Parser::expect_c_command_rec);
        }
    }
    /// A-command: `@number` or `@symbol`.
    /// NOTE(review): looks at `tokens[1]` rather than the cursor, so this
    /// assumes the command always starts at index 0 — confirm with callers.
    pub fn expect_a_command<'a>(
        parg: &'a mut ParserArg<'a>,
    ) -> Result<&'a mut ParserArg<'a>, Box<HackError>> {
        parg.advance();
        //look ahead
        let tokens = parg.tokens.as_ref().unwrap();
        match tokens[1].token_type {
            TOKENTYPE::NUMBER => Parser::expect_number(parg),
            TOKENTYPE::SYMBOL => Parser::expect_symbol_va(parg),
            _ => hack_report!(parg, "Illegal A command"),
        }
    }
    /// Accepts a NUMBER token as an A-command literal and records it.
    pub fn expect_number<'a>(
        parg: &'a mut ParserArg<'a>,
    ) -> Result<&'a mut ParserArg<'a>, Box<HackError>> {
        let tokens = parg.tokens.as_ref().unwrap();
        let curr = **parg.index.as_ref().unwrap();
        let parser = parg.parser.as_mut().unwrap();
        match tokens[curr].token_type {
            TOKENTYPE::NUMBER => {
                let result = parser.result.as_mut().unwrap();
                result.t = Some(CommandType::ACommand);
                result.ar = Some(ACmdResult {
                    value: tokens[curr].repr.clone(),
                });
                parg.advance();
                return Ok(parg);
            }
            _ => {
                hack_report!(parg, "No number found")
            }
        }
    }
    /// Accepts a SYMBOL token as an A-command variable/label reference.
    pub fn expect_symbol_va<'a>(
        parg: &'a mut ParserArg<'a>,
    ) -> Result<&'a mut ParserArg<'a>, Box<HackError>> {
        let tokens = parg.tokens.as_ref().unwrap();
        let parser = parg.parser.as_mut().unwrap();
        let curr = **parg.index.as_ref().unwrap();
        match tokens[curr].token_type {
            TOKENTYPE::SYMBOL => {
                let result = parser.result.as_mut().unwrap();
                result.clear();
                result.t = Some(CommandType::ACommand);
                result.ar = Some(ACmdResult {
                    value: tokens[curr].repr.clone(),
                });
                parg.advance();
                return Ok(parg);
            }
            _ => hack_report!(parg, "No symbol found"),
        }
    }
    /// Accepts a SYMBOL token as a label name, rejecting predefined symbols.
    pub fn expect_symbol_vl<'a>(
        parg: &'a mut ParserArg<'a>,
    ) -> Result<&'a mut ParserArg<'a>, Box<HackError>> {
        let tokens = parg.tokens.as_ref().unwrap();
        let curr = **parg.index.as_ref().unwrap();
        println!("{:?}", tokens[curr]);
        match tokens[curr].token_type {
            TOKENTYPE::SYMBOL => {
                if PREDEFINE_SYMBOLS.contains_key(&tokens[curr].repr) {
                    hack_report!(parg, "Using reserved keyword as label is not allowed")
                }
                parg.advance();
                Ok(parg)
            }
            _ => hack_report!(parg, "No label found"),
        }
    }
}
#[cfg(test)]
mod tests {}
|
use super::MapsetTags;
use rand::seq::SliceRandom;
/// Progressive hint state for a title/artist guessing game.
pub struct Hints {
    pub artist_guessed: bool,
    // Number of hints handed out so far (saturating counter).
    hint_level: u8,
    // One flag per char of the title; `true` = char is revealed.
    title_mask: Vec<bool>,
    // Char indices still hidden, in random reveal order (popped from the back).
    indices: Vec<usize>,
    // Kept for the disabled tag-hint feature (see commented block in `get`).
    _tags: MapsetTags,
}
impl Hints {
    /// Builds hint state for `title`: the first character and all spaces
    /// start revealed, the rest are queued for reveal in random order.
    pub fn new(title: &str, tags: MapsetTags) -> Self {
        // Indices of chars that still need to be revealed
        let mut indices: Vec<_> = title
            .chars()
            .enumerate()
            .skip(1) // first char revealed immediately
            .filter(|(_, c)| *c != ' ') // spaces revealed immediately
            .map(|(i, _)| i)
            .collect();
        let mut rng = rand::thread_rng();
        indices.shuffle(&mut rng);
        // NOTE(review): capacity is the title's byte length but one flag per
        // char is pushed — harmless over-allocation for multi-byte titles.
        let mut title_mask = Vec::with_capacity(title.len());
        title_mask.push(true);
        for c in title.chars().skip(1) {
            title_mask.push(c == ' ');
        }
        Self {
            artist_guessed: false,
            hint_level: 0,
            title_mask,
            indices,
            _tags: tags,
        }
    }
    /// Produces the next hint, escalating per call: 1) word count + first
    /// letter, 2) masked artist (skipped once guessed), then one extra title
    /// character per call until the full title is the only hint left.
    ///
    /// NOTE(review): panics on an empty `title` or `artist` — both the
    /// `next().unwrap()` calls and `3 * artist.len() - 2` (usize underflow)
    /// assume non-empty strings; confirm callers guarantee that.
    pub fn get(&mut self, title: &str, artist: &str) -> String {
        self.hint_level = self.hint_level.saturating_add(1);
        if self.hint_level == 1 {
            let word_count = title.split(' ').count();
            format!(
                "Let me give you a hint: The title has {amount} \
                word{plural} and the starting letter is `{first}`",
                amount = word_count,
                plural = if word_count != 1 { "s" } else { "" },
                first = title.chars().next().unwrap(),
            )
        } else if self.hint_level == 2 && !self.artist_guessed {
            // Capacity sized for 1 first char + up to 3 bytes per masked char.
            let mut artist_hint = String::with_capacity(3 * artist.len() - 2);
            artist_hint.push(artist.chars().next().unwrap());
            for c in artist.chars().skip(1) {
                artist_hint.push(if c == ' ' { c } else { '▢' });
            }
            format!("Here's my second hint: The artist looks like `{artist_hint}`")
        // } else if !self.tags.is_empty()
        //     && ((self.hint_level == 2 && self.artist_guessed) || self.hint_level == 3)
        // {
        //     self.hint_level = self.hint_level.saturating_add(1);
        //     format!(
        //         "Another hint: The map's tags are `{}`",
        //         self.tags.join(", ")
        //     )
        } else if let Some(i) = self.indices.pop() {
            // Reveal one more random character of the title.
            self.title_mask[i] = true;
            let title_hint: String = self
                .title_mask
                .iter()
                .zip(title.chars())
                .map(|(mask, c)| if *mask { c } else { '▢' })
                .collect();
            format!("Slowly constructing the title: `{title_hint}`")
        } else {
            format!("Bruh the title is literally `{title}` xd")
        }
    }
}
|
use hacspec_hmac::*;
use hacspec_lib::*;
use hacspec_sha256::*;
// HASH_LEN for SHA256
// XXX: HMAC should probably expose this
const HASH_LEN: usize = 256 / 8;
/// Errors produced by HKDF-Expand.
#[derive(Debug)]
pub enum HkdfError {
    // Requested output length exceeds 255 * HASH_LEN (RFC 5869 limit).
    InvalidOutputLength,
}
/// Result alias used by `expand`.
pub type HkdfByteSeqResult = Result<ByteSeq, HkdfError>;
/// Extract a pseudo-random key from input key material (IKM) and optionally a salt.
/// Note that salt can be empty Bytes.
pub fn extract(salt: &ByteSeq, ikm: &ByteSeq) -> PRK {
    // RFC 5869: an absent salt is replaced by HASH_LEN zero bytes.
    let mut salt_or_zero = ByteSeq::new(HASH_LEN);
    if salt.len() > 0 {
        salt_or_zero = ByteSeq::from_seq(salt)
    };
    PRK::from_seq(&hmac(&salt_or_zero, ikm))
}
/// Concatenates `t | info | iteration` into the HMAC input of one
/// HKDF-Expand round; `iteration` is the single trailing counter byte.
fn build_hmac_txt(t: &ByteSeq, info: &ByteSeq, iteration: U8) -> ByteSeq {
    let mut out = ByteSeq::new(t.len() + info.len() + 1);
    out = out.update(0, t);
    out = out.update(t.len(), info);
    out[t.len() + info.len()] = iteration;
    out
}
/// Compute ceil(a/b), returning a usize.
fn div_ceil(a: usize, b: usize) -> usize {
    // Integer division rounds down; bump the quotient whenever a remainder
    // exists. Kept in plain assignment form for hacspec compatibility.
    let mut quotient = a / b;
    if a % b != 0 {
        quotient = quotient + 1;
    }
    quotient
}
/// Enforces the RFC 5869 length limit (l <= 255 * HASH_LEN) and returns the
/// number of HMAC blocks needed to produce `l` output bytes.
fn check_output_limit(l: usize) -> Result<usize, HkdfError> {
    let n = div_ceil(l, HASH_LEN);
    if n <= 255 {
        Result::<usize, HkdfError>::Ok(n)
    } else {
        Result::<usize, HkdfError>::Err(HkdfError::InvalidOutputLength)
    }
}
/// Expand a key prk, using potentially empty info, and output length l.
/// Key prk must be at least of length HASH_LEN.
/// Output length l can be at most 255*HASH_LEN.
pub fn expand(prk: &ByteSeq, info: &ByteSeq, l: usize) -> HkdfByteSeqResult {
    let n = check_output_limit(l)?;
    let mut t_i = PRK::new(); // PRK is of length HASH_SIZE
    // Accumulates T(1) | T(2) | ... | T(n); sliced down to `l` at the end.
    let mut t = ByteSeq::new(n * HASH_SIZE);
    for i in 0..n {
        // T(i) = HMAC(prk, T(i-1) | info | i) with T(0) empty and a 1-based
        // counter byte (RFC 5869, Section 2.3).
        let hmac_txt_in = if i == 0 {
            build_hmac_txt(&ByteSeq::new(0), info, U8((i as u8) + 1u8))
        } else {
            build_hmac_txt(&ByteSeq::from_seq(&t_i), info, U8((i as u8) + 1u8))
        };
        t_i = hmac(prk, &hmac_txt_in);
        t = t.update(i * t_i.len(), &t_i);
    }
    HkdfByteSeqResult::Ok(t.slice(0, l))
}
|
// Copyright (C) 2020 Stephane Raux. Distributed under the MIT license.
//! Defines a monotonic clock whose values are instances of `Duration`.
//!
//! # Why not `std::time::Instant`?
//!
//! `Instant` is opaque and cannot be serialized.
//!
//! # Example
//!
//! ```rust
//! let mut clock = moniclock::Clock::new();
//! let t0 = clock.elapsed();
//! let sleep_duration = std::time::Duration::from_millis(100);
//! std::thread::sleep(sleep_duration);
//! let t1 = clock.elapsed();
//! assert!(t1 - t0 >= sleep_duration);
//! ```
#![deny(warnings)]
#![deny(missing_docs)]
use std::time::Duration;
#[cfg(unix)]
#[path = "unix.rs"]
mod platform;
#[cfg(windows)]
#[path = "windows.rs"]
mod platform;
use platform::Clock as Inner;
/// Monotonic clock
#[derive(Debug)]
pub struct Clock {
    // Backend selected at compile time via the #[path]-chosen platform module.
    inner: Inner,
}
impl Clock {
    /// Instantiates a `Clock`.
    ///
    /// A given `Clock` instance is monotonic.
    pub fn new() -> Self {
        Self {
            inner: Inner::new(),
        }
    }
    /// Returns the time elapsed since some arbitrary epoch. The epoch is constant across all
    /// `Clock` instances for as long as the system runs.
    ///
    /// This function is monotonically increasing. Each returned `Duration` is greater or equal to
    /// any previous `Duration` returned for the same `Clock` instance.
    pub fn elapsed(&mut self) -> Duration {
        self.inner.elapsed()
    }
}
#[cfg(test)]
mod tests {
    use crate::Clock;
    use std::{
        thread::sleep,
        time::Duration,
    };
    // Smoke test: construction must not panic.
    #[test]
    fn a_clock_is_instantiated() {
        let _ = Clock::new();
    }
    // Two successive readings must never go backwards.
    #[test]
    fn clock_is_monotonic() {
        let mut clock = Clock::new();
        let t0 = clock.elapsed();
        sleep(Duration::from_millis(100));
        let t1 = clock.elapsed();
        assert!(t0 <= t1);
    }
    // Elapsed time across a sleep must be at least the sleep duration.
    #[test]
    fn clock_reports_correct_duration() {
        let mut clock = Clock::new();
        let t0 = clock.elapsed();
        let sleep_duration = Duration::from_millis(100);
        sleep(sleep_duration);
        let t1 = clock.elapsed();
        assert!(t1 - t0 >= sleep_duration);
    }
}
|
pub mod enclave_tls;
mod protocol;
|
use crate::stub_servers::stub_server_tcp::StubServerTcp;
use crate::*;
use futures_intrusive::sync::ManualResetEvent;
use std::error::Error;
use std::sync::Arc;
use tokio::task::JoinHandle;
/// Spawns the stub server matching `config.protocol` and returns its task
/// handle; the server signals `server_started_event` once listening and
/// shuts down when `stop_event` is set.
///
/// Panics for protocols without a stub implementation (everything but TCP).
#[tracing::instrument(name = "Start running stub server", level = "debug", skip(stop_event))]
pub fn run(
    config: &RnpStubServerConfig,
    stop_event: Arc<ManualResetEvent>,
    server_started_event: Arc<ManualResetEvent>,
) -> JoinHandle<Result<(), Box<dyn Error + Send + Sync>>> {
    match config.protocol {
        RnpSupportedProtocol::TCP => return StubServerTcp::run_new(config.clone(), stop_event, server_started_event),
        _ => panic!("Protocol {} is not supported!", config.protocol),
    }
}
|
extern crate lazy_static;
extern crate unicode_xid;
extern crate z3;
use std::fs::File;
use std::io::Read;
use std::path::Path;
mod implicit_parse {
#![allow(clippy::all)]
include!(concat!(env!("OUT_DIR"), "/implicit_parse.rs"));
}
#[macro_use]
mod common;
mod env;
mod explicit;
mod hindley_milner;
mod implicit;
mod refined;
mod tok;
mod typed;
#[macro_use]
mod liquid;
mod eval;
mod lambdal;
use crate::common::Result;
use crate::liquid::q;
/// Prints the usage message (via `die!`) and terminates; never returns.
fn usage() -> ! {
    // argv[0] for the message; placeholder when unavailable.
    let argv0 = std::env::args()
        .next()
        .unwrap_or_else(|| "<mdl>".to_string());
    die!(
        "Usage: {} [OPTION...] PATH\n\
        Type inference.\n\
        \n\
        Options:\n\
        -help:\tshow this message",
        argv0
    );
}
/// Parses command-line arguments and returns the input path, or an empty
/// string when no path was given (meaning: read the program from stdin).
///
/// Exits the process via `usage()` on `-help` or any unknown `-` option.
fn parse_args() -> String {
    for arg in std::env::args().skip(1) {
        if arg == "-help" {
            usage();
        } else if arg.starts_with('-') {
            // Idiomatic replacement for `arg.chars().next().unwrap_or(' ') == '-'`;
            // identical semantics, including empty strings (not a flag).
            eprintln!("unknown arg '{}'", arg);
            usage();
        } else {
            // First non-flag argument is taken as the input path.
            return arg;
        }
    }
    // no args? reading from stdin then
    String::from("")
}
/// Reads an entire implicit-typed program from `file` and parses it into an
/// AST; read and parse failures are reported as `err!` results.
pub fn implicit_open<R: Read>(file: &mut R) -> Result<Box<implicit::Expr>> {
    let input = {
        let mut s = String::new();
        if let Err(why) = file.read_to_string(&mut s) {
            return err!("read: {}", why);
        }
        s
    };
    // The tokenizer borrows `input`; the generated parser consumes both.
    let lexer = tok::Tokenizer::new(&input);
    let parser = crate::implicit_parse::ProgramParser::new();
    match parser.parse(&input, lexer) {
        Ok(v) => Ok(Box::new(v)),
        Err(e) => err!("parse_Program: {:?}", e),
    }
}
/// Pipeline driver: parse (file or stdin) -> ANF conversion -> liquid type
/// inference over a fixed qualifier set -> print the refined environment ->
/// interpret the program and print its value.
fn main() {
    let parse_result = match parse_args().as_ref() {
        "" => implicit_open(&mut std::io::stdin()),
        ref s => {
            let path = Path::new(s);
            let mut file = match File::open(path) {
                // The `description` method of `io::Error` returns a string that
                // describes the error
                Err(why) => die!("open({}): {}", path.display(), why,),
                Ok(file) => file,
            };
            implicit_open(&mut file)
        }
    };
    let i_expr = match parse_result {
        Ok(expr) => expr,
        Err(e) => die!("implicit_open: {}", e),
    };
    let (anf_expr, type_env) = match lambdal::anf(&i_expr) {
        Ok(tuple) => tuple,
        Err(e) => die!("anf: {}", e),
    };
    // define the liquid type templates
    // (★ presumably stands for per-program constants filled in by the
    // inference engine — confirm in liquid::q.)
    let qs = [
        q("0 <= ν").unwrap(),
        q("★ <= ν").unwrap(),
        q("ν < ★").unwrap(),
        //q("ν < len ★").unwrap(),
    ];
    let refined_env = match liquid::infer(&anf_expr, &type_env, &qs[..]) {
        Ok(env) => env,
        Err(e) => die!("infer: {}", e),
    };
    // println!("refined:\n\n{:?}\n", refined);
    println!("\nrefined Γ:");
    // Sorted ids give deterministic output order.
    let mut ids: Vec<_> = refined_env.keys().clone().collect();
    ids.sort();
    for id in ids {
        println!("{}:\t{:?}", id, refined_env[id]);
    }
    println!();
    let val = eval::interpret(&anf_expr);
    println!("result:\n\n{:?}\n", val);
}
|
use std::collections::HashMap;
use std::ffi::{CString, CStr};
/// Interning cache mapping Rust strings to heap-allocated C strings, so the
/// same `&CStr` can be handed out repeatedly without re-allocating.
pub struct CStringCache {
    cache: HashMap<String, CString>
}
impl CStringCache {
    /// Creates an empty cache.
    pub fn new() -> CStringCache {
        let cache = HashMap::new();
        CStringCache { cache }
    }
    /// Returns the cached NUL-terminated form of `string`, converting and
    /// inserting it on first use. Panics if `string` contains an interior
    /// NUL byte.
    pub fn intern(&mut self, string: &str) -> &CStr {
        let make_cstring = || CString::new(string).unwrap();
        self.cache
            .entry(string.to_owned())
            .or_insert_with(make_cstring)
    }
}
|
#![allow(dead_code)]
use std::sync::{Arc, Mutex};
use std::{thread, time};
/// A bank account whose balance lives behind `Arc<Mutex<_>>` so it can be
/// shared across threads.
#[derive(Debug)]
pub struct BankAccount {
    account_no: String,
    money: Arc<Mutex<f64>>
}
impl BankAccount {
    /// Subtracts `amount` from the balance and returns the new balance
    /// (which may go negative — no overdraft check).
    ///
    /// Takes `&self`: the inner `Mutex` already serializes access, so
    /// requiring `&mut self` only forced callers to add a second lock
    /// around the whole account. (The "withdrawl" spelling is kept — it is
    /// the public API name.)
    pub fn withdrawl(&self, amount: f64) -> f64 {
        let mut balance = self.money.lock().unwrap();
        *balance -= amount;
        *balance
    }
    /// Adds `amount` to the balance and returns the new balance.
    pub fn deposit(&self, amount: f64) -> f64 {
        let mut balance = self.money.lock().unwrap();
        *balance += amount;
        *balance
    }
}
/// Per-person transaction sizes used by the demo threads: each loop
/// iteration withdraws/deposits exactly these amounts.
#[derive(Debug)]
struct Person {
    name: String,
    withdrawl_limit: f64,
    deposit_limit: f64
}
/// Demonstrates shared-state concurrency patterns step by step:
/// t1/t2 each own their account outright; t3 moves an account into a single
/// thread; t4/t5/t6 share one account through `Arc<Mutex<BankAccount>>`,
/// with two withdrawing threads racing one depositing thread.
///
/// NOTE(review): t6 only breaks when a deposit still leaves the balance
/// negative. If t4/t5 exit while the deficit is smaller than one deposit,
/// the balance turns non-negative again and t6 loops forever — so
/// `t6.join()` (and this function) may never return. Timing dependent.
pub fn shopping_arc_test() {
    let mut ba = BankAccount {
        account_no: "ICICI".to_string(),
        money: Arc::new(Mutex::new(10000 as f64))
    };
    let person = Person {
        name: "Adult 1".to_string(),
        withdrawl_limit: 1910 as f64,
        deposit_limit: 1200 as f64,
    };
    // t1: account moved into the thread — nothing shared.
    let t1 = thread::spawn(move || {
        loop {
            let money = ba.withdrawl(person.withdrawl_limit);
            thread::sleep(time::Duration::from_millis(100));
            if money <= 0.0 {
                println!("{:?} -> {} is out of money !", person.name, ba.account_no);
                break;
            }
        }
    });
    let mut ba1 = BankAccount {
        account_no: "YES!".to_string(),
        money: Arc::new(Mutex::new(10000 as f64))
    };
    let person1 = Person {
        name: "Adult 2".to_string(),
        withdrawl_limit: 1257 as f64,
        deposit_limit: 1200 as f64,
    };
    // t2: same pattern as t1 with an independent account.
    let t2 = thread::spawn(move || {
        loop {
            let money = ba1.withdrawl(person1.withdrawl_limit);
            thread::sleep(time::Duration::from_millis(100));
            if money <= 0.0 {
                println!("{:?} -> {} is out of money !", person1.name, ba1.account_no);
                break;
            }
        }
    });
    t1.join().unwrap();
    t2.join().unwrap();
    let mut ba2 = BankAccount {
        account_no: "HDFC".to_string(),
        money: Arc::new(Mutex::new(10000 as f64))
    };
    // ba3 is the account shared by t4/t5/t6: the whole struct sits behind
    // Arc<Mutex<..>> because `withdrawl`/`deposit` take `&mut self`.
    let ba3 = Arc::new(Mutex::new(BankAccount {
        account_no: "SBI".to_string(),
        money: Arc::new(Mutex::new(10000 as f64))
    }));
    let person2 = Arc::new(Person {
        name: "Adult 1".to_string(),
        withdrawl_limit: 786 as f64,
        deposit_limit: 800 as f64
    });
    let person2_clone1 = person2.clone();
    // t3: account moved into the thread; only the Person is shared via Arc.
    let t3 = thread::spawn(move || {
        loop {
            let money = ba2.withdrawl(person2_clone1.withdrawl_limit);
            thread::sleep(time::Duration::from_millis(100));
            if money <= 0.0 {
                println!("{:?} -> {} is out of money !", person2_clone1.name, ba2.account_no);
                break;
            }
        }
    });
    let person2_clone2 = person2.clone();
    let ba3_clone1 = ba3.clone();
    // t4: withdraws from the shared account; the inner block scopes the
    // MutexGuard so the lock is released before sleeping.
    let t4 = thread::spawn(move || {
        loop {
            {
                let mut ba3_unwrapped = ba3_clone1.lock().unwrap();
                let money = ba3_unwrapped.withdrawl(person2_clone2.withdrawl_limit);
                if money <= 0.0 {
                    println!("T4 {:?} -> {} is out of money !", person2_clone2.name, ba3_unwrapped.account_no);
                    break;
                } else {
                    println!("T4 withdraw -> balance {:?}", money);
                }
            }
            thread::sleep(time::Duration::from_millis(100));
        }
    });
    let person2_clone3 = person2.clone();
    let ba3_clone1 = ba3.clone();
    // t5: second withdrawer racing t4 on the same account.
    let t5 = thread::spawn(move || {
        loop {
            {
                let mut ba3_unwrapped = ba3_clone1.lock().unwrap();
                let money = ba3_unwrapped.withdrawl(person2_clone3.withdrawl_limit);
                if money <= 0.0 {
                    println!("{:?} -> {} is out of money !", person2_clone3.name, ba3_unwrapped.account_no);
                    break;
                } else {
                    println!("T5 withdraw -> balance {:?}", money);
                }
            }
            thread::sleep(time::Duration::from_millis(100));
        }
    });
    let person2_clone3 = person2.clone();
    let ba3_clone2 = ba3.clone();
    // t6: depositor; exits only when a deposit leaves the balance negative
    // (see the function-level NOTE about possible non-termination).
    let t6 = thread::spawn(move || {
        loop {
            {
                let mut ba3_unwrapped = ba3_clone2.lock().unwrap();
                let money = ba3_unwrapped.deposit(person2_clone3.deposit_limit);
                if money >= 0.0 {
                    println!("T6 {:?} -> {} is deposited money -> {}!", person2_clone3.name, ba3_unwrapped.account_no, money);
                } else {
                    break;
                }
            }
            thread::sleep(time::Duration::from_millis(300));
        }
    });
    t3.join().unwrap();
    t4.join().unwrap();
    t5.join().unwrap();
    t6.join().unwrap();
}
|
use rusoto_core::{RusotoError, RusotoFuture};
use rusoto_s3::CreateBucketError::{BucketAlreadyExists, BucketAlreadyOwnedByYou};
use rusoto_s3::*;
use std::cell::RefCell;
use std::io::Read;
use std::rc::Rc;
/// Captured arguments of a `put_object` call, with the streaming body
/// buffered into `body` so tests can assert on it.
#[derive(Debug)]
pub struct PutObjectData {
    pub bucket: String,
    pub key: String,
    pub body: Vec<u8>,
    pub content_md5: Option<String>,
}
/// Test double for the rusoto `S3` trait: records incoming requests into
/// shared `Rc<RefCell<...>>` vectors and replies with configurable canned
/// results. Only the operations the code under test exercises are
/// implemented; all others are `unimplemented!()`.
#[derive(Default)]
pub struct S3Mock {
    pub create_bucket_requests: Rc<RefCell<Vec<CreateBucketRequest>>>,
    // When set, `create_bucket` replies with this error instead of success.
    pub create_bucket_error: Option<CreateBucketError>,
    pub put_bucket_policy_requests: Rc<RefCell<Vec<PutBucketPolicyRequest>>>,
    // When true, `put_bucket_policy` fails with a parse error.
    pub put_bucket_policy_error: bool,
    pub put_object_requests: Rc<RefCell<Vec<PutObjectData>>>,
}
impl S3 for S3Mock {
    /// Records the request, then replies with the configured canned outcome:
    /// success by default, or the preset `create_bucket_error`.
    fn create_bucket(
        &self,
        request: CreateBucketRequest,
    ) -> RusotoFuture<CreateBucketOutput, CreateBucketError> {
        self.create_bucket_requests.borrow_mut().push(request);
        match &self.create_bucket_error {
            None => Ok(Default::default()).into(),
            Some(e) => match e {
                // The stored error is only borrowed here, so an equivalent
                // error is rebuilt from its message for the reply.
                BucketAlreadyOwnedByYou(msg) => Err(RusotoError::Service(BucketAlreadyOwnedByYou(
                    msg.to_string(),
                )))
                .into(),
                BucketAlreadyExists(msg) => {
                    Err(RusotoError::Service(BucketAlreadyExists(msg.to_string()))).into()
                }
            },
        }
    }
    /// Always succeeds; this request is not recorded.
    fn delete_public_access_block(
        &self,
        _input: DeletePublicAccessBlockRequest,
    ) -> RusotoFuture<(), DeletePublicAccessBlockError> {
        Ok(()).into()
    }
    /// Records the request; fails with a parse error when
    /// `put_bucket_policy_error` is set.
    fn put_bucket_policy(
        &self,
        request: PutBucketPolicyRequest,
    ) -> RusotoFuture<(), PutBucketPolicyError> {
        self.put_bucket_policy_requests.borrow_mut().push(request);
        if self.put_bucket_policy_error {
            Err(RusotoError::ParseError("".to_owned())).into()
        } else {
            Ok(()).into()
        }
    }
    /// Drains the streamed body into memory and records the call.
    /// NOTE(review): the `unwrap`s assume the test always supplies a body
    /// and that reading it cannot fail.
    fn put_object(
        &self,
        request: PutObjectRequest,
    ) -> RusotoFuture<PutObjectOutput, PutObjectError> {
        let mut body = vec![];
        request
            .body
            .unwrap()
            .into_blocking_read()
            .read_to_end(&mut body)
            .unwrap();
        self.put_object_requests.borrow_mut().push(PutObjectData {
            bucket: request.bucket,
            key: request.key,
            body,
            content_md5: request.content_md5,
        });
        Ok(Default::default()).into()
    }
fn abort_multipart_upload(
&self,
_input: AbortMultipartUploadRequest,
) -> RusotoFuture<AbortMultipartUploadOutput, AbortMultipartUploadError> {
unimplemented!();
}
fn complete_multipart_upload(
&self,
_: CompleteMultipartUploadRequest,
) -> RusotoFuture<CompleteMultipartUploadOutput, CompleteMultipartUploadError> {
unimplemented!()
}
fn copy_object(&self, _: CopyObjectRequest) -> RusotoFuture<CopyObjectOutput, CopyObjectError> {
unimplemented!()
}
fn create_multipart_upload(
&self,
_: CreateMultipartUploadRequest,
) -> RusotoFuture<CreateMultipartUploadOutput, CreateMultipartUploadError> {
unimplemented!()
}
fn delete_bucket(&self, _: DeleteBucketRequest) -> RusotoFuture<(), DeleteBucketError> {
unimplemented!()
}
fn delete_bucket_analytics_configuration(
&self,
_: DeleteBucketAnalyticsConfigurationRequest,
) -> RusotoFuture<(), DeleteBucketAnalyticsConfigurationError> {
unimplemented!()
}
fn delete_bucket_cors(
&self,
_: DeleteBucketCorsRequest,
) -> RusotoFuture<(), DeleteBucketCorsError> {
unimplemented!()
}
fn delete_bucket_encryption(
&self,
_: DeleteBucketEncryptionRequest,
) -> RusotoFuture<(), DeleteBucketEncryptionError> {
unimplemented!()
}
fn delete_bucket_inventory_configuration(
&self,
_: DeleteBucketInventoryConfigurationRequest,
) -> RusotoFuture<(), DeleteBucketInventoryConfigurationError> {
unimplemented!()
}
fn delete_bucket_lifecycle(
&self,
_: DeleteBucketLifecycleRequest,
) -> RusotoFuture<(), DeleteBucketLifecycleError> {
unimplemented!()
}
fn delete_bucket_metrics_configuration(
&self,
_: DeleteBucketMetricsConfigurationRequest,
) -> RusotoFuture<(), DeleteBucketMetricsConfigurationError> {
unimplemented!()
}
fn delete_bucket_policy(
&self,
_: DeleteBucketPolicyRequest,
) -> RusotoFuture<(), DeleteBucketPolicyError> {
unimplemented!()
}
// Stub implementation of the rusoto `S3` trait: every operation panics via
// `unimplemented!()`. This serves as a test double where only the calls a
// given test actually exercises are expected to be reached.
fn delete_bucket_replication(
&self,
_: DeleteBucketReplicationRequest,
) -> RusotoFuture<(), DeleteBucketReplicationError> {
unimplemented!()
}
fn delete_bucket_tagging(
&self,
_: DeleteBucketTaggingRequest,
) -> RusotoFuture<(), DeleteBucketTaggingError> {
unimplemented!()
}
fn delete_bucket_website(
&self,
_: DeleteBucketWebsiteRequest,
) -> RusotoFuture<(), DeleteBucketWebsiteError> {
unimplemented!()
}
fn delete_object(
&self,
_: DeleteObjectRequest,
) -> RusotoFuture<DeleteObjectOutput, DeleteObjectError> {
unimplemented!()
}
fn delete_object_tagging(
&self,
_: DeleteObjectTaggingRequest,
) -> RusotoFuture<DeleteObjectTaggingOutput, DeleteObjectTaggingError> {
unimplemented!()
}
fn delete_objects(
&self,
_: DeleteObjectsRequest,
) -> RusotoFuture<DeleteObjectsOutput, DeleteObjectsError> {
unimplemented!()
}
fn get_bucket_accelerate_configuration(
&self,
_: GetBucketAccelerateConfigurationRequest,
) -> RusotoFuture<GetBucketAccelerateConfigurationOutput, GetBucketAccelerateConfigurationError>
{
unimplemented!()
}
fn get_bucket_acl(
&self,
_: GetBucketAclRequest,
) -> RusotoFuture<GetBucketAclOutput, GetBucketAclError> {
unimplemented!()
}
fn get_bucket_analytics_configuration(
&self,
_: GetBucketAnalyticsConfigurationRequest,
) -> RusotoFuture<GetBucketAnalyticsConfigurationOutput, GetBucketAnalyticsConfigurationError>
{
unimplemented!()
}
fn get_bucket_cors(
&self,
_: GetBucketCorsRequest,
) -> RusotoFuture<GetBucketCorsOutput, GetBucketCorsError> {
unimplemented!()
}
fn get_bucket_encryption(
&self,
_: GetBucketEncryptionRequest,
) -> RusotoFuture<GetBucketEncryptionOutput, GetBucketEncryptionError> {
unimplemented!()
}
fn get_bucket_inventory_configuration(
&self,
_: GetBucketInventoryConfigurationRequest,
) -> RusotoFuture<GetBucketInventoryConfigurationOutput, GetBucketInventoryConfigurationError>
{
unimplemented!()
}
fn get_bucket_lifecycle(
&self,
_: GetBucketLifecycleRequest,
) -> RusotoFuture<GetBucketLifecycleOutput, GetBucketLifecycleError> {
unimplemented!()
}
fn get_bucket_lifecycle_configuration(
&self,
_: GetBucketLifecycleConfigurationRequest,
) -> RusotoFuture<GetBucketLifecycleConfigurationOutput, GetBucketLifecycleConfigurationError>
{
unimplemented!()
}
fn get_bucket_location(
&self,
_: GetBucketLocationRequest,
) -> RusotoFuture<GetBucketLocationOutput, GetBucketLocationError> {
unimplemented!()
}
fn get_bucket_logging(
&self,
_: GetBucketLoggingRequest,
) -> RusotoFuture<GetBucketLoggingOutput, GetBucketLoggingError> {
unimplemented!()
}
fn get_bucket_metrics_configuration(
&self,
_: GetBucketMetricsConfigurationRequest,
) -> RusotoFuture<GetBucketMetricsConfigurationOutput, GetBucketMetricsConfigurationError> {
unimplemented!()
}
// Note: the deprecated notification getter shares the request type with the
// newer `get_bucket_notification_configuration` below but returns the
// deprecated output shape.
fn get_bucket_notification(
&self,
_: GetBucketNotificationConfigurationRequest,
) -> RusotoFuture<NotificationConfigurationDeprecated, GetBucketNotificationError> {
unimplemented!()
}
fn get_bucket_notification_configuration(
&self,
_: GetBucketNotificationConfigurationRequest,
) -> RusotoFuture<NotificationConfiguration, GetBucketNotificationConfigurationError> {
unimplemented!()
}
fn get_bucket_policy(
&self,
_: GetBucketPolicyRequest,
) -> RusotoFuture<GetBucketPolicyOutput, GetBucketPolicyError> {
unimplemented!()
}
fn get_bucket_replication(
&self,
_: GetBucketReplicationRequest,
) -> RusotoFuture<GetBucketReplicationOutput, GetBucketReplicationError> {
unimplemented!()
}
fn get_bucket_request_payment(
&self,
_: GetBucketRequestPaymentRequest,
) -> RusotoFuture<GetBucketRequestPaymentOutput, GetBucketRequestPaymentError> {
unimplemented!()
}
fn get_bucket_tagging(
&self,
_: GetBucketTaggingRequest,
) -> RusotoFuture<GetBucketTaggingOutput, GetBucketTaggingError> {
unimplemented!()
}
fn get_bucket_versioning(
&self,
_: GetBucketVersioningRequest,
) -> RusotoFuture<GetBucketVersioningOutput, GetBucketVersioningError> {
unimplemented!()
}
fn get_bucket_website(
&self,
_: GetBucketWebsiteRequest,
) -> RusotoFuture<GetBucketWebsiteOutput, GetBucketWebsiteError> {
unimplemented!()
}
fn get_object(&self, _: GetObjectRequest) -> RusotoFuture<GetObjectOutput, GetObjectError> {
unimplemented!()
}
fn get_object_acl(
&self,
_: GetObjectAclRequest,
) -> RusotoFuture<GetObjectAclOutput, GetObjectAclError> {
unimplemented!()
}
fn get_object_tagging(
&self,
_: GetObjectTaggingRequest,
) -> RusotoFuture<GetObjectTaggingOutput, GetObjectTaggingError> {
unimplemented!()
}
fn get_object_torrent(
&self,
_: GetObjectTorrentRequest,
) -> RusotoFuture<GetObjectTorrentOutput, GetObjectTorrentError> {
unimplemented!()
}
fn head_bucket(&self, _: HeadBucketRequest) -> RusotoFuture<(), HeadBucketError> {
unimplemented!()
}
fn head_object(&self, _: HeadObjectRequest) -> RusotoFuture<HeadObjectOutput, HeadObjectError> {
unimplemented!()
}
fn list_bucket_analytics_configurations(
&self,
_: ListBucketAnalyticsConfigurationsRequest,
) -> RusotoFuture<ListBucketAnalyticsConfigurationsOutput, ListBucketAnalyticsConfigurationsError>
{
unimplemented!()
}
fn list_bucket_inventory_configurations(
&self,
_: ListBucketInventoryConfigurationsRequest,
) -> RusotoFuture<ListBucketInventoryConfigurationsOutput, ListBucketInventoryConfigurationsError>
{
unimplemented!()
}
fn list_bucket_metrics_configurations(
&self,
_: ListBucketMetricsConfigurationsRequest,
) -> RusotoFuture<ListBucketMetricsConfigurationsOutput, ListBucketMetricsConfigurationsError>
{
unimplemented!()
}
fn list_buckets(&self) -> RusotoFuture<ListBucketsOutput, ListBucketsError> {
unimplemented!()
}
fn list_multipart_uploads(
&self,
_: ListMultipartUploadsRequest,
) -> RusotoFuture<ListMultipartUploadsOutput, ListMultipartUploadsError> {
unimplemented!()
}
fn list_object_versions(
&self,
_: ListObjectVersionsRequest,
) -> RusotoFuture<ListObjectVersionsOutput, ListObjectVersionsError> {
unimplemented!()
}
fn list_objects(
&self,
_: ListObjectsRequest,
) -> RusotoFuture<ListObjectsOutput, ListObjectsError> {
unimplemented!()
}
fn list_objects_v2(
&self,
_: ListObjectsV2Request,
) -> RusotoFuture<ListObjectsV2Output, ListObjectsV2Error> {
unimplemented!()
}
fn list_parts(&self, _: ListPartsRequest) -> RusotoFuture<ListPartsOutput, ListPartsError> {
unimplemented!()
}
fn put_bucket_accelerate_configuration(
&self,
_: PutBucketAccelerateConfigurationRequest,
) -> RusotoFuture<(), PutBucketAccelerateConfigurationError> {
unimplemented!()
}
fn put_bucket_acl(&self, _: PutBucketAclRequest) -> RusotoFuture<(), PutBucketAclError> {
unimplemented!()
}
fn put_bucket_analytics_configuration(
&self,
_: PutBucketAnalyticsConfigurationRequest,
) -> RusotoFuture<(), PutBucketAnalyticsConfigurationError> {
unimplemented!()
}
fn put_bucket_cors(&self, _: PutBucketCorsRequest) -> RusotoFuture<(), PutBucketCorsError> {
unimplemented!()
}
fn put_bucket_encryption(
&self,
_: PutBucketEncryptionRequest,
) -> RusotoFuture<(), PutBucketEncryptionError> {
unimplemented!()
}
fn put_bucket_inventory_configuration(
&self,
_: PutBucketInventoryConfigurationRequest,
) -> RusotoFuture<(), PutBucketInventoryConfigurationError> {
unimplemented!()
}
fn put_bucket_lifecycle(
&self,
_: PutBucketLifecycleRequest,
) -> RusotoFuture<(), PutBucketLifecycleError> {
unimplemented!()
}
fn put_bucket_lifecycle_configuration(
&self,
_: PutBucketLifecycleConfigurationRequest,
) -> RusotoFuture<(), PutBucketLifecycleConfigurationError> {
unimplemented!()
}
fn put_bucket_logging(
&self,
_: PutBucketLoggingRequest,
) -> RusotoFuture<(), PutBucketLoggingError> {
unimplemented!()
}
fn put_bucket_metrics_configuration(
&self,
_: PutBucketMetricsConfigurationRequest,
) -> RusotoFuture<(), PutBucketMetricsConfigurationError> {
unimplemented!()
}
fn put_bucket_notification(
&self,
_: PutBucketNotificationRequest,
) -> RusotoFuture<(), PutBucketNotificationError> {
unimplemented!()
}
fn put_bucket_notification_configuration(
&self,
_: PutBucketNotificationConfigurationRequest,
) -> RusotoFuture<(), PutBucketNotificationConfigurationError> {
unimplemented!()
}
fn put_bucket_replication(
&self,
_: PutBucketReplicationRequest,
) -> RusotoFuture<(), PutBucketReplicationError> {
unimplemented!()
}
fn put_bucket_request_payment(
&self,
_: PutBucketRequestPaymentRequest,
) -> RusotoFuture<(), PutBucketRequestPaymentError> {
unimplemented!()
}
fn put_bucket_tagging(
&self,
_: PutBucketTaggingRequest,
) -> RusotoFuture<(), PutBucketTaggingError> {
unimplemented!()
}
fn put_bucket_versioning(
&self,
_: PutBucketVersioningRequest,
) -> RusotoFuture<(), PutBucketVersioningError> {
unimplemented!()
}
fn put_bucket_website(
&self,
_: PutBucketWebsiteRequest,
) -> RusotoFuture<(), PutBucketWebsiteError> {
unimplemented!()
}
fn put_object_acl(
&self,
_: PutObjectAclRequest,
) -> RusotoFuture<PutObjectAclOutput, PutObjectAclError> {
unimplemented!()
}
fn put_object_tagging(
&self,
_: PutObjectTaggingRequest,
) -> RusotoFuture<PutObjectTaggingOutput, PutObjectTaggingError> {
unimplemented!()
}
fn restore_object(
&self,
_: RestoreObjectRequest,
) -> RusotoFuture<RestoreObjectOutput, RestoreObjectError> {
unimplemented!()
}
fn select_object_content(
&self,
_: SelectObjectContentRequest,
) -> RusotoFuture<SelectObjectContentOutput, SelectObjectContentError> {
unimplemented!()
}
fn upload_part(&self, _: UploadPartRequest) -> RusotoFuture<UploadPartOutput, UploadPartError> {
unimplemented!()
}
fn upload_part_copy(
&self,
_: UploadPartCopyRequest,
) -> RusotoFuture<UploadPartCopyOutput, UploadPartCopyError> {
unimplemented!()
}
// The stubs below were added in a later pass; they follow a slightly
// different style (`_input` parameter names, trailing semicolon after
// `unimplemented!()`), kept as-is for byte-stability of the mock.
fn get_bucket_policy_status(
&self,
_input: GetBucketPolicyStatusRequest,
) -> RusotoFuture<GetBucketPolicyStatusOutput, GetBucketPolicyStatusError> {
unimplemented!();
}
fn get_object_legal_hold(
&self,
_input: GetObjectLegalHoldRequest,
) -> RusotoFuture<GetObjectLegalHoldOutput, GetObjectLegalHoldError> {
unimplemented!();
}
fn get_object_lock_configuration(
&self,
_input: GetObjectLockConfigurationRequest,
) -> RusotoFuture<GetObjectLockConfigurationOutput, GetObjectLockConfigurationError> {
unimplemented!();
}
fn get_object_retention(
&self,
_input: GetObjectRetentionRequest,
) -> RusotoFuture<GetObjectRetentionOutput, GetObjectRetentionError> {
unimplemented!();
}
fn get_public_access_block(
&self,
_input: GetPublicAccessBlockRequest,
) -> RusotoFuture<GetPublicAccessBlockOutput, GetPublicAccessBlockError> {
unimplemented!();
}
fn put_object_legal_hold(
&self,
_input: PutObjectLegalHoldRequest,
) -> RusotoFuture<PutObjectLegalHoldOutput, PutObjectLegalHoldError> {
unimplemented!();
}
fn put_object_lock_configuration(
&self,
_input: PutObjectLockConfigurationRequest,
) -> RusotoFuture<PutObjectLockConfigurationOutput, PutObjectLockConfigurationError> {
unimplemented!();
}
fn put_object_retention(
&self,
_input: PutObjectRetentionRequest,
) -> RusotoFuture<PutObjectRetentionOutput, PutObjectRetentionError> {
unimplemented!();
}
fn put_public_access_block(
&self,
_input: PutPublicAccessBlockRequest,
) -> RusotoFuture<(), PutPublicAccessBlockError> {
unimplemented!();
}
}
|
// Day 3 2019
use std::cmp::Ordering;
use std::collections::BTreeSet;
#[cfg(test)]
mod tests {
    use crate::day3::*;
    /// Example path from the puzzle statement parses into the right Pathlets.
    #[test]
    fn test_string_to_path() {
        let path = vec![
            Pathlet::Right(8),
            Pathlet::Up(5),
            Pathlet::Left(5),
            Pathlet::Down(3),
        ];
        assert_eq!(string_to_path("R8,U5,L5,D3"), path);
    }
    /// A closed 2x2 square traces into four connected Lines.
    #[test]
    fn test_trace_wire() {
        assert_eq!(
            trace_wire(vec![
                Pathlet::Right(2),
                Pathlet::Up(2),
                Pathlet::Left(2),
                Pathlet::Down(2),
            ]),
            vec![
                Line::new(Point::new(0, 0), Point::new(2, 0)),
                Line::new(Point::new(2, 0), Point::new(2, 2)),
                Line::new(Point::new(2, 2), Point::new(0, 2)),
                Line::new(Point::new(0, 2), Point::new(0, 0)),
            ]
        );
    }
    /// Crossing wires from the puzzle example intersect at (6,5) and (3,3).
    #[test]
    fn test_wire_intersections() {
        let red = trace_wire(string_to_path("R8,U5,L5,D3"));
        let green = trace_wire(string_to_path("U7,R6,D4,L4"));
        // fixed: a dead `let intersections = wire_intersections(...)` binding
        // duplicated the call the assertion below already makes.
        assert_eq!(
            wire_intersections(&red, &green),
            vec![Point::new(6, 5), Point::new(3, 3)]
        );
    }
}
/// One segment of a wire path: a direction plus the distance to travel.
#[derive(Debug, Eq, PartialEq)]
pub enum Pathlet {
    Right(usize),
    Up(usize),
    Left(usize),
    Down(usize),
}
/// A point on the integer grid. `PartialEq`/`Ord` are implemented manually
/// below, so only `Eq` is derived here.
#[derive(Clone, Copy, Debug, Eq)]
pub struct Point {
    x: isize,
    y: isize,
}
impl Point {
    /// Construct a point from its coordinates.
    fn new(x: isize, y: isize) -> Point {
        Point { x, y }
    }
    /// The point (0, 0).
    pub fn origin() -> Point {
        Point::new(0, 0)
    }
    /// This point's coordinates as an `(x, y)` tuple.
    fn to_tuple(&self) -> (isize, isize) {
        let Point { x, y } = *self;
        (x, y)
    }
    /// Manhattan distance of this point from the origin.
    pub fn distance(&self) -> usize {
        manhattan_distance(Point::origin(), *self)
    }
    fn x(&self) -> isize {
        self.x
    }
    fn y(&self) -> isize {
        self.y
    }
    /// Shift the point along x in place, returning the updated value.
    fn x_plus_equals(&mut self, x: isize) -> Self {
        self.x += x;
        *self
    }
    /// Shift the point along y in place, returning the updated value.
    fn y_plus_equals(&mut self, y: isize) -> Self {
        self.y += y;
        *self
    }
}
impl Into<(isize, isize)> for Point {
fn into(self) -> (isize, isize) {
self.to_tuple()
}
}
impl Ord for Point {
    /// Orders points primarily by manhattan distance from the origin.
    ///
    /// Distinct points at equal distance are tie-broken by x, then y, so they
    /// never compare as `Equal` — presumably so an ordered collection would
    /// not collapse them into one entry (TODO confirm intended use; the file
    /// imports `BTreeSet` but no longer uses it).
    fn cmp(&self, other: &Self) -> Ordering {
        // Same distance but different points: fall back to a coordinate
        // comparison instead of reporting Equal.
        if self.distance() == other.distance() && self != other {
            if self.x != other.x {
                return self.x.cmp(&other.x);
            } else {
                return self.y.cmp(&other.y);
            }
        }
        self.distance().cmp(&other.distance())
    }
}
impl PartialOrd for Point {
    /// Delegates to the total order defined by `Ord`.
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        self.cmp(other).into()
    }
}
impl PartialEq for Point {
    /// Two points are equal exactly when both coordinates match.
    fn eq(&self, other: &Self) -> bool {
        (self.x, self.y) == (other.x, other.y)
    }
}
/// The vertical or horizontal line between two points
#[derive(PartialEq)]
pub struct Line {
    p1: Point, // segment start (trace_wire builds lines old -> new position)
    p2: Point, // segment end
}
impl Line {
    /// Build a segment running from `p1` to `p2`.
    pub fn new(p1: Point, p2: Point) -> Line {
        Line { p1, p2 }
    }
    /// Start point of the segment.
    fn p1(&self) -> Point {
        self.p1
    }
    /// End point of the segment.
    fn p2(&self) -> Point {
        self.p2
    }
    /// Left edge of the segment's bounding box.
    fn x_min(&self) -> isize {
        std::cmp::min(self.p1.x(), self.p2.x())
    }
    /// Right edge of the segment's bounding box.
    fn x_max(&self) -> isize {
        std::cmp::max(self.p1.x(), self.p2.x())
    }
    /// Bottom edge of the segment's bounding box.
    fn y_min(&self) -> isize {
        std::cmp::min(self.p1.y(), self.p2.y())
    }
    /// Top edge of the segment's bounding box.
    fn y_max(&self) -> isize {
        std::cmp::max(self.p1.y(), self.p2.y())
    }
    /// Intersection point of two segments, if any.
    pub fn find_intersection(&self, other: &Self) -> Option<Point> {
        // Test the reverse if the first didn't find a point, because
        // intersects only finds a point if the first line is completely within
        // the bounding box of the second line.
        self.intersects(other).or(other.intersects(self))
    }
    // NOTE(review): the candidate point `(self.x_min(), other.y_min())` is
    // only the true crossing when `self` is vertical and `other` horizontal;
    // `find_intersection` tries both argument orders to compensate. Confirm
    // against the puzzle inputs before reusing this outside day 3.
    fn intersects(&self, other: &Self) -> Option<Point> {
        if self.x_min() >= other.x_min()
            && other.x_max() >= self.x_min()
            && other.y_min() >= self.y_min()
            && self.y_max() >= other.y_min()
        {
            return Some(Point::new(self.x_min(), other.y_min()));
        }
        None
    }
    /// The length of the line
    pub fn length(&self) -> usize {
        // Since the line is either vertical or horizontal, Manhattan Distance is accurate
        manhattan_distance(self.p1, self.p2)
    }
    /// Whether `point` lies on this (axis-aligned) segment.
    pub fn contains_point(&self, point: &Point) -> bool {
        // Case 1: Line is vertical so both x coords are equal. Contains point if point.x equals
        // line.x and line.min_y <= point.y <= line.max_y.
        //
        // Case 2: Line is horizontal, so both y coords are equal. Contains point according to the
        // above condition, swap x and y.
        (point.x() == self.p1.x && point.y() <= self.y_max() && point.y() >= self.y_min())
            || (point.y() == self.p1.y && point.x() <= self.x_max() && point.x() >= self.x_min())
    }
}
impl std::fmt::Debug for Line {
    /// Renders the segment as `Line {(x1, y1) -> (x2, y2)}`.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let (start, end) = (&self.p1, &self.p2);
        write!(
            f,
            "Line {{({}, {}) -> ({}, {})}}",
            start.x, start.y, end.x, end.y
        )
    }
}
/// Calculate the manhattan distance between two points.
///
/// Accepts anything convertible into an `(x, y)` coordinate pair.
pub fn manhattan_distance<T, U>(p: T, q: U) -> usize
where
    T: Into<(isize, isize)>,
    U: Into<(isize, isize)>,
{
    let (px, py) = p.into();
    let (qx, qy) = q.into();
    ((px - qx).abs() + (py - qy).abs()) as usize
}
/// Walks a vector of Pathlets from the origin and returns, in order, the
/// Lines the wire covers. (Doc fixed: this returns a `Vec<Line>`, not a
/// BTreeSet of points as a previous comment claimed.)
pub fn trace_wire(path: Vec<Pathlet>) -> Vec<Line> {
    let mut wire = vec![];
    let mut location = Point::origin();
    for pathlet in path {
        // `location` is Copy and arguments evaluate left-to-right, so the
        // first argument captures the position *before* `x/y_plus_equals`
        // mutates it; each Line therefore runs old -> new position.
        match pathlet {
            Pathlet::Right(distance) => wire.push(Line::new(
                location,
                location.x_plus_equals(distance as isize),
            )),
            Pathlet::Up(distance) => wire.push(Line::new(
                location,
                location.y_plus_equals(distance as isize),
            )),
            Pathlet::Left(distance) => wire.push(Line::new(
                location,
                location.x_plus_equals(-(distance as isize)),
            )),
            Pathlet::Down(distance) => wire.push(Line::new(
                location,
                location.y_plus_equals(-(distance as isize)),
            )),
        }
    }
    wire
}
/// All points where a segment of `red` crosses a segment of `green`.
///
/// Fixes: the `.and_then(|x| Some(...))` push was replaced with `if let`, and
/// `split_off(1)` no longer panics when no intersection is found at all.
pub fn wire_intersections(red: &Vec<Line>, green: &Vec<Line>) -> Vec<Point> {
    let mut intersections = vec![];
    for red_line in red {
        for green_line in green {
            if let Some(point) = red_line.find_intersection(green_line) {
                intersections.push(point);
            }
        }
    }
    if intersections.is_empty() {
        return intersections;
    }
    // The intersection at (0, 0) doesn't count.
    // NOTE(review): this assumes the origin intersection is always produced
    // first — true for wires that both start at the origin, but fragile.
    intersections.split_off(1)
}
/// Distance from the origin to a point along a wire. Point must be in the wire.
pub fn wire_length_to(wire: &Vec<Line>, point: &Point) -> usize {
    let mut total = 0;
    for segment in wire.iter() {
        if segment.contains_point(point) {
            // The segment starts at p1, so only the stretch from p1 to the
            // point counts for this final segment.
            return total + manhattan_distance(segment.p1(), *point);
        }
        total += segment.length();
    }
    total
}
/// Converts a comma separated list of Pathlet strs to a vector of Pathlets.
///
/// # Panics
/// Panics on a malformed token: unknown direction letter or unparsable
/// distance (unchanged behaviour; inputs are trusted puzzle data).
pub fn string_to_path(string: &str) -> Vec<Pathlet> {
    string
        .split(',')
        .map(|token| {
            // First byte is the direction letter, the rest is the distance.
            let (direction, distance) = token.split_at(1);
            let distance: usize = distance.parse().unwrap();
            match direction {
                "R" => Pathlet::Right(distance),
                "U" => Pathlet::Up(distance),
                "L" => Pathlet::Left(distance),
                "D" => Pathlet::Down(distance),
                _ => panic!("Unexpected pathlet direction {}", direction),
            }
        })
        .collect()
}
|
pub mod opcode;
pub mod instruction;
|
use core::ops::Drop;
use core::sync::atomic;
// Symbols provided by the assembly side of the IDT setup.
extern "C" {
    static mut idt_entry: [IdtEntry; 256]; // the IDT itself: 256 gate descriptors
    static mut idt_handlers: [u64; 256]; // per-vector raw handler addresses
    static mut idt_descriptor: IdtDescriptor; // limit/base pair loaded via `lidt`
    fn idt_init();
}
// x86 gate-type codes for the descriptor `attribute` field.
pub const IDT_INTERRUPT_16: u8 = 0x6;
pub const IDT_TRAP_16: u8 = 0x7;
pub const IDT_INTERRUPT_64: u8 = 0xE;
pub const IDT_TRAP_64: u8 = 0xF;
/// One 16-byte long-mode IDT gate descriptor (x86-64 layout; hence packed).
#[repr(C, packed)]
struct IdtEntry {
    lowbits: u16, // handler address bits 0..16
    selector: u16, // code-segment selector
    reserved_0: u8,
    attribute: u8, // present bit | DPL | gate type
    midbits: u16, // handler address bits 16..32
    hibits: u32, // handler address bits 32..64
    reserved_1: u32,
}
/// Operand for the `lidt` instruction: table limit and linear base address.
#[repr(C, packed)]
struct IdtDescriptor {
    limit: u16,
    base: u64,
}
/// Set IF flag, enable interrupt
#[inline]
pub unsafe fn sti() {
    asm!("sti");
}
/// Clear IF flag, disable interrupt
#[inline]
pub unsafe fn cli() {
    asm!("cli");
}
/// Load IDT descriptor
///
/// Uses the pre-2018 `asm!` syntax: `ptr` is pinned in `rax` and dereferenced
/// by the instruction.
#[inline]
unsafe fn lidt(ptr: *const IdtDescriptor) {
    asm!("lidt [rax]" : : "{rax}"(ptr) : : "intel", "volatile");
}
/// Breakpoint
#[inline]
pub unsafe fn int3() {
    asm!("int3");
}
/// Check if interrupt enabled
///
/// Reads RFLAGS via pushfq/pop and tests bit 9 (IF).
#[inline]
pub unsafe fn check_int() -> bool {
    let flags: u64;
    asm!(r#"
        pushfq
        pop $0
    "# : "=r" (flags) : : );
    return flags & (1 << 9) != 0;
}
/// Optional per-vector Rust handler invoked by `int_handler`.
type Handler = Option<fn(u64, u64)>;
/// Handler table indexed by interrupt vector.
static mut INTERRUPT_HANDLERS: [Handler; 256] = [None; 256];
/// interrupt handler dispatcher
///
/// `#[no_mangle] extern "C"` — presumably entered from the assembly stubs in
/// `idt_handlers` (TODO confirm); logs the vector/error code and forwards to
/// the registered handler, if any.
#[no_mangle]
pub extern "C" fn int_handler(vector: u64, error_code: u64) {
    println!("vector: {}, error_code: {}", vector, error_code);
    unsafe {
        if let Some(ref handler) = INTERRUPT_HANDLERS[vector as usize] {
            handler(vector, error_code);
        }
    }
}
/// Spinlock flag guarding the global IDT tables.
static IDT_LOCK: atomic::AtomicBool = atomic::ATOMIC_BOOL_INIT;
/// A locked instance of IDT
pub struct IDT(bool);
impl IDT {
    /// Acquire the global IDT lock, spinning until it is free.
    ///
    /// `compare_and_swap` returns the *previous* value, so the loop must keep
    /// spinning while it returns `true` (lock already held). Fixed: the
    /// original condition was negated, which let a contending caller fall out
    /// of the loop without owning the lock.
    pub fn get() -> Self {
        while IDT_LOCK.compare_and_swap(false, true, atomic::Ordering::Relaxed) {}
        atomic::fence(atomic::Ordering::Acquire);
        IDT(true)
    }
    /// Fill IDT slot `idx` with a gate for `handler`.
    ///
    /// # Safety
    /// Writes the shared `idt_entry` table; `handler`, `selector`, `dpl` and
    /// `etype` must describe a valid gate, and the caller must hold the IDT
    /// lock (guaranteed by going through `IDT::get`).
    pub unsafe fn set_entry(&self, idx: usize, handler: u64, selector: u16, dpl: u8, etype: u8) {
        let entry = &mut idt_entry[idx];
        entry.lowbits = (handler & 0xffff) as u16;
        entry.selector = selector;
        entry.reserved_0 = 0;
        // present bit | descriptor privilege level (bits 5-6) | gate type
        entry.attribute = 0x80 | (dpl << 5) | etype;
        entry.midbits = ((handler >> 16) & 0xffff) as u16;
        entry.hibits = ((handler >> 32) & 0xffffffff) as u32;
        entry.reserved_1 = 0;
    }
    /// Set an entry as trap handler (does not clear IF when called)
    pub fn set_kernel_trap(&self, idx: usize, handler: u64) {
        unsafe {
            self.set_entry(idx, handler, ::arch::gdt::GDT_64_CODE, 0, IDT_TRAP_64);
        }
    }
    /// Set an entry as interrupt handler (clear IF when called)
    pub fn set_kernel_isr(&self, idx: usize, handler: u64) {
        unsafe {
            self.set_entry(idx, handler, ::arch::gdt::GDT_64_CODE, 0, IDT_INTERRUPT_64);
        }
    }
    /// Re-load IDT
    pub fn flush(&self) {
        unsafe {
            lidt(&idt_descriptor as *const IdtDescriptor);
        }
    }
    /// Register an ISR handler; returns false if the slot is already taken.
    pub fn register_isr(&self, idx: usize, handler: fn(u64, u64)) -> bool {
        unsafe {
            if INTERRUPT_HANDLERS[idx].is_some() {
                return false;
            }
            INTERRUPT_HANDLERS[idx] = Some(handler);
        }
        true
    }
    /// Unregister an ISR handler; returns false if the slot is already empty.
    pub fn unregister_isr(&self, idx: usize) -> bool {
        unsafe {
            if INTERRUPT_HANDLERS[idx].is_none() {
                return false;
            }
            INTERRUPT_HANDLERS[idx] = None;
        }
        true
    }
}
impl Drop for IDT {
    /// Releases the global IDT lock when the guard goes out of scope.
    fn drop(&mut self) {
        IDT_LOCK.store(false, atomic::Ordering::Release);
    }
}
/// Initialize IDT
///
/// Installs the assembly-provided handler for every vector as a 64-bit
/// interrupt gate, then loads the table. Refactored: the two copy-pasted
/// branches differed only in the DPL value.
pub fn init() {
    let idt = IDT::get();
    for i in 0..256 {
        // Vector 3 (breakpoint / int3) gets DPL 3 so it can be raised from
        // user mode; everything else is kernel-only.
        let dpl = if i == 3 { 3 } else { 0 };
        unsafe {
            idt.set_entry(
                i,
                idt_handlers[i],
                ::arch::gdt::GDT_64_CODE,
                dpl,
                IDT_INTERRUPT_64,
            );
        }
    }
    idt.flush();
}
|
/// Empty placeholder struct.
struct Foo;
/// A trait with a single associated (no-receiver) function.
trait Bar {
    fn bar();
}
impl Bar for Foo {
    /// No-op implementation.
    fn bar() {
    }
}
|
use bb3::custom_ser::to_resp;
use bb3::RedisType;
use building_block_03 as bb3;
use std::io::prelude::*;
use std::io::BufReader;
use std::net::TcpListener;
use std::net::TcpStream;
/// Binds the redis port on localhost and serves each accepted connection on
/// its own thread; failed accepts are skipped.
fn main() {
    let addr = "127.0.0.1:6379";
    println!("Listening {}", addr);
    println!("Please use redis-cli to have a try");
    let listener = TcpListener::bind(addr).unwrap();
    for conn in listener.incoming().filter_map(|stream| stream.ok()) {
        std::thread::spawn(move || handle_stream(conn));
    }
}
/// Serves one client connection: repeatedly parses a value from the stream
/// (presumably RESP — see `bb3::custom_ser`), builds a reply via `gen_resp`,
/// and writes it back. Parse errors are reported to the client as a simple
/// string; the loop ends once a write fails (client gone).
fn handle_stream(mut s: TcpStream) {
    let mut reader = BufReader::new(&mut s);
    loop {
        let msg = match bb3::from_reader(&mut reader) {
            Err(e) => {
                println!("{:?}", e);
                RedisType::Str(format!("{}", e))
            }
            Ok(r) => {
                println!("{:?}", r);
                gen_resp(r)
            }
        };
        let resp = to_resp(&msg).unwrap();
        // Write through the BufReader's underlying stream.
        if reader.get_mut().write_all(resp.as_bytes()).is_err() {
            break;
        }
    }
}
/// Builds the reply for one parsed client value.
///
/// Mirrors redis' PING behaviour: a bare "PING" (any case) yields "PONG";
/// an array command `["PING", msg]` echoes `msg` back. Anything unrecognised
/// is echoed as a bulk string of its Debug form.
fn gen_resp(r: RedisType) -> RedisType {
    match r {
        RedisType::Str(str_val) => {
            if &str_val.to_uppercase() == "PING" {
                RedisType::Str("PONG".to_owned())
            } else {
                RedisType::BulkStr(str_val)
            }
        }
        RedisType::Array(cmds) => {
            if cmds.is_empty() {
                return RedisType::Str("".to_owned());
            }
            // `cmds.get(0).unwrap()` cannot panic: emptiness was checked above.
            if let RedisType::Str(first) = cmds.get(0).unwrap() {
                if first.to_uppercase() == "PING" {
                    if let Some(v) = cmds.get(1) {
                        if let RedisType::Str(s) = v {
                            return RedisType::BulkStr(s.clone());
                        }
                        // Non-string PING argument: echo it back unchanged.
                        return v.clone();
                    } else {
                        return RedisType::Str("PONG".to_owned());
                    }
                }
            }
            // Unknown command: echo the whole array's Debug form.
            return RedisType::BulkStr(format!("{:?}", cmds));
        }
        v => RedisType::BulkStr(format!("{:?}", v)),
    }
}
|
pub mod auth_token;
|
use std::collections::HashSet;
use proconio::input;
/// Reads `n` from stdin (via proconio's `input!` macro) and prints `f(n)`.
fn main() {
    input! {
        n: i64
    }
    println!("{}", f(n));
}
/// Counts the integers in `1..=n` that are NOT perfect powers `a^b`
/// (with `a >= 2`, `b >= 2`).
///
/// Collects every distinct power up to `n` in a set (a number like 64 is
/// reachable from several bases, hence the dedup), then subtracts the count.
fn f(n: i64) -> i64 {
    let mut powers: HashSet<i64> = HashSet::new();
    let mut base = 2;
    while base * base <= n {
        let mut value = base * base;
        while value <= n {
            powers.insert(value);
            value *= base;
        }
        base += 1;
    }
    n - powers.len() as i64
}
mod internals;
use crate::parking_lot::internals::{MyResult, ParkLotIntern, Slot};
use failure::{bail, Error};
use std::string::ToString;
/// One parsed parking-lot command; string payloads borrow from the input line.
#[derive(Copy, Clone, Debug)]
enum Action<'a> {
    Create(usize), // create_parking_lot <capacity>
    Park(&'a str, &'a str), // park <reg_num> <color>
    Leave(usize), // leave <slot_no>
    Status,
    SlotNumbersForColor(&'a str),
    SlotNumbersForRegNo(&'a str),
    RegNumbersForColor(&'a str),
}
/// Flattens a command result into printable text: joined lines on success,
/// the error's Display form on failure.
pub(crate) fn stringify(to_print: Result<Vec<String>, Error>) -> String {
    to_print.map_or_else(|err| err.to_string(), |items| items.join("\n"))
}
/// Facade over the internal parking-lot state; parses and executes commands.
pub(crate) struct ParkingLot {
    internal: internals::ParkLotIntern,
}
impl ParkingLot {
    /// Create an empty (not yet allocated) parking lot.
    pub(crate) fn new() -> Self {
        ParkingLot {
            internal: ParkLotIntern::new(),
        }
    }
    /// Dispatch one parsed Action to the internal implementation. The three
    /// query variants share `internal.query`, differing only in the closure
    /// that decides what (if anything) each slot contributes.
    fn process(&mut self, action: Action) -> MyResult {
        match action {
            Action::Create(capacity) => self.internal.alloc(capacity),
            Action::Park(reg_num, color) => self.internal.park(reg_num, color),
            Action::Leave(slot_no) => self.internal.leave(slot_no),
            Action::Status => self.internal.status(),
            // Registration numbers of cars matching a color.
            Action::RegNumbersForColor(color) => {
                self.internal.query(&|_, slot| -> Option<String> {
                    match slot {
                        Slot::Occupied(car) => {
                            if car.color_match(color) {
                                Some(car.reg_num())
                            } else {
                                None
                            }
                        }
                        Slot::Empty => None,
                    }
                })
            }
            // Slot indices of cars matching a color.
            Action::SlotNumbersForColor(color) => {
                self.internal.query(&|idx, slot| -> Option<String> {
                    match slot {
                        Slot::Occupied(car) => {
                            if car.color_match(color) {
                                Some(idx.to_string())
                            } else {
                                None
                            }
                        }
                        Slot::Empty => None,
                    }
                })
            }
            // Slot indices of cars matching a registration number.
            Action::SlotNumbersForRegNo(reg_num) => {
                self.internal.query(&|idx, slot| -> Option<String> {
                    match slot {
                        Slot::Occupied(car) => {
                            if car.reg_num_match(reg_num) {
                                Some(idx.to_string())
                            } else {
                                None
                            }
                        }
                        Slot::Empty => None,
                    }
                })
            }
        }
    }
    /// Receive a command as a string, parse and execute it
    ///
    /// Whitespace-tokenises the line and slice-pattern-matches it into an
    /// `Action`; numeric arguments propagate parse errors via `?`, unknown
    /// commands `bail!`.
    ///
    /// # Example
    /// ```
    /// let mut parking = ParkingLot::new();
    /// parking.repl("create_parking_lot 6");
    /// ```
    ///
    pub(crate) fn repl(&mut self, command: &str) -> MyResult {
        let tokens: Vec<&str> = command.split_ascii_whitespace().collect();
        let action = match &tokens[..] {
            ["create_parking_lot", capacity] => Action::Create(capacity.parse::<usize>()?),
            ["park", reg_num, color] => Action::Park(reg_num, color),
            ["leave", slot_no] => Action::Leave(slot_no.parse::<usize>()?),
            ["status"] => Action::Status,
            ["registration_numbers_for_cars_with_colour", color] => {
                Action::RegNumbersForColor(color)
            }
            ["slot_numbers_for_cars_with_colour", color] => Action::SlotNumbersForColor(color),
            ["slot_number_for_registration_number", reg_num] => {
                Action::SlotNumbersForRegNo(reg_num)
            }
            _ => bail!("Can't parse command: {}", command),
        };
        self.process(action)
    }
}
#[cfg(test)]
mod tests {
    use super::{stringify, ParkingLot};
    use failure::Error;
    /// A well-formed create command reports the allocated capacity.
    #[test]
    fn test_repl_ok() -> Result<(), Error> {
        let mut parking = ParkingLot::new();
        assert_eq!(
            &parking.repl("create_parking_lot 6")?.pop().unwrap(),
            "Created a parking lot with 6 slots"
        );
        Ok(())
    }
    /// An unknown command surfaces as a parse error, not a panic.
    #[test]
    fn test_repl_unknown_command() -> Result<(), Error> {
        let mut parking = ParkingLot::new();
        assert_eq!(
            format!("{}", parking.repl("this is sparta").err().unwrap()),
            "Can't parse command: this is sparta"
        );
        Ok(())
    }
    /// Golden-file test: replays `test_input` and compares each response with
    /// the matching line of `test_exp`, ignoring whitespace differences.
    #[test]
    fn test_repl_input_from_file() -> Result<(), Error> {
        let input_raw = include_str!("test_input");
        let expected_raw = include_str!("test_exp");
        let mut parking = ParkingLot::new();
        for (command, exp) in input_raw.lines().zip(expected_raw.lines()) {
            let mut resp = stringify(parking.repl(command));
            // Normalise both sides: drop newlines and spaces before comparing.
            resp.retain(|c| c != '\n' && c != ' ');
            let mut exp_norm = String::from(exp);
            exp_norm.retain(|c| c != '\n' && c != ' ');
            assert_eq!(resp, exp_norm);
        }
        Ok(())
    }
}
|
use std::convert::TryFrom;
use std::{fs::File, path::Path, os::unix::io::AsRawFd};
use std::io::BufReader;
use tokio::prelude::*;
use tokio::reactor::PollEvented2;
use nix::{ioctl_none, ioctl_read};
use crate::error::{Result, ResultExt, Error, ErrorKind};
// Character device exposed by the surface_dtx kernel driver.
const DEFAULT_EVENT_FILE_PATH: &str = "/dev/surface_dtx";
/// Handle to the detachment-system control device.
#[derive(Debug)]
pub struct Device {
    file: File,
}
impl Device {
    /// Open the default device node.
    pub fn open() -> Result<Self> {
        Device::open_path(DEFAULT_EVENT_FILE_PATH)
    }
    /// Open a specific device node path.
    pub fn open_path<P: AsRef<Path>>(path: P) -> Result<Self> {
        let file = File::open(path).context(ErrorKind::DeviceAccess)?;
        Ok(Device { file })
    }
    /// Create an event stream over a duplicated handle, so commands and
    /// events can use the device concurrently.
    pub fn events(&self) -> Result<EventStream> {
        EventStream::from_file(self.file.try_clone().context(ErrorKind::DeviceAccess)?)
    }
    /// Borrow an ioctl-based command interface for this device.
    #[allow(unused)]
    pub fn commands(&self) -> Commands {
        Commands { device: &self }
    }
}
impl std::os::unix::io::AsRawFd for Device {
    fn as_raw_fd(&self) -> std::os::unix::io::RawFd {
        self.file.as_raw_fd()
    }
}
/// Async stream of raw 4-byte events read from the device (tokio 0.1 style).
pub struct EventStream {
    reader: BufReader<PollEvented2<tokio_file_unix::File<File>>>,
}
impl EventStream {
    /// Wrap the file in a non-blocking, reactor-registered buffered reader.
    fn from_file(file: File) -> Result<Self> {
        let file = tokio_file_unix::File::new_nb(file).context(ErrorKind::DeviceAccess)?;
        let reader = file.into_reader(&Default::default()).context(ErrorKind::DeviceAccess)?;
        Ok(EventStream { reader })
    }
}
impl Stream for EventStream {
    type Item = RawEvent;
    type Error = Error;
    /// Polls for the next event: each event is exactly 4 bytes
    /// (type, code, arg0, arg1). A short read is treated as a protocol error.
    fn poll(&mut self) -> Poll<Option<RawEvent>, Error> {
        let mut buf = [0; 4];
        match self.reader.poll_read(&mut buf[..]) {
            Ok(Async::NotReady) => {
                Ok(Async::NotReady)
            },
            Ok(Async::Ready(4)) => {
                let evt = RawEvent {
                    typ: buf[0],
                    code: buf[1],
                    arg0: buf[2],
                    arg1: buf[3],
                };
                Ok(Async::Ready(Some(evt)))
            },
            // NOTE(review): a 0-byte read (EOF) also lands here and is
            // reported as "incomplete event" rather than end-of-stream —
            // confirm that is intended for this device.
            Ok(Async::Ready(_)) => {
                Err(std::io::Error::new(std::io::ErrorKind::InvalidData, "incomplete event"))
                    .context(ErrorKind::DeviceIo)
                    .map_err(Into::into)
            },
            Err(e) => {
                Err(e)
                    .context(ErrorKind::DeviceIo)
                    .map_err(Into::into)
            },
        }
    }
}
/// Device operation mode reported by the detachment system.
#[derive(Debug, Clone, Copy, Eq, PartialEq)]
pub enum OpMode {
    Tablet,
    Laptop,
    Studio,
}
impl OpMode {
    /// Lower-case human-readable name of this mode.
    pub fn as_str(self) -> &'static str {
        match self {
            Self::Tablet => "tablet",
            Self::Laptop => "laptop",
            Self::Studio => "studio",
        }
    }
}
impl TryFrom<u8> for OpMode {
    type Error = u8;
    /// Decode the driver's mode byte; unknown values come back as the error.
    fn try_from(val: u8) -> std::result::Result<Self, Self::Error> {
        match val {
            0 => Ok(Self::Tablet),
            1 => Ok(Self::Laptop),
            2 => Ok(Self::Studio),
            other => Err(other),
        }
    }
}
/// Whether the clipboard/base is attached.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum ConnectionState {
    Disconnected,
    Connected,
}
impl TryFrom<u8> for ConnectionState {
    type Error = u8;
    /// 0 => Disconnected, 1 => Connected; anything else is the error value.
    fn try_from(val: u8) -> std::result::Result<Self, Self::Error> {
        match val {
            0 => Ok(Self::Disconnected),
            1 => Ok(Self::Connected),
            other => Err(other),
        }
    }
}
/// State of the mechanical detachment latch.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum LatchState {
    Closed,
    Open,
}
impl TryFrom<u8> for LatchState {
    type Error = u8;
    /// 0 => Closed, 1 => Open; anything else is the error value.
    fn try_from(val: u8) -> std::result::Result<Self, Self::Error> {
        match val {
            0 => Ok(Self::Closed),
            1 => Ok(Self::Open),
            other => Err(other),
        }
    }
}
/// One 4-byte event exactly as read from the device.
#[derive(Debug, Clone, Copy, Eq, PartialEq)]
pub struct RawEvent {
    pub typ: u8,
    pub code: u8,
    pub arg0: u8,
    pub arg1: u8,
}
/// A decoded device event (see `TryFrom<RawEvent>` below for the mapping).
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum Event {
    OpModeChange {
        mode: OpMode
    },
    // NOTE: variant name carries a typo ("Conection") — kept, since renaming
    // would break downstream matches.
    ConectionChange {
        state: ConnectionState,
        arg1: u8
    },
    LatchStateChange {
        state: LatchState
    },
    DetachError {
        err: u8
    },
    DetachRequest,
}
impl TryFrom<RawEvent> for Event {
    type Error = RawEvent;
    /// Decodes a raw event (all events use type 0x11; the code selects the
    /// kind). Events with out-of-range payloads or unknown codes are handed
    /// back unchanged as the error.
    fn try_from(evt: RawEvent) -> std::result::Result<Self, Self::Error> {
        let evt = match evt {
            // 0x0c: connection change; arg0 is the ConnectionState (guard
            // makes the `unwrap` infallible), arg1 passed through opaquely.
            RawEvent { typ: 0x11, code: 0x0c, arg0, arg1 } if arg0 <= 1 => {
                Event::ConectionChange { state: ConnectionState::try_from(arg0).unwrap(), arg1 }
            },
            // 0x0d: operation mode change; arg0 is the OpMode.
            RawEvent { typ: 0x11, code: 0x0d, arg0, .. } if arg0 <= 2 => {
                Event::OpModeChange { mode: OpMode::try_from(arg0).unwrap() }
            },
            // 0x0e: user pressed the detach button.
            RawEvent { typ: 0x11, code: 0x0e, .. } => {
                Event::DetachRequest
            },
            // 0x0f: detach failed; arg0 is the raw error code.
            RawEvent { typ: 0x11, code: 0x0f, arg0, .. } => {
                Event::DetachError { err: arg0 }
            },
            // 0x11: latch opened/closed; arg0 is the LatchState.
            RawEvent { typ: 0x11, code: 0x11, arg0, .. } if arg0 <= 1 => {
                Event::LatchStateChange { state: LatchState::try_from(arg0).unwrap() }
            },
            _ => return Err(evt)
        };
        Ok(evt)
    }
}
/// ioctl-based command interface, borrowed from a `Device`.
pub struct Commands<'a> {
    device: &'a Device,
}
impl<'a> Commands<'a> {
    /// Lock the latch (prevent detaching).
    #[allow(unused)]
    pub fn latch_lock(&self) -> Result<()> {
        unsafe { dtx_latch_lock(self.device.as_raw_fd()).context(ErrorKind::DeviceIo)? };
        Ok(())
    }
    /// Unlock the latch.
    #[allow(unused)]
    pub fn latch_unlock(&self) -> Result<()> {
        unsafe { dtx_latch_unlock(self.device.as_raw_fd()).context(ErrorKind::DeviceIo)? };
        Ok(())
    }
    /// Request a latch open (goes through the driver's request flow).
    #[allow(unused)]
    pub fn latch_request(&self) -> Result<()> {
        unsafe { dtx_latch_request(self.device.as_raw_fd()).context(ErrorKind::DeviceIo)? };
        Ok(())
    }
    /// Open the latch directly.
    #[allow(unused)]
    pub fn latch_open(&self) -> Result<()> {
        unsafe { dtx_latch_open(self.device.as_raw_fd()).context(ErrorKind::DeviceIo)? };
        Ok(())
    }
    /// Query the current operation mode via ioctl and decode it.
    #[allow(unused)]
    pub fn get_opmode(&self) -> Result<OpMode> {
        use std::io;
        let mut opmode: u32 = 0;
        unsafe {
            dtx_get_opmode(self.device.as_raw_fd(), &mut opmode as *mut u32)
                .context(ErrorKind::DeviceIo)?
        };
        match opmode {
            0 => Ok(OpMode::Tablet),
            1 => Ok(OpMode::Laptop),
            2 => Ok(OpMode::Studio),
            // `x` is intentionally unused: any other value is invalid data.
            x => {
                Err(io::Error::new(io::ErrorKind::InvalidData, "invalid opmode"))
                    .context(ErrorKind::DeviceIo)
                    .map_err(Into::into)
            },
        }
    }
}
// ioctl wrappers generated by nix; magic 0x11 matches the driver's command set.
ioctl_none!(dtx_latch_lock, 0x11, 0x01);
ioctl_none!(dtx_latch_unlock, 0x11, 0x02);
ioctl_none!(dtx_latch_request, 0x11, 0x03);
ioctl_none!(dtx_latch_open, 0x11, 0x04);
ioctl_read!(dtx_get_opmode, 0x11, 0x05, u32);
|
use std::io;
use std::io::Read;
use std::fs::File;
use std::collections::HashMap;
// The quick ntpem fox jumped over rgw lazy dog.
/// If `char` appears in keyboard `row`, appends to `new_word` the character
/// `i` positions away from it (with wrap-around at the row edges).
///
/// Fixed so it compiles: `===` is not a Rust operator (now `==`) and the
/// match binding was misspelled `idxa` while the body used `idx`.
fn thinger(ref row: &Vec<char>, char: char, i: isize, ref mut new_word: &mut String) {
    match row.iter().position(|&x| x == char) {
        Some(idx) => {
            if idx as isize + i < 0 {
                // NOTE(review): wraps using `len + i`, ignoring `idx`;
                // presumably `(idx + i + len)` was intended — confirm.
                new_word.push(*row.get((row.len() as isize + i) as usize).unwrap())
            } else if idx as isize + i >= row.len() as isize {
                // NOTE(review): wraps to position `i`, ignoring `idx`;
                // presumably `(idx + i - len)` was intended — confirm.
                new_word.push(*row.get(i as usize).unwrap())
            } else {
                new_word.push(*row.get((idx as isize + i) as usize).unwrap())
            }
        }
        // Character not on this row: contribute nothing.
        None => {},
    }
}
/// Produces the four candidate "unshifted" spellings of `word`, one for each
/// horizontal keyboard offset in {-2, -1, 1, 2} (0 is skipped — that would
/// reproduce the word itself).
fn check_word(word: &str) -> Vec<String> {
    let top = vec!['q', 'w', 'e', 'r', 't', 'y', 'u', 'i', 'o', 'p'];
    let middle = vec!['a', 's', 'd', 'f', 'g', 'h', 'j', 'k', 'l'];
    let bottom = vec!['z', 'x', 'c', 'v', 'b', 'n', 'm'];
    (-2isize..=2)
        .filter(|&offset| offset != 0)
        .map(|offset| {
            let mut candidate = String::new();
            for ch in word.chars() {
                // Exactly one of the three rows contains `ch` and contributes.
                thinger(&top, ch, offset, &mut candidate);
                thinger(&middle, ch, offset, &mut candidate);
                thinger(&bottom, ch, offset, &mut candidate);
            }
            candidate
        })
        .collect()
}
/// Reads a dictionary file and a line from stdin; echoes known words and, for
/// unknown words, prints the set of dictionary words reachable by shifting
/// the typed keys horizontally on the keyboard.
fn main() {
    let mut file = File::open("../../assets/enable1.txt").expect("file not found");
    let mut strings = String::new();
    file.read_to_string(&mut strings)
        .expect("something went wrong");
    // Dictionary membership map (value is unused; effectively a set).
    let dict = strings
        .split_whitespace()
        .fold(HashMap::new(), |mut acc: HashMap<&str, bool>, c: &str| {
            acc.insert(c, true);
            acc
        });
    let mut input: String = String::new();
    io::stdin().read_line(&mut input)
        .expect("failed to read line");
    let input = input.trim();
    let mut output: String = String::new();
    for word in input.split_whitespace() {
        // Normalise for lookup: lowercase, strip trailing period.
        // NOTE(review): `trim_right_matches` is the pre-1.33 name of
        // `trim_end_matches`.
        let _word = word.to_lowercase();
        let _word = _word.trim_right_matches(".");
        match dict.get(_word) {
            Some(_) => { output.push_str(word) }
            None => {
                // Keep only shift-candidates that are real dictionary words.
                let possible_words: Vec<String> = check_word(&_word)
                    .into_iter()
                    .filter(
                        |ref possible_word: &String| dict.get(possible_word.as_str())
                            .is_some()
                    )
                    .collect();
                let possible_words = possible_words
                    .into_iter()
                    .fold(String::new(), |mut acc, word| {
                        acc.push_str(&word);
                        acc.push_str(", ");
                        acc
                    });
                // Wrap candidates in braces; the two pops drop the final ", ".
                output.push('{');
                output.push_str(&possible_words);
                output.pop();
                output.pop();
                output.push('}')
            }
        }
        output.push(' ')
    }
    // Drop the trailing space added after the last word.
    output.pop();
    println!("{:?}", output);
}
|
// ===============================================================================================
// Imports
// ===============================================================================================
use spin::RwLock;
// ===============================================================================================
// Statics
// ===============================================================================================
pub static LEVEL_MASK: RwLock<u8> = RwLock::new(0xFF);
// ===============================================================================================
// Log Level
// ===============================================================================================
/// Log severities, each occupying a distinct bit of `LEVEL_MASK` so they
/// can be enabled/disabled independently.
/// NOTE(review): bits 2, 3, and 6 are unused — presumably reserved for
/// future levels; confirm against the rest of the logging code.
#[repr(u8)]
pub enum LogLevel {
    Error = 1 << 0,
    Warn = 1 << 1,
    Debug = 1 << 4,
    Info = 1 << 5,
    Trace = 1 << 7,
}
/// Sets `level`'s bit in the global mask, enabling that log level.
pub fn enable(level: LogLevel) {
    // spin::RwLock::write already spins until the lock is acquired, so the
    // previous hand-rolled `try_write` busy-loop (plus an `expect` on a
    // value just proven Some) was redundant.
    *LEVEL_MASK.write() |= level as u8;
}
/// Clears `level`'s bit in the global mask, disabling that log level.
pub fn disable(level: LogLevel) {
    // spin::RwLock::write already spins until the lock is acquired, so the
    // previous hand-rolled `try_write` busy-loop (plus an `expect` on a
    // value just proven Some) was redundant.
    *LEVEL_MASK.write() &= !(level as u8);
}
/// Returns true when `level`'s bit is set in the global mask.
pub fn is_enabled(level: LogLevel) -> bool {
    // Bug fix: the original used `|` (bitwise OR), which is non-zero for
    // every LogLevel (all discriminants are non-zero), so this always
    // returned true regardless of the mask. `&` actually tests the bit.
    *(LEVEL_MASK.read()) & (level as u8) != 0
}
|
use rand::Rng;
use rodio;
use rodio::Device;
use rodio::Source;
use std::collections::HashMap;
use std::fs::File;
use std::convert::AsRef;
use std::io;
use std::io::prelude::*;
use std::sync::Arc;
// A group of interchangeable clips for one event (play() picks one at random).
type Sounds = Vec<Sound>;
// Event type -> the clips that can play for it.
type SoundsMap = HashMap<SoundType, Sounds>;
/// Game events that have associated sound effects; used as the key into
/// `SoundsMap` (hence Eq + Hash).
#[derive(Eq, PartialEq, std::hash::Hash, Clone, Copy)]
pub enum SoundType {
    Click,
    Death,
    Eat,
    Step,
    Warp,
}
/// Owns all loaded sound clips and the audio output device.
pub struct SoundManager {
    // All loaded clips, grouped by event type.
    sounds_map: SoundsMap,
    // None when no default output device exists; play() is then a no-op.
    device: Option<Device>,
    // Picks a random clip when an event has several registered.
    rng: rand::rngs::ThreadRng,
}
pub struct Sound(Arc<Vec<u8>>);
impl AsRef<[u8]> for Sound {
    /// Borrows the raw audio bytes.
    fn as_ref(&self) -> &[u8] {
        self.0.as_slice()
    }
}
impl Sound {
    /// Reads the entire file at `filename` into memory.
    pub fn load(filename: &str) -> io::Result<Sound> {
        let mut buf = Vec::new();
        let mut file = File::open(filename)?;
        file.read_to_end(&mut buf)?;
        Ok(Sound(Arc::new(buf)))
    }
    /// Returns a seekable cursor over the shared bytes.
    /// Cheap: clones only the Arc, not the audio data.
    // `&self` replaces the non-idiomatic `self: &Self` receiver
    // (clippy: needless_arbitrary_self_type).
    pub fn cursor(&self) -> io::Cursor<Sound> {
        io::Cursor::new(Sound(self.0.clone()))
    }
    /// Builds a fresh decoder over the bytes.
    /// NOTE(review): panics (unwrap) if the bytes are not a format rodio
    /// recognizes — acceptable only because assets ship with the binary.
    pub fn decoder(&self) -> rodio::Decoder<io::Cursor<Sound>> {
        rodio::Decoder::new(self.cursor()).unwrap()
    }
}
/// Loads the sound file at `path` and registers it under `sound_type`,
/// creating the entry's Vec on first use.
/// Panics if the file is missing or unreadable (asset bug).
fn create_source(sounds_map: &mut SoundsMap, sound_type: SoundType, path: &str) {
    let sound = Sound::load(path).unwrap();
    // entry() performs a single map lookup instead of the original
    // get_mut / insert / get_mut sequence.
    sounds_map
        .entry(sound_type)
        .or_insert_with(Sounds::new)
        .push(sound);
}
impl SoundManager {
    /// Grabs the default audio output device (if any) and loads every
    /// bundled sound asset. Panics if an asset file is missing.
    pub fn new() -> Self {
        let audio_out = rodio::default_output_device();
        let mut map = HashMap::new();
        create_source(&mut map, SoundType::Click, "resources/click.wav");
        create_source(&mut map, SoundType::Death, "resources/death.wav");
        create_source(&mut map, SoundType::Eat, "resources/eat1.wav");
        create_source(&mut map, SoundType::Eat, "resources/eat2.wav");
        create_source(&mut map, SoundType::Eat, "resources/eat3.wav");
        create_source(&mut map, SoundType::Step, "resources/step1.wav");
        create_source(&mut map, SoundType::Step, "resources/step2.wav");
        create_source(&mut map, SoundType::Warp, "resources/warp.wav");
        SoundManager {
            device: audio_out,
            sounds_map: map,
            rng: rand::thread_rng(),
        }
    }
    /// Plays one randomly chosen clip registered for `sound_type`.
    /// Does nothing when the event has no clips or no output device exists.
    pub fn play(&mut self, sound_type: SoundType) {
        if let Some(clips) = self.sounds_map.get(&sound_type) {
            let pick = self.rng.gen_range(0, clips.len());
            if let (Some(clip), Some(out)) = (clips.get(pick), self.device.as_ref()) {
                rodio::play_raw(out, clip.decoder().convert_samples());
            }
        }
    }
}
|
/// Demo module illustrating nested module paths (`outer::a`,
/// `outer::inner::c`, etc.). Each function just prints its own name.
pub mod outer {
    /// Prints "function a".
    pub fn a() {
        println!("function a");
    }
    /// Prints "function b".
    pub fn b() {
        println!("function b");
    }
    /// Nested module, reachable as `outer::inner`.
    pub mod inner {
        /// Prints "function c".
        pub fn c() {
            println!("function c");
        }
        /// Prints "function d".
        pub fn d() {
            println!("function d");
        }
    }
}
|
//! The functions in this module are expected to work. They have been tested by hand, but currently can't be tested programmatically because console doesn't have a testing functionality.
use crate::{settings::Settings, vpn::util::UserConfig};
use anyhow::{Context, Result};
use console::Term;
use directories::ProjectDirs;
use std::fs::create_dir;
/// Asks for every setting and creates the app's config directories.
/// Asks for every setting and creates the app's config directories.
///
/// Prompts run before directory creation, so a failed/aborted prompt
/// leaves no directories behind. Errors from either step propagate via `?`.
pub fn initialize(config: &mut UserConfig, pdir: &ProjectDirs, terminal: &Term) -> Result<()> {
    ask_for_settings(config, terminal)?;
    create_config_dir(&pdir)?;
    Ok(())
}
/// Interactively prompts for each user setting on `terminal` and writes
/// the result back into `config`.
/// Prompt order: username, password, tier, protocol.
fn ask_for_settings(config: &mut UserConfig, terminal: &Term) -> Result<()> {
    // Work on a clone so `config` is only replaced after every prompt
    // succeeds; any `?` early-return leaves the caller's config untouched.
    let mut user_settings = Settings::new(config.clone(), terminal);
    user_settings.set_username()?;
    user_settings.set_password()?;
    user_settings.set_tier()?;
    user_settings.set_protocol()?;
    *config = user_settings.into_inner();
    Ok(())
}
fn create_config_dir(pdir: &ProjectDirs) -> Result<()> {
create_dir(pdir.config_dir()).context("Failed to create app config dir")
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.