text stringlengths 8 4.13M |
|---|
// svd2rust-style generated accessors for the CFGR2 register and its ROVSE bit
// (ADC regular-group oversampling enable). Do not hand-edit the bit encodings.
#[doc = "Register `CFGR2` reader"]
pub type R = crate::R<CFGR2_SPEC>;
#[doc = "Register `CFGR2` writer"]
pub type W = crate::W<CFGR2_SPEC>;
#[doc = "Field `ROVSE` reader - ADC oversampler enable on scope ADC group regular"]
pub type ROVSE_R = crate::BitReader<ROVSE_A>;
#[doc = "ADC oversampler enable on scope ADC group regular\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum ROVSE_A {
    #[doc = "0: Regular oversampling disabled"]
    Disabled = 0,
    #[doc = "1: Regular oversampling enabled"]
    Enabled = 1,
}
impl From<ROVSE_A> for bool {
    #[inline(always)]
    fn from(variant: ROVSE_A) -> Self {
        // The discriminant is the hardware bit value, so any non-zero variant is `true`.
        variant as u8 != 0
    }
}
impl ROVSE_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> ROVSE_A {
        match self.bits {
            false => ROVSE_A::Disabled,
            true => ROVSE_A::Enabled,
        }
    }
    #[doc = "Regular oversampling disabled"]
    #[inline(always)]
    pub fn is_disabled(&self) -> bool {
        *self == ROVSE_A::Disabled
    }
    #[doc = "Regular oversampling enabled"]
    #[inline(always)]
    pub fn is_enabled(&self) -> bool {
        *self == ROVSE_A::Enabled
    }
}
#[doc = "Field `ROVSE` writer - ADC oversampler enable on scope ADC group regular"]
pub type ROVSE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, ROVSE_A>;
impl<'a, REG, const O: u8> ROVSE_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "Regular oversampling disabled"]
    #[inline(always)]
    pub fn disabled(self) -> &'a mut crate::W<REG> {
        self.variant(ROVSE_A::Disabled)
    }
    #[doc = "Regular oversampling enabled"]
    #[inline(always)]
    pub fn enabled(self) -> &'a mut crate::W<REG> {
        self.variant(ROVSE_A::Enabled)
    }
}
// Generated accessors for the JOVSE bit (ADC injected-group oversampling enable);
// same shape as ROVSE above.
#[doc = "Field `JOVSE` reader - ADC oversampler enable on scope ADC group injected"]
pub type JOVSE_R = crate::BitReader<JOVSE_A>;
#[doc = "ADC oversampler enable on scope ADC group injected\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum JOVSE_A {
    #[doc = "0: Injected oversampling disabled"]
    Disabled = 0,
    #[doc = "1: Injected oversampling enabled"]
    Enabled = 1,
}
impl From<JOVSE_A> for bool {
    #[inline(always)]
    fn from(variant: JOVSE_A) -> Self {
        variant as u8 != 0
    }
}
impl JOVSE_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> JOVSE_A {
        match self.bits {
            false => JOVSE_A::Disabled,
            true => JOVSE_A::Enabled,
        }
    }
    #[doc = "Injected oversampling disabled"]
    #[inline(always)]
    pub fn is_disabled(&self) -> bool {
        *self == JOVSE_A::Disabled
    }
    #[doc = "Injected oversampling enabled"]
    #[inline(always)]
    pub fn is_enabled(&self) -> bool {
        *self == JOVSE_A::Enabled
    }
}
#[doc = "Field `JOVSE` writer - ADC oversampler enable on scope ADC group injected"]
pub type JOVSE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, JOVSE_A>;
impl<'a, REG, const O: u8> JOVSE_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "Injected oversampling disabled"]
    #[inline(always)]
    pub fn disabled(self) -> &'a mut crate::W<REG> {
        self.variant(JOVSE_A::Disabled)
    }
    #[doc = "Injected oversampling enabled"]
    #[inline(always)]
    pub fn enabled(self) -> &'a mut crate::W<REG> {
        self.variant(JOVSE_A::Enabled)
    }
}
// OVSS is a plain 4-bit numeric field (no enumerated values), so it only gets
// raw Field{Reader,Writer} aliases. TROVS is an enumerated bit like ROVSE.
#[doc = "Field `OVSS` reader - ADC oversampling shift"]
pub type OVSS_R = crate::FieldReader;
#[doc = "Field `OVSS` writer - ADC oversampling shift"]
pub type OVSS_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 4, O>;
#[doc = "Field `TROVS` reader - ADC oversampling discontinuous mode (triggered mode) for ADC group regular"]
pub type TROVS_R = crate::BitReader<TROVS_A>;
#[doc = "ADC oversampling discontinuous mode (triggered mode) for ADC group regular\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum TROVS_A {
    #[doc = "0: All oversampled conversions for a channel are run following a trigger"]
    Automatic = 0,
    #[doc = "1: Each oversampled conversion for a channel needs a new trigger"]
    Triggered = 1,
}
impl From<TROVS_A> for bool {
    #[inline(always)]
    fn from(variant: TROVS_A) -> Self {
        variant as u8 != 0
    }
}
impl TROVS_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> TROVS_A {
        match self.bits {
            false => TROVS_A::Automatic,
            true => TROVS_A::Triggered,
        }
    }
    #[doc = "All oversampled conversions for a channel are run following a trigger"]
    #[inline(always)]
    pub fn is_automatic(&self) -> bool {
        *self == TROVS_A::Automatic
    }
    #[doc = "Each oversampled conversion for a channel needs a new trigger"]
    #[inline(always)]
    pub fn is_triggered(&self) -> bool {
        *self == TROVS_A::Triggered
    }
}
#[doc = "Field `TROVS` writer - ADC oversampling discontinuous mode (triggered mode) for ADC group regular"]
pub type TROVS_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, TROVS_A>;
impl<'a, REG, const O: u8> TROVS_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "All oversampled conversions for a channel are run following a trigger"]
    #[inline(always)]
    pub fn automatic(self) -> &'a mut crate::W<REG> {
        self.variant(TROVS_A::Automatic)
    }
    #[doc = "Each oversampled conversion for a channel needs a new trigger"]
    #[inline(always)]
    pub fn triggered(self) -> &'a mut crate::W<REG> {
        self.variant(TROVS_A::Triggered)
    }
}
// Generated accessors for the ROVSM bit: selects how regular oversampling
// behaves when an injected sequence interrupts it.
#[doc = "Field `ROVSM` reader - Regular Oversampling mode"]
pub type ROVSM_R = crate::BitReader<ROVSM_A>;
#[doc = "Regular Oversampling mode\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum ROVSM_A {
    #[doc = "0: Oversampling is temporary stopped and continued after injection sequence"]
    Continued = 0,
    #[doc = "1: Oversampling is aborted and resumed from start after injection sequence"]
    Resumed = 1,
}
impl From<ROVSM_A> for bool {
    #[inline(always)]
    fn from(variant: ROVSM_A) -> Self {
        variant as u8 != 0
    }
}
impl ROVSM_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> ROVSM_A {
        match self.bits {
            false => ROVSM_A::Continued,
            true => ROVSM_A::Resumed,
        }
    }
    #[doc = "Oversampling is temporary stopped and continued after injection sequence"]
    #[inline(always)]
    pub fn is_continued(&self) -> bool {
        *self == ROVSM_A::Continued
    }
    #[doc = "Oversampling is aborted and resumed from start after injection sequence"]
    #[inline(always)]
    pub fn is_resumed(&self) -> bool {
        *self == ROVSM_A::Resumed
    }
}
#[doc = "Field `ROVSM` writer - Regular Oversampling mode"]
pub type ROVSM_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, ROVSM_A>;
impl<'a, REG, const O: u8> ROVSM_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "Oversampling is temporary stopped and continued after injection sequence"]
    #[inline(always)]
    pub fn continued(self) -> &'a mut crate::W<REG> {
        self.variant(ROVSM_A::Continued)
    }
    #[doc = "Oversampling is aborted and resumed from start after injection sequence"]
    #[inline(always)]
    pub fn resumed(self) -> &'a mut crate::W<REG> {
        self.variant(ROVSM_A::Resumed)
    }
}
// RSHIFT1..4 share identical semantics (right-shift after offset N correction),
// so RSHIFT2..4 are re-exports of the RSHIFT1 reader/writer types.
#[doc = "Field `RSHIFT1` reader - Right-shift data after Offset 1 correction"]
pub type RSHIFT1_R = crate::BitReader<RSHIFT1_A>;
#[doc = "Right-shift data after Offset 1 correction\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum RSHIFT1_A {
    #[doc = "0: Right-shifting disabled"]
    Disabled = 0,
    #[doc = "1: Data is right-shifted 1-bit"]
    Enabled = 1,
}
impl From<RSHIFT1_A> for bool {
    #[inline(always)]
    fn from(variant: RSHIFT1_A) -> Self {
        variant as u8 != 0
    }
}
impl RSHIFT1_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> RSHIFT1_A {
        match self.bits {
            false => RSHIFT1_A::Disabled,
            true => RSHIFT1_A::Enabled,
        }
    }
    #[doc = "Right-shifting disabled"]
    #[inline(always)]
    pub fn is_disabled(&self) -> bool {
        *self == RSHIFT1_A::Disabled
    }
    #[doc = "Data is right-shifted 1-bit"]
    #[inline(always)]
    pub fn is_enabled(&self) -> bool {
        *self == RSHIFT1_A::Enabled
    }
}
#[doc = "Field `RSHIFT1` writer - Right-shift data after Offset 1 correction"]
pub type RSHIFT1_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, RSHIFT1_A>;
impl<'a, REG, const O: u8> RSHIFT1_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "Right-shifting disabled"]
    #[inline(always)]
    pub fn disabled(self) -> &'a mut crate::W<REG> {
        self.variant(RSHIFT1_A::Disabled)
    }
    #[doc = "Data is right-shifted 1-bit"]
    #[inline(always)]
    pub fn enabled(self) -> &'a mut crate::W<REG> {
        self.variant(RSHIFT1_A::Enabled)
    }
}
#[doc = "Field `RSHIFT2` reader - Right-shift data after Offset 2 correction"]
pub use RSHIFT1_R as RSHIFT2_R;
#[doc = "Field `RSHIFT3` reader - Right-shift data after Offset 3 correction"]
pub use RSHIFT1_R as RSHIFT3_R;
#[doc = "Field `RSHIFT4` reader - Right-shift data after Offset 4 correction"]
pub use RSHIFT1_R as RSHIFT4_R;
#[doc = "Field `RSHIFT2` writer - Right-shift data after Offset 2 correction"]
pub use RSHIFT1_W as RSHIFT2_W;
#[doc = "Field `RSHIFT3` writer - Right-shift data after Offset 3 correction"]
pub use RSHIFT1_W as RSHIFT3_W;
#[doc = "Field `RSHIFT4` writer - Right-shift data after Offset 4 correction"]
pub use RSHIFT1_W as RSHIFT4_W;
// OSVR is a 10-bit ratio and LSHIFT a 4-bit shift factor; both use the *Safe*
// writer variants, i.e. every representable value is accepted without an
// `unsafe` escape hatch.
#[doc = "Field `OSVR` reader - Oversampling ratio"]
pub type OSVR_R = crate::FieldReader<u16>;
#[doc = "Field `OSVR` writer - Oversampling ratio"]
pub type OSVR_W<'a, REG, const O: u8> = crate::FieldWriterSafe<'a, REG, 10, O, u16>;
#[doc = "Field `LSHIFT` reader - Left shift factor"]
pub type LSHIFT_R = crate::FieldReader;
#[doc = "Field `LSHIFT` writer - Left shift factor"]
pub type LSHIFT_W<'a, REG, const O: u8> = crate::FieldWriterSafe<'a, REG, 4, O>;
// Read-side accessors: each getter masks/shifts the raw 32-bit register value
// at the bit offset stated in its doc comment.
impl R {
    #[doc = "Bit 0 - ADC oversampler enable on scope ADC group regular"]
    #[inline(always)]
    pub fn rovse(&self) -> ROVSE_R {
        ROVSE_R::new((self.bits & 1) != 0)
    }
    #[doc = "Bit 1 - ADC oversampler enable on scope ADC group injected"]
    #[inline(always)]
    pub fn jovse(&self) -> JOVSE_R {
        JOVSE_R::new(((self.bits >> 1) & 1) != 0)
    }
    #[doc = "Bits 5:8 - ADC oversampling shift"]
    #[inline(always)]
    pub fn ovss(&self) -> OVSS_R {
        OVSS_R::new(((self.bits >> 5) & 0x0f) as u8)
    }
    #[doc = "Bit 9 - ADC oversampling discontinuous mode (triggered mode) for ADC group regular"]
    #[inline(always)]
    pub fn trovs(&self) -> TROVS_R {
        TROVS_R::new(((self.bits >> 9) & 1) != 0)
    }
    #[doc = "Bit 10 - Regular Oversampling mode"]
    #[inline(always)]
    pub fn rovsm(&self) -> ROVSM_R {
        ROVSM_R::new(((self.bits >> 10) & 1) != 0)
    }
    #[doc = "Bit 11 - Right-shift data after Offset 1 correction"]
    #[inline(always)]
    pub fn rshift1(&self) -> RSHIFT1_R {
        RSHIFT1_R::new(((self.bits >> 11) & 1) != 0)
    }
    #[doc = "Bit 12 - Right-shift data after Offset 2 correction"]
    #[inline(always)]
    pub fn rshift2(&self) -> RSHIFT2_R {
        RSHIFT2_R::new(((self.bits >> 12) & 1) != 0)
    }
    #[doc = "Bit 13 - Right-shift data after Offset 3 correction"]
    #[inline(always)]
    pub fn rshift3(&self) -> RSHIFT3_R {
        RSHIFT3_R::new(((self.bits >> 13) & 1) != 0)
    }
    #[doc = "Bit 14 - Right-shift data after Offset 4 correction"]
    #[inline(always)]
    pub fn rshift4(&self) -> RSHIFT4_R {
        RSHIFT4_R::new(((self.bits >> 14) & 1) != 0)
    }
    #[doc = "Bits 16:25 - Oversampling ratio"]
    #[inline(always)]
    pub fn osvr(&self) -> OSVR_R {
        OSVR_R::new(((self.bits >> 16) & 0x03ff) as u16)
    }
    #[doc = "Bits 28:31 - Left shift factor"]
    #[inline(always)]
    pub fn lshift(&self) -> LSHIFT_R {
        LSHIFT_R::new(((self.bits >> 28) & 0x0f) as u8)
    }
}
// Write-side accessors: each method returns a field-writer proxy bound to the
// field's bit offset (the const generic argument). `#[must_use]` because the
// proxy does nothing until a variant/bits method is called on it.
impl W {
    #[doc = "Bit 0 - ADC oversampler enable on scope ADC group regular"]
    #[inline(always)]
    #[must_use]
    pub fn rovse(&mut self) -> ROVSE_W<CFGR2_SPEC, 0> {
        ROVSE_W::new(self)
    }
    #[doc = "Bit 1 - ADC oversampler enable on scope ADC group injected"]
    #[inline(always)]
    #[must_use]
    pub fn jovse(&mut self) -> JOVSE_W<CFGR2_SPEC, 1> {
        JOVSE_W::new(self)
    }
    #[doc = "Bits 5:8 - ADC oversampling shift"]
    #[inline(always)]
    #[must_use]
    pub fn ovss(&mut self) -> OVSS_W<CFGR2_SPEC, 5> {
        OVSS_W::new(self)
    }
    #[doc = "Bit 9 - ADC oversampling discontinuous mode (triggered mode) for ADC group regular"]
    #[inline(always)]
    #[must_use]
    pub fn trovs(&mut self) -> TROVS_W<CFGR2_SPEC, 9> {
        TROVS_W::new(self)
    }
    #[doc = "Bit 10 - Regular Oversampling mode"]
    #[inline(always)]
    #[must_use]
    pub fn rovsm(&mut self) -> ROVSM_W<CFGR2_SPEC, 10> {
        ROVSM_W::new(self)
    }
    #[doc = "Bit 11 - Right-shift data after Offset 1 correction"]
    #[inline(always)]
    #[must_use]
    pub fn rshift1(&mut self) -> RSHIFT1_W<CFGR2_SPEC, 11> {
        RSHIFT1_W::new(self)
    }
    #[doc = "Bit 12 - Right-shift data after Offset 2 correction"]
    #[inline(always)]
    #[must_use]
    pub fn rshift2(&mut self) -> RSHIFT2_W<CFGR2_SPEC, 12> {
        RSHIFT2_W::new(self)
    }
    #[doc = "Bit 13 - Right-shift data after Offset 3 correction"]
    #[inline(always)]
    #[must_use]
    pub fn rshift3(&mut self) -> RSHIFT3_W<CFGR2_SPEC, 13> {
        RSHIFT3_W::new(self)
    }
    #[doc = "Bit 14 - Right-shift data after Offset 4 correction"]
    #[inline(always)]
    #[must_use]
    pub fn rshift4(&mut self) -> RSHIFT4_W<CFGR2_SPEC, 14> {
        RSHIFT4_W::new(self)
    }
    #[doc = "Bits 16:25 - Oversampling ratio"]
    #[inline(always)]
    #[must_use]
    pub fn osvr(&mut self) -> OSVR_W<CFGR2_SPEC, 16> {
        OSVR_W::new(self)
    }
    #[doc = "Bits 28:31 - Left shift factor"]
    #[inline(always)]
    #[must_use]
    pub fn lshift(&mut self) -> LSHIFT_W<CFGR2_SPEC, 28> {
        LSHIFT_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    // `unsafe` because raw bits bypass the per-field valid-value checks.
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "ADC configuration register 2\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`cfgr2::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`cfgr2::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct CFGR2_SPEC;
impl crate::RegisterSpec for CFGR2_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`cfgr2::R`](R) reader structure"]
impl crate::Readable for CFGR2_SPEC {}
#[doc = "`write(|w| ..)` method takes [`cfgr2::W`](W) writer structure"]
impl crate::Writable for CFGR2_SPEC {
    // Both bitmaps are zero: per the SVD, no field of CFGR2 needs a fixed
    // 0/1 written to stay unmodified (no w1c/w1s-style bits here).
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets CFGR2 to value 0"]
impl crate::Resettable for CFGR2_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
// Copyright 2021. The Tari Project
// SPDX-License-Identifier: BSD-3-Clause
use alloc::vec::Vec;
use core::{
cmp::Ordering,
hash::{Hash, Hasher},
ops::{Add, Mul},
};
use snafu::prelude::*;
use tari_utilities::{ByteArray, ByteArrayError};
use crate::{
commitment::{HomomorphicCommitment, HomomorphicCommitmentFactory},
keys::{PublicKey, SecretKey},
};
/// An error when creating a commitment signature
#[derive(Clone, Debug, Snafu, PartialEq, Eq)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
#[allow(missing_docs)]
pub enum CommitmentSignatureError {
    // Raised when the challenge bytes cannot be reduced to a valid secret key.
    #[snafu(display("An invalid challenge was provided"))]
    InvalidChallenge,
}
/// # Commitment Signatures
///
/// Find out more about Commitment signatures [here](https://eprint.iacr.org/2020/061.pdf) and
/// [here](https://documents.uow.edu.au/~wsusilo/ZCMS_IJNS08.pdf).
///
/// In short, a Commitment Signature is made up of the tuple _(R, u, v)_, where _R_ is a random Pedersen commitment (of
/// two secret nonces) and _u_ and _v_ are the two publicly known private signature keys. It demonstrates ownership of
/// a specific commitment.
///
/// The Commitment Signature signs a challenge with the value commitment's value and blinding factor. The two nonces
/// should be completely random and never reused - that responsibility lies with the calling function.
/// C = a*H + x*G ... (Pedersen commitment to the value 'a' using blinding factor 'x')
/// R = k_2*H + k_1*G ... (a public (Pedersen) commitment nonce created with the two random nonces)
/// u = k_1 + e.x ... (the first publicly known private key of the signature signing with 'x')
/// v = k_2 + e.a ... (the second publicly known private key of the signature signing with 'a')
/// signature = (R, u, v) ... (the final signature tuple)
///
/// Verification of the Commitment Signature (R, u, v) entails the following:
/// S = v*H + u*G ... (Pedersen commitment of the publicly known private signature keys)
/// S =? R + e.C ... (final verification)
#[allow(non_snake_case)]
#[derive(Debug, Clone)]
#[cfg_attr(feature = "borsh", derive(borsh::BorshDeserialize, borsh::BorshSerialize))]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub struct CommitmentSignature<P, K> {
    /// The public nonce commitment R = k_2*H + k_1*G.
    public_nonce: HomomorphicCommitment<P>,
    /// The scalar u = k_1 + e.x.
    u: K,
    /// The scalar v = k_2 + e.a.
    v: K,
}
impl<P, K> CommitmentSignature<P, K>
where
    P: PublicKey<K = K>,
    K: SecretKey,
{
    /// Creates a new [CommitmentSignature]
    pub fn new(public_nonce: HomomorphicCommitment<P>, u: K, v: K) -> Self {
        CommitmentSignature { public_nonce, u, v }
    }
    /// This is the left-hand side of the signature verification equation
    pub fn calc_signature_verifier<C>(&self, factory: &C) -> HomomorphicCommitment<P>
    where C: HomomorphicCommitmentFactory<P = P> {
        // v*H + u*G
        factory.commit(&self.u, &self.v)
    }
    /// Sign the provided challenge with the value commitment's value and blinding factor. The two nonces should be
    /// completely random and never reused - that responsibility lies with the calling function.
    ///
    /// WARNING: The provided secret keys and nonces are NOT bound to the challenge. This method assumes that the
    /// challenge has been constructed such that all commitments are already included in the challenge.
    pub fn sign<C>(
        secret_a: &K,
        secret_x: &K,
        nonce_a: &K,
        nonce_x: &K,
        challenge: &[u8],
        factory: &C,
    ) -> Result<Self, CommitmentSignatureError>
    where
        K: Mul<P, Output = P>,
        for<'a> &'a K: Add<&'a K, Output = K>,
        for<'a> &'a K: Mul<&'a K, Output = K>,
        C: HomomorphicCommitmentFactory<P = P>,
    {
        // Reduce the challenge bytes to a scalar `e`; reject if not canonical.
        let e = match K::from_bytes(challenge) {
            Ok(e) => e,
            Err(_) => return Err(CommitmentSignatureError::InvalidChallenge),
        };
        let ea = &e * secret_a;
        let ex = &e * secret_x;
        // v = k_2 + e.a and u = k_1 + e.x, matching the equations in the type docs.
        let v = nonce_a + &ea;
        let u = nonce_x + &ex;
        // R = k_2*H + k_1*G (factory commits value first, then blinding factor).
        let public_commitment_nonce = factory.commit(nonce_x, nonce_a);
        Ok(Self::new(public_commitment_nonce, u, v))
    }
    /// Verify if the commitment signature signed the commitment using the specified challenge (as bytes). If the
    /// provided challenge n bytes cannot be converted to a secret key, this function also returns false.
    pub fn verify_challenge<'a, C>(
        &self,
        public_commitment: &'a HomomorphicCommitment<P>,
        challenge: &[u8],
        factory: &C,
    ) -> bool
    where
        for<'b> &'a HomomorphicCommitment<P>: Mul<&'b K, Output = HomomorphicCommitment<P>>,
        for<'b> &'b HomomorphicCommitment<P>: Add<&'b HomomorphicCommitment<P>, Output = HomomorphicCommitment<P>>,
        C: HomomorphicCommitmentFactory<P = P>,
    {
        let e = match K::from_bytes(challenge) {
            Ok(e) => e,
            // An unparseable challenge is treated as a failed verification, not an error.
            Err(_) => return false,
        };
        self.verify(public_commitment, &e, factory)
    }
    /// Verify if the commitment signature signed the commitment using the specified challenge (as secret key).
    /// v*H + u*G = R + e.C
    pub fn verify<'a, C>(&self, public_commitment: &'a HomomorphicCommitment<P>, challenge: &K, factory: &C) -> bool
    where
        for<'b> &'a HomomorphicCommitment<P>: Mul<&'b K, Output = HomomorphicCommitment<P>>,
        for<'b> &'b HomomorphicCommitment<P>: Add<&'b HomomorphicCommitment<P>, Output = HomomorphicCommitment<P>>,
        C: HomomorphicCommitmentFactory<P = P>,
    {
        // v*H + u*G
        let lhs = self.calc_signature_verifier(factory);
        // R + e.C
        let rhs = &self.public_nonce + &(public_commitment * challenge);
        // Implementors should make this a constant time comparison
        lhs == rhs
    }
    /// This function returns the complete signature tuple (R, u, v)
    pub fn complete_signature_tuple(&self) -> (&HomomorphicCommitment<P>, &K, &K) {
        (&self.public_nonce, &self.u, &self.v)
    }
    /// This function returns the first publicly known private key of the signature tuple (u)
    pub fn u(&self) -> &K {
        &self.u
    }
    /// This function returns the second publicly known private key of the signature tuple (v)
    pub fn v(&self) -> &K {
        &self.v
    }
    /// This function returns the public commitment public_nonce of the signature tuple (R)
    pub fn public_nonce(&self) -> &HomomorphicCommitment<P> {
        &self.public_nonce
    }
    /// Returns a canonical byte representation of the commitment signature
    /// Layout: R || u || v.
    pub fn to_vec(&self) -> Vec<u8> {
        let mut buf = Vec::with_capacity(P::key_length() + K::key_length() + K::key_length());
        buf.extend_from_slice(self.public_nonce().as_bytes());
        buf.extend_from_slice(self.u().as_bytes());
        buf.extend_from_slice(self.v().as_bytes());
        buf
    }
    /// From a canonical byte representation, retrieves a commitment signature
    // NOTE(review): this uses `P::KEY_LEN` while `to_vec` uses `P::key_length()` —
    // presumably equal; confirm against the `ByteArray` trait.
    pub fn from_bytes(buf: &[u8]) -> Result<Self, ByteArrayError> {
        if buf.len() != P::KEY_LEN + 2 * K::key_length() {
            return Err(ByteArrayError::IncorrectLength {});
        }
        // Parse in the same R || u || v order that `to_vec` writes.
        let public_nonce = HomomorphicCommitment::from_public_key(&P::from_bytes(&buf[0..P::KEY_LEN])?);
        let u = K::from_bytes(&buf[P::KEY_LEN..P::KEY_LEN + K::key_length()])?;
        let v = K::from_bytes(&buf[P::KEY_LEN + K::key_length()..P::KEY_LEN + 2 * K::key_length()])?;
        Ok(Self { public_nonce, u, v })
    }
}
impl<'a, 'b, P, K> Add<&'b CommitmentSignature<P, K>> for &'a CommitmentSignature<P, K>
where
    P: PublicKey<K = K>,
    &'a HomomorphicCommitment<P>: Add<&'b HomomorphicCommitment<P>, Output = HomomorphicCommitment<P>>,
    K: SecretKey,
    &'a K: Add<&'b K, Output = K>,
{
    type Output = CommitmentSignature<P, K>;

    /// Component-wise addition of two signatures: (R1 + R2, u1 + u2, v1 + v2).
    fn add(self, rhs: &'b CommitmentSignature<P, K>) -> CommitmentSignature<P, K> {
        let nonce = self.public_nonce() + rhs.public_nonce();
        let u = self.u() + rhs.u();
        let v = self.v() + rhs.v();
        CommitmentSignature::new(nonce, u, v)
    }
}
impl<'a, P, K> Add<CommitmentSignature<P, K>> for &'a CommitmentSignature<P, K>
where
    P: PublicKey<K = K>,
    for<'b> &'a HomomorphicCommitment<P>: Add<&'b HomomorphicCommitment<P>, Output = HomomorphicCommitment<P>>,
    K: SecretKey,
    for<'b> &'a K: Add<&'b K, Output = K>,
{
    type Output = CommitmentSignature<P, K>;

    /// Same component-wise addition as the by-reference impl, taking `rhs` by value.
    fn add(self, rhs: CommitmentSignature<P, K>) -> CommitmentSignature<P, K> {
        let nonce = self.public_nonce() + rhs.public_nonce();
        let u = self.u() + rhs.u();
        let v = self.v() + rhs.v();
        CommitmentSignature::new(nonce, u, v)
    }
}
impl<P, K> Default for CommitmentSignature<P, K>
where
    P: PublicKey<K = K>,
    K: SecretKey,
{
    /// The all-default signature: default commitment for R, default scalars for u and v.
    fn default() -> Self {
        Self::new(
            HomomorphicCommitment::<P>::default(),
            K::default(),
            K::default(),
        )
    }
}
/// Ordering for commitment signatures. Implementing `Ord` on secret scalars is
/// normally a bad idea, but here the tuple is public, so we order by the public
/// nonce first and fall back to the canonical byte representation of `u`, then
/// `v`, only when the earlier components compare equal.
impl<P, K> Ord for CommitmentSignature<P, K>
where
    P: PublicKey<K = K>,
    K: SecretKey,
{
    fn cmp(&self, other: &Self) -> Ordering {
        // `then_with` short-circuits: byte comparisons run only on ties.
        self.public_nonce()
            .cmp(other.public_nonce())
            .then_with(|| self.u().as_bytes().cmp(other.u().as_bytes()))
            .then_with(|| self.v().as_bytes().cmp(other.v().as_bytes()))
    }
}
impl<P, K> PartialOrd for CommitmentSignature<P, K>
where
    P: PublicKey<K = K>,
    K: SecretKey,
{
    /// Delegates to the total order defined by [`Ord`].
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(Ord::cmp(self, other))
    }
}
impl<P, K> PartialEq for CommitmentSignature<P, K>
where
    P: PublicKey<K = K>,
    K: SecretKey,
{
    /// Two signatures are equal when all three tuple components match.
    fn eq(&self, other: &Self) -> bool {
        let nonces_equal = self.public_nonce() == other.public_nonce();
        nonces_equal && self.u() == other.u() && self.v() == other.v()
    }
}
// Marker impl: the component-wise `PartialEq` above is a total equivalence, so
// `Eq` is sound.
impl<P, K> Eq for CommitmentSignature<P, K>
where
    P: PublicKey<K = K>,
    K: SecretKey,
{
}
impl<P, K> Hash for CommitmentSignature<P, K>
where
    P: PublicKey<K = K>,
    K: SecretKey,
{
    /// Hashes the canonical byte encoding (R || u || v), keeping `Hash`
    /// consistent with `PartialEq`.
    fn hash<H: Hasher>(&self, state: &mut H) {
        let encoded = self.to_vec();
        state.write(&encoded)
    }
}
|
use std::cmp::Ordering::{self, *};
use cc_traits::Iter;
use crate::{IsBot, IsTop, LatticeFrom, LatticeOrd, Merge};
/// Vec-union compound lattice.
///
/// Contains any number of `Lat` sub-lattices. Sub-lattices are indexed starting at zero, merging
/// combines corresponding sub-lattices and keeps any excess.
///
/// Similar to [`MapUnion<<usize, Lat>>`](super::map_union::MapUnion) but requires the key indices
/// start with `0`, `1`, `2`, etc: i.e. integers starting at zero with no gaps.
#[derive(Clone, Debug, Eq)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub struct VecUnion<Lat> {
    // Sub-lattice at index i corresponds to "key" i; the vec never has gaps.
    vec: Vec<Lat>,
}
impl<Lat> VecUnion<Lat> {
    /// Wrap an existing `Vec` of `Lat` sub-lattices.
    pub fn new(vec: Vec<Lat>) -> Self {
        Self { vec }
    }

    /// Build a `VecUnion` from anything convertible into a `Vec<Lat>`.
    pub fn new_from(vec: impl Into<Vec<Lat>>) -> Self {
        Self::new(vec.into())
    }

    /// Borrow the underlying `Vec` immutably.
    pub fn as_reveal_ref(&self) -> &Vec<Lat> {
        &self.vec
    }

    /// Borrow the underlying `Vec` mutably.
    pub fn as_reveal_mut(&mut self) -> &mut Vec<Lat> {
        &mut self.vec
    }

    /// Consume `self`, returning the underlying `Vec`.
    pub fn into_reveal(self) -> Vec<Lat> {
        self.vec
    }
}
impl<Lat> Default for VecUnion<Lat> {
    /// The empty union — the lattice bottom (see `IsBot`).
    fn default() -> Self {
        Self::new(Vec::new())
    }
}
impl<LatSelf, LatOther> Merge<VecUnion<LatOther>> for VecUnion<LatSelf>
where
    LatSelf: Merge<LatOther> + LatticeFrom<LatOther>,
{
    /// Pairwise-merge the sub-lattices; any excess tail from the longer side is
    /// kept. Returns `true` iff `self` changed.
    fn merge(&mut self, mut other: VecUnion<LatOther>) -> bool {
        let own_len = self.vec.len();
        let mut changed = false;
        if own_len < other.vec.len() {
            // `other` is longer: convert and append its surplus tail first.
            let surplus = other.vec.drain(own_len..).map(LatSelf::lattice_from);
            self.vec.extend(surplus);
            changed = true;
        }
        // Merge the overlapping prefix element by element.
        for (mine, theirs) in self.vec.iter_mut().zip(other.vec) {
            changed |= mine.merge(theirs);
        }
        changed
    }
}
impl<LatSelf, LatOther> LatticeFrom<VecUnion<LatOther>> for VecUnion<LatSelf>
where
    LatSelf: LatticeFrom<LatOther>,
{
    /// Convert element-wise, preserving index positions.
    fn lattice_from(other: VecUnion<LatOther>) -> Self {
        let converted: Vec<LatSelf> = other.vec.into_iter().map(LatSelf::lattice_from).collect();
        Self::new(converted)
    }
}
impl<LatSelf, LatOther> PartialEq<VecUnion<LatOther>> for VecUnion<LatSelf>
where
    LatSelf: PartialEq<LatOther>,
{
    /// Equal iff the lengths match and every corresponding pair is equal.
    fn eq(&self, other: &VecUnion<LatOther>) -> bool {
        self.vec.len() == other.vec.len()
            && self
                .vec
                .iter()
                .zip(other.vec.iter())
                .all(|(lhs, rhs)| lhs == rhs)
    }
}
impl<LatSelf, LatOther> PartialOrd<VecUnion<LatOther>> for VecUnion<LatSelf>
where
    LatSelf: PartialOrd<LatOther>,
{
    // Lattice partial order: `self >= other` iff self dominates element-wise
    // (extra elements count as "greater"). If each side dominates somewhere,
    // the two values are incomparable (`None`).
    fn partial_cmp(&self, other: &VecUnion<LatOther>) -> Option<Ordering> {
        let (self_len, other_len) = (self.vec.len(), other.vec.len());
        // A longer vec already dominates on its surplus indices.
        let mut self_any_greater = other_len < self_len;
        let mut other_any_greater = self_len < other_len;
        for (self_val, other_val) in self.vec.iter().zip(other.vec.iter()) {
            match self_val.partial_cmp(other_val) {
                None => {
                    // Incomparable sub-lattices make the whole pair incomparable.
                    return None;
                }
                Some(Less) => {
                    other_any_greater = true;
                }
                Some(Greater) => {
                    self_any_greater = true;
                }
                Some(Equal) => {}
            }
            // Early exit: once both directions have "won" somewhere, the result
            // can only be incomparable.
            if self_any_greater && other_any_greater {
                return None;
            }
        }
        match (self_any_greater, other_any_greater) {
            (true, false) => Some(Greater),
            (false, true) => Some(Less),
            (false, false) => Some(Equal),
            // We check this one after each loop iteration above.
            (true, true) => unreachable!(),
        }
    }
}
// Marker impl asserting the `PartialOrd` above is the lattice order.
impl<LatSelf, LatOther> LatticeOrd<VecUnion<LatOther>> for VecUnion<LatSelf> where
    Self: PartialOrd<VecUnion<LatOther>>
{
}
impl<Lat> IsBot for VecUnion<Lat> {
    // Bottom is the empty union: every other value dominates it under the
    // element-wise order above.
    fn is_bot(&self) -> bool {
        self.vec.is_empty()
    }
}
impl<Lat> IsTop for VecUnion<Lat> {
    // There is no top: a longer vec always dominates, so no value is maximal.
    fn is_top(&self) -> bool {
        false
    }
}
#[cfg(test)]
mod test {
    use std::collections::HashSet;
    use super::*;
    use crate::set_union::SetUnionHashSet;
    use crate::test::{cartesian_power, check_all};
    use crate::Max;

    // Spot-checks that merge reports "changed" exactly when self grows or a
    // sub-lattice value increases, and "unchanged" on re-merging the same input.
    #[test]
    fn basic() {
        let mut my_vec_a = VecUnion::<Max<usize>>::default();
        let my_vec_b = VecUnion::new(vec![Max::new(9), Max::new(4), Max::new(5)]);
        let my_vec_c = VecUnion::new(vec![Max::new(2), Max::new(5)]);
        assert!(my_vec_a.merge(my_vec_b.clone()));
        assert!(!my_vec_a.merge(my_vec_b));
        assert!(my_vec_a.merge(my_vec_c.clone()));
        assert!(!my_vec_a.merge(my_vec_c));
    }

    // Exhaustively checks the lattice laws over all VecUnions of length 0..=2
    // built from the four subsets of {0, 1}.
    #[test]
    fn consistency() {
        let mut test_vec = vec![VecUnion::new(vec![] as Vec<SetUnionHashSet<_>>)];
        let vals = [vec![], vec![0], vec![1], vec![0, 1]]
            .map(HashSet::from_iter)
            .map(SetUnionHashSet::new);
        test_vec.extend(
            cartesian_power::<_, 1>(&vals)
                .map(|row| VecUnion::new(row.into_iter().cloned().collect())),
        );
        test_vec.extend(
            cartesian_power::<_, 2>(&vals)
                .map(|row| VecUnion::new(row.into_iter().cloned().collect())),
        );
        check_all(&test_vec);
    }
}
|
extern crate time;
extern crate rustlsystem;
mod app;
use app::Application;
// Entry point: all work is delegated to the `app` module.
fn main() {
    Application::run();
}
|
use x86_64::structures::paging::FrameAllocator;
use x86_64::structures::paging::Size4KiB;
use x86_64::structures::paging::PhysFrame;
use x86_64::structures::paging::Page;
use x86_64::VirtAddr;
/// Wrap an iterator of usable physical frames in a bump-style frame allocator.
/// Frames are handed out in iterator order and never reclaimed.
pub fn init_frame_allocator<T: Iterator<Item = PhysFrame>>(frames: T) -> BumpFrameAllocator<impl Iterator<Item = PhysFrame>> {
    BumpFrameAllocator { frames }
}
/// Create a bump page allocator whose cursor starts at `start_address`.
/// `start_address` should be 4 KiB-aligned — `allocate_page` panics otherwise.
pub fn init_page_allocator(start_address: u64) -> BumpPageAllocator {
    BumpPageAllocator {
        addr: VirtAddr::new(start_address)
    }
}
/// Monotonically advancing virtual-page allocator: never frees, only bumps.
pub struct BumpPageAllocator {
    // Next virtual address to hand out; advanced by 0x1000 per allocation.
    addr: VirtAddr
}
/// Source of fresh 4 KiB virtual pages.
pub trait PageAllocator {
    /// Return the next unused page. May panic if no valid page can be produced.
    fn allocate_page(&mut self) -> Page<Size4KiB>;
}
impl PageAllocator for BumpPageAllocator {
    /// Hand out the page at the current cursor, then advance the cursor by 4 KiB.
    fn allocate_page(&mut self) -> Page<Size4KiB> {
        let page = Page::from_start_address(self.addr);
        self.addr = self.addr + 0x1000u64;
        // Panics if the cursor was not 4 KiB-aligned.
        page.unwrap()
    }
}
/// A frame allocator that never yields a frame — useful as a placeholder
/// before real memory-map information is available.
pub struct EmptyFrameAllocator;
// SAFETY: returning `None` trivially satisfies the FrameAllocator contract,
// since no frame is ever handed out.
unsafe impl FrameAllocator<Size4KiB> for EmptyFrameAllocator {
    fn allocate_frame(&mut self) -> Option<PhysFrame> {
        None
    }
}
/// Frame allocator backed by an iterator of usable frames; frames are consumed
/// in order and never returned.
pub struct BumpFrameAllocator<I: Iterator<Item = PhysFrame>> {
    // Remaining frames; exhausted once the iterator yields `None`.
    frames: I
}
unsafe impl<I: Iterator<Item = PhysFrame>> FrameAllocator<Size4KiB> for BumpFrameAllocator<I> {
fn allocate_frame(&mut self) -> Option<PhysFrame> {
self.frames.next()
}
} |
pub mod game;
mod ui;
extern crate rand;
extern crate rustc_serialize;
use std::env::args;
use std::fs::File;
use game::board::Board;
use game::player::Player;
/// Entry point: load a board from the JSON file named on the command line,
/// spawn the player, and start the UI loop. Prints usage if no file is given.
fn main() {
    let mut args = args();
    if let Some(filename) = args.nth(1) {
        // `unwrap_or_else` builds the panic message only on failure;
        // `expect(&format!(..))` formatted it unconditionally (clippy
        // `expect_fun_call`). Including the error also explains *why*.
        let mut file_handle = File::open(&filename)
            .unwrap_or_else(|e| panic!("Unable to open file {}: {}", filename, e));
        let board = Board::build_board(&mut file_handle).unwrap();
        let player = Player::new(board.spawn_location());
        ui::game_loop(player);
    } else {
        println!("Usage: ./main file_name.json | cargo run -- file_name.json");
    }
}
|
extern crate conch_parser;
use conch_parser::ast::builder::*;
use conch_parser::parse::ParseError::*;
use conch_parser::token::Token;
mod parse_support;
use parse_support::*;
#[test]
fn test_subshell_valid() {
    // A subshell body may contain newline- and semicolon-separated commands
    // plus a trailing comment before the closing paren.
    let mut parser = make_parser("( foo\nbar; baz\n#comment\n )");
    let expected = CommandGroup {
        commands: vec![cmd("foo"), cmd("bar"), cmd("baz")],
        trailing_comments: vec![Newline(Some("#comment".into()))],
    };
    assert_eq!(expected, parser.subshell().unwrap());
}
#[test]
fn test_subshell_valid_separator_not_needed() {
    // No separator is required before the closing paren, with or without a
    // trailing comment.
    let expected = CommandGroup {
        commands: vec![cmd("foo")],
        trailing_comments: vec![],
    };
    assert_eq!(expected, make_parser("( foo )").subshell().unwrap());

    let expected_with_comment = CommandGroup {
        commands: vec![cmd("foo")],
        trailing_comments: vec![Newline(Some("#comment".into()))],
    };
    assert_eq!(
        expected_with_comment,
        make_parser("( foo\n#comment\n )").subshell().unwrap()
    );
}
#[test]
fn test_subshell_space_between_parens_not_needed() {
    // Whitespace next to either paren is optional; all three forms must parse.
    for &src in &["(foo )", "( foo)", "(foo)"] {
        make_parser(src).subshell().unwrap();
    }
}
#[test]
fn test_subshell_invalid_missing_keyword() {
    // Missing `)` reports the unmatched `(`; missing `(` reports the first
    // unexpected token instead.
    assert_eq!(Err(Unmatched(Token::ParenOpen, src(0,1,1))), make_parser("( foo\nbar; baz").subshell());
    assert_eq!(Err(Unexpected(Token::Name(String::from("foo")), src(0,1,1))),
        make_parser("foo\nbar; baz; )").subshell());
}
#[test]
fn test_subshell_invalid_quoted() {
    // Quoted parens are ordinary words, not subshell delimiters, so each
    // source must fail with exactly the listed error.
    let cmds = [
        ("'(' foo\nbar; baz; )", Unexpected(Token::SingleQuote, src(0,1,1))),
        ("( foo\nbar; baz; ')'", Unmatched(Token::ParenOpen, src(0,1,1))),
        ("\"(\" foo\nbar; baz; )", Unexpected(Token::DoubleQuote, src(0,1,1))),
        ("( foo\nbar; baz; \")\"", Unmatched(Token::ParenOpen, src(0,1,1))),
    ];
    // Iterate by reference explicitly: `cmds.into_iter()` on an array is
    // edition-dependent (yields references before Rust 2021 but values from
    // 2021 on, which breaks the old `&(c, ref e)` pattern).
    for (c, e) in &cmds {
        match make_parser(c).subshell() {
            Ok(result) => panic!("Unexpectedly parsed \"{}\" as\n{:#?}", c, result),
            Err(ref err) => if err != e {
                panic!("Expected the source \"{}\" to return the error `{:?}`, but got `{:?}`",
                    c, e, err);
            },
        }
    }
}
#[test]
fn test_subshell_invalid_missing_body() {
    // An empty subshell body is rejected at the closing paren.
    let empty_with_newline = make_parser("(\n)").subshell();
    assert_eq!(empty_with_newline, Err(Unexpected(Token::ParenClose, src(2, 2, 1))));
    let empty = make_parser("()").subshell();
    assert_eq!(empty, Err(Unexpected(Token::ParenClose, src(1, 1, 2))));
}
|
use crate::switch::ToCKBCellDataTuple;
use crate::utils::verifier::verify_btc_witness;
use crate::utils::{
config::{PLEDGE, SIGNER_FEE_RATE, SUDT_CODE_HASH, XT_CELL_CAPACITY},
transaction::{is_XT_typescript, XChainKind},
types::{mint_xt_witness::MintXTWitnessReader, Error, ToCKBCellDataView, XExtraView},
};
use ckb_std::{
ckb_constants::Source,
debug,
high_level::{
load_cell_capacity, load_cell_data, load_cell_lock, load_cell_lock_hash, load_cell_type,
load_witness_args, QueryIter,
},
};
use core::result::Result;
use molecule::prelude::{Entity, Reader};
/// Verify the mint-XT state transition.
///
/// Both tuple slots must already be populated on this code path (input and
/// output ToCKB cell data) — the `expect`s encode that invariant. Checks run
/// in order: capacity change, SPV witness, cell-data transition, XT issuance.
pub fn verify(toCKB_data_tuple: &ToCKBCellDataTuple) -> Result<(), Error> {
    debug!("start mint_xt");
    let input_data = toCKB_data_tuple.0.as_ref().expect("should not happen");
    let output_data = toCKB_data_tuple.1.as_ref().expect("should not happen");
    verify_capacity()?;
    // The witness proof yields the chain-specific extra data that the
    // output cell must commit to (checked in verify_data).
    let x_extra = verify_witness(input_data)?;
    debug!("verify witness finish");
    verify_data(input_data, output_data, &x_extra)?;
    debug!("verify data finish");
    verify_xt_issue(input_data)?;
    debug!("verify xt issue finish");
    Ok(())
}
fn verify_data(
input_data: &ToCKBCellDataView,
output_data: &ToCKBCellDataView,
x_extra: &XExtraView,
) -> Result<(), Error> {
if input_data.signer_lockscript != output_data.signer_lockscript
|| input_data.user_lockscript != output_data.user_lockscript
|| input_data.get_raw_lot_size() != output_data.get_raw_lot_size()
|| input_data.x_lock_address != output_data.x_lock_address
|| &output_data.x_extra != x_extra
{
return Err(Error::InvalidDataChange);
}
Ok(())
}
/// Ensure the transfer happened on the X-chain by verifying the SPV proof.
///
/// On success, returns the chain-specific extra data recovered from the
/// witness, which the caller checks against the output cell's `x_extra`.
fn verify_witness(data: &ToCKBCellDataView) -> Result<XExtraView, Error> {
    // The proof rides in the `input_type` field of the first group-input witness.
    let witness_args = load_witness_args(0, Source::GroupInput)?.input_type();
    debug!("witness_args: {:?}", &witness_args);
    if witness_args.is_none() {
        return Err(Error::InvalidWitness);
    }
    let witness_args = witness_args.to_opt().unwrap().raw_data();
    debug!("witness_args parsed: {:?}", &witness_args);
    // Validate the molecule encoding before taking the unchecked view below.
    if MintXTWitnessReader::verify(&witness_args, false).is_err() {
        return Err(Error::InvalidWitness);
    }
    let witness = MintXTWitnessReader::new_unchecked(&witness_args);
    debug!("witness: {:?}", witness);
    let proof = witness.spv_proof().raw_data();
    let cell_dep_index_list = witness.cell_dep_index_list().raw_data();
    match data.get_xchain_kind() {
        XChainKind::Btc => {
            // Check the BTC SPV proof against the recorded lock address and
            // the sUDT amount implied by the lot size.
            let btc_extra = verify_btc_witness(
                data,
                proof,
                cell_dep_index_list,
                data.x_lock_address.as_ref(),
                data.get_btc_lot_size()?.get_sudt_amount(),
                false,
            )?;
            Ok(XExtraView::Btc(btc_extra))
        }
        // ETH support is not implemented yet.
        XChainKind::Eth => todo!(),
    }
}
/// Dispatch the XT-issuance check by chain kind (ETH not yet implemented).
fn verify_xt_issue(data: &ToCKBCellDataView) -> Result<(), Error> {
    if let XChainKind::Eth = data.get_xchain_kind() {
        todo!()
    }
    verify_btc_xt_issue(data)
}
/// Check the sUDT (XT) cells created by the mint.
///
/// Inputs must contain no XT cell; outputs must contain exactly two, at
/// fixed indices: outputs[1] pays the user (amount minus signer fee) and
/// outputs[2] pays the signer fee.
fn verify_btc_xt_issue(data: &ToCKBCellDataView) -> Result<(), Error> {
    // The ToCKB cell's lock hash doubles as the sUDT owner args (compared
    // against `script.args()` below).
    let lock_hash = load_cell_lock_hash(0, Source::GroupInput)?;
    debug!("lockscript hash: {:?}", hex::encode(lock_hash));
    // No XT typescript may appear among the inputs.
    let input_xt_num = QueryIter::new(load_cell_type, Source::Input)
        .filter(|type_opt| type_opt.is_some())
        .filter(|script| is_XT_typescript(script, lock_hash.as_ref()))
        .count();
    if input_xt_num != 0 {
        return Err(Error::InvalidXTInInputOrOutput);
    }
    // Exactly two XT cells must appear among the outputs.
    let output_xt_num = QueryIter::new(load_cell_type, Source::Output)
        .filter(|type_opt| type_opt.is_some())
        .filter(|script| is_XT_typescript(script, lock_hash.as_ref()))
        .count();
    debug!("output_xt_num: {}", output_xt_num);
    if output_xt_num != 2 {
        return Err(Error::InvalidXTInInputOrOutput);
    }
    let xt_amount = data.get_btc_lot_size()?.get_sudt_amount();
    debug!("xt_amount: {}", xt_amount);
    // fixed order of output cells is required
    // user-sudt-cell should be outputs[1]
    // signer-sudt-cell should be outputs[2]
    let expect = [
        (
            1,
            data.user_lockscript.as_ref(),
            // User receives the minted amount net of the signer fee.
            xt_amount - xt_amount * SIGNER_FEE_RATE.0 / SIGNER_FEE_RATE.1,
        ),
        (
            2,
            data.signer_lockscript.as_ref(),
            xt_amount * SIGNER_FEE_RATE.0 / SIGNER_FEE_RATE.1,
        ),
    ];
    debug!("expect: {:?}", expect);
    for (i, lockscript, amount) in expect.iter() {
        // Each expected output must carry the sUDT typescript:
        // known code hash, owner-lock-hash args, hash_type 0.
        let script = load_cell_type(*i, Source::Output)?;
        if script.is_none() {
            return Err(Error::InvalidMintOutput);
        }
        let script = script.unwrap();
        if !(script.code_hash().raw_data().as_ref() == SUDT_CODE_HASH.as_ref()
            && script.args().raw_data().as_ref() == lock_hash.as_ref()
            && script.hash_type() == 0u8.into())
        {
            return Err(Error::InvalidMintOutput);
        }
        // Cell data holds the sUDT amount as a 128-bit little-endian integer.
        // NOTE(review): `copy_from_slice` panics unless the data is exactly
        // 16 bytes — confirm that aborting the script is the intended
        // failure mode for malformed data.
        let cell_data = load_cell_data(*i, Source::Output)?;
        let mut amount_vec = [0u8; 16];
        amount_vec.copy_from_slice(&cell_data);
        let token_amount = u128::from_le_bytes(amount_vec);
        debug!("token_amount: {}, amout: {}", token_amount, amount);
        if token_amount != *amount {
            return Err(Error::InvalidMintOutput);
        }
        // The cell's lock must pay the expected party.
        let lock = load_cell_lock(*i, Source::Output)?;
        debug!(
            "lock: {:?}, expect lock: {:?}",
            hex::encode(lock.as_slice()),
            hex::encode(lockscript.as_ref())
        );
        if lock.as_slice() != lockscript.as_ref() {
            return Err(Error::InvalidMintOutput);
        }
    }
    Ok(())
}
fn verify_capacity() -> Result<(), Error> {
let toCKB_output_cap = load_cell_capacity(0, Source::GroupOutput)?;
let toCKB_input_cap = load_cell_capacity(0, Source::GroupInput)?;
if toCKB_input_cap - toCKB_output_cap != PLEDGE + XT_CELL_CAPACITY {
return Err(Error::CapacityInvalid);
}
let user_xt_cell_cap = load_cell_capacity(1, Source::Output)?;
if user_xt_cell_cap != PLEDGE {
return Err(Error::CapacityInvalid);
}
let signer_xt_cell_cap = load_cell_capacity(2, Source::Output)?;
if signer_xt_cell_cap != XT_CELL_CAPACITY {
return Err(Error::CapacityInvalid);
}
Ok(())
}
|
// svd2rust-generated aliases for the SAI channel-A clear-flag register.
// Only writer types are generated here (flags are cleared by writing 1;
// this chunk defines no reader side).
#[doc = "Register `SAI_ACLRFR` writer"]
pub type W = crate::W<SAI_ACLRFR_SPEC>;
#[doc = "Field `COVRUDR` writer - COVRUDR"]
pub type COVRUDR_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `CMUTEDET` writer - CMUTEDET"]
pub type CMUTEDET_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `CWCKCFG` writer - CWCKCFG"]
pub type CWCKCFG_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `CCNRDY` writer - CCNRDY"]
pub type CCNRDY_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `CAFSDET` writer - CAFSDET"]
pub type CAFSDET_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `CLFSDET` writer - CLFSDET"]
pub type CLFSDET_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
impl W {
    // NOTE(review): svd2rust-generated accessors; bit offsets come from the
    // SVD. Bit 3 has no field here — presumably reserved in the hardware.
    #[doc = "Bit 0 - COVRUDR"]
    #[inline(always)]
    #[must_use]
    pub fn covrudr(&mut self) -> COVRUDR_W<SAI_ACLRFR_SPEC, 0> {
        COVRUDR_W::new(self)
    }
    #[doc = "Bit 1 - CMUTEDET"]
    #[inline(always)]
    #[must_use]
    pub fn cmutedet(&mut self) -> CMUTEDET_W<SAI_ACLRFR_SPEC, 1> {
        CMUTEDET_W::new(self)
    }
    #[doc = "Bit 2 - CWCKCFG"]
    #[inline(always)]
    #[must_use]
    pub fn cwckcfg(&mut self) -> CWCKCFG_W<SAI_ACLRFR_SPEC, 2> {
        CWCKCFG_W::new(self)
    }
    #[doc = "Bit 4 - CCNRDY"]
    #[inline(always)]
    #[must_use]
    pub fn ccnrdy(&mut self) -> CCNRDY_W<SAI_ACLRFR_SPEC, 4> {
        CCNRDY_W::new(self)
    }
    #[doc = "Bit 5 - CAFSDET"]
    #[inline(always)]
    #[must_use]
    pub fn cafsdet(&mut self) -> CAFSDET_W<SAI_ACLRFR_SPEC, 5> {
        CAFSDET_W::new(self)
    }
    #[doc = "Bit 6 - CLFSDET"]
    #[inline(always)]
    #[must_use]
    pub fn clfsdet(&mut self) -> CLFSDET_W<SAI_ACLRFR_SPEC, 6> {
        CLFSDET_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    // Safety: the caller must guarantee the raw value is valid for this
    // register (standard svd2rust contract for `bits`).
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "Clear flag register\n\nYou can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`sai_aclrfr::W`](W). See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct SAI_ACLRFR_SPEC;
impl crate::RegisterSpec for SAI_ACLRFR_SPEC {
    // 32-bit wide register.
    type Ux = u32;
}
#[doc = "`write(|w| ..)` method takes [`sai_aclrfr::W`](W) writer structure"]
impl crate::Writable for SAI_ACLRFR_SPEC {
    // NOTE(review): both bitmaps are zero, i.e. no field is forced to a
    // fixed value on modify — matches svd2rust's Writable contract; confirm
    // against the generator version in use.
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets SAI_ACLRFR to value 0"]
impl crate::Resettable for SAI_ACLRFR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
#[macro_use]
extern crate derive_more;
pub mod dmu_stream;
pub mod fletcher4;
pub mod lsm;
pub mod split_tree;
|
use super::error::Result;
use super::{error, Walker};
use rio_api::parser::TriplesParser;
use rio_turtle::TurtleParser;
use skorm_store::RdfStore;
use std::fs::OpenOptions;
use std::io::BufReader;
use tracing::{debug, span, Level};
/// Parse every Turtle document found by `walker` into a single `RdfStore`.
///
/// The prefix (CURIE) store built from the walker is shared with the RDF
/// store's mapper, and each document gets a base of `<root><dir-rel>/`.
pub fn build_rdf_store(walker: &Walker) -> Result<RdfStore> {
    let curie = walker.build_prefix_store()?;
    let mut store = RdfStore::with_mapper(curie.clone());
    for doc in walker.docs()? {
        let _span = span!(Level::DEBUG, "walker.doc", doc = %doc.doc.rel);
        let _enter = _span.enter();
        // Base IRI for this document: root + relative dir + trailing slash.
        let mut base = String::with_capacity(walker.root.len() + doc.dir.rel.len() + 1);
        base.push_str(&walker.root);
        base.push_str(&doc.dir.rel);
        base.push('/');
        debug!(%base);
        {
            let mut fs = OpenOptions::new()
                .read(true)
                .write(false)
                .open(&doc.doc.path)?;
            // NOTE(review): the base/prefixes appear to be applied by
            // `prefix::prefixed` while streaming; TurtleParser itself gets
            // an empty base IRI — confirm that is intentional.
            let prefixed = super::prefix::prefixed(&mut fs, &base, &curie);
            let reader = BufReader::new(prefixed);
            let mut parser = TurtleParser::new(reader, "")?;
            debug!("Parse file");
            // Insert every triple; parsing stops at the first store error.
            parser.parse_all(&mut |triple| {
                debug!(%triple.subject, %triple.predicate, %triple.object);
                store.insert(triple).map_err(error::Error::from)
            })?;
        }
    }
    Ok(store)
}
|
use std::sync::atomic::Ordering;
use counter::{Counter, AtomicCounter};
use sequence::{Sequence, Limit, MultiCache, CacheError, CommitError};
#[derive(Debug, Default)]
pub struct Shared {
    // Cursor handed out to producers; advanced by `incr` in `claim`.
    claimed: AtomicCounter,
    // Committed count; advanced in `commit` and exposed via `counter()`.
    count: AtomicCounter,
}
#[derive(Debug)]
pub struct Cache {
    // Last limit observed from the `Limit`; lets `claim` skip re-reading
    // the shared limit while claims stay below it.
    limit: Counter,
}
impl MultiCache for Shared {}
impl Sequence for Shared {
    type Cache = Cache;
    /// Create a per-producer cache seeded with the current limit; fails
    /// with `SeqClosed` once the shared counter reports closure.
    fn cache<L: Limit>(&self, limit: &L) -> Result<Cache, CacheError> {
        match self.count.fetch() {
            Ok(_) => Ok(Cache {
                limit: limit.count(),
            }),
            Err(_) => Err(CacheError::SeqClosed)
        }
    }
    fn counter(&self) -> &AtomicCounter {
        &self.count
    }
    /// Claim the next slot, or `None` when the sequence is full or closed.
    ///
    /// NOTE(review): this assumes `comp_swap(expected, new, ord)` argument
    /// order — the revert below swaps `claimed + 1` back down to `claimed`;
    /// confirm against `AtomicCounter::comp_swap`.
    fn claim<L: Limit>(&self, cache: &mut Cache, limit: &L) -> Option<Counter> {
        // Increase claimed counter
        let claimed = self.claimed.incr()?;
        loop {
            // Fetch recent limit if cached limit is lower than claimed
            if claimed >= cache.limit {
                let recent_limit = limit.count();
                debug_assert!(recent_limit >= cache.limit);
                cache.limit = recent_limit;
            }
            // Revert if limit is lower than claimed
            if claimed >= cache.limit {
                match self.claimed.comp_swap(claimed + 1, claimed, Ordering::AcqRel) {
                    Ok(()) => return None,
                    Err(prev) => {
                        // Recheck limit if revert is failed
                        debug_assert!(prev.map_or(true, |prev| prev > claimed + 1));
                        continue;
                    }
                }
            }
            return Some(claimed);
        }
    }
    /// Publish a claimed slot. Spins until `count` reaches `count`'s value,
    /// so commits land strictly in claim order; `Err(None)` from the swap
    /// signals the sequence was closed.
    fn commit(&self, _cache: &mut Cache, count: Counter) -> Result<(), CommitError> {
        loop {
            match self.count.comp_swap(count, count + 1, Ordering::AcqRel) {
                Ok(()) => return Ok(()),
                Err(Some(_)) => continue, // Other thread modified it. Retry
                Err(None) => return Err(CommitError), // Sequence closed.
            }
        }
    }
}
|
use anyhow::Error;
use reqwest::Client;
use serde::de::DeserializeOwned;
use telegram_types::bot::{methods::*, types::*};
/// Minimal Telegram Bot API client wrapping a shared `reqwest::Client`.
#[derive(Debug)]
pub struct Bot {
    token: String,
    client: Client,
}
impl Bot {
    /// Create a client for the given bot token.
    pub fn new(token: &str) -> Self {
        Self {
            token: token.to_owned(),
            client: Client::new(),
        }
    }
    /// Send `method` to the Bot API and decode the `TelegramResult<T>`
    /// payload, converting an API-level error into `anyhow::Error`.
    ///
    /// NOTE(review): this issues a GET with a JSON body; the Telegram Bot
    /// API accepts it, but POST is the conventional verb — confirm before
    /// changing.
    async fn make_request<T, M>(&self, method: &M) -> Result<T, Error>
    where
        T: DeserializeOwned,
        M: Method,
    {
        let response = self
            .client
            .get(&M::url(&self.token))
            .json(&method)
            .send()
            .await?;
        // `TelegramResult<T>` -> `Result<T, ApiError>` via its From impl.
        let result: Result<T, ApiError> = response.json::<TelegramResult<T>>().await?.into();
        Ok(result?)
    }
    /// Send an HTML-formatted message to `chat_id` with a single inline
    /// keyboard button ("原始地址" = "original URL") linking to `url`.
    pub async fn send_message(
        &self,
        chat_id: &str,
        text: &str,
        url: &str,
    ) -> Result<Message, Error> {
        let button = ReplyMarkup::InlineKeyboard(InlineKeyboardMarkup {
            inline_keyboard: vec![vec![InlineKeyboardButton {
                text: "原始地址".to_owned(),
                pressed: InlineKeyboardButtonPressed::Url(url.to_owned()),
            }]],
        });
        let message = SendMessage::new(ChatTarget::username(chat_id), text)
            .parse_mode(ParseMode::HTML)
            .reply_markup(button);
        self.make_request::<Message, _>(&message).await
    }
}
|
use lazy_static::lazy_static;
use syntect::highlighting::ThemeSet;
use syntect::parsing::SyntaxSet;
#[derive(Debug)]
pub struct Highlighter {
    // Syntax and theme sets are loaded once and cached in the
    // `lazy_static` inside `highlight_lines`.
    ps: syntect::parsing::SyntaxSet,
    ts: syntect::highlighting::ThemeSet,
}
/// Highlight `code` as HTML using the syntax registered for the file
/// extension `ext` and the "InspiredGitHub" theme.
///
/// Returns `None` when no syntax is known for `ext`.
///
/// Taking `&str` instead of `&String` is strictly more general; existing
/// call sites passing `&String` still compile via deref coercion. This
/// also removes the `&code.as_str()` double-reference.
pub fn highlight_lines(code: &str, ext: &str) -> Option<String> {
    lazy_static! {
        static ref HIGHLIGHTER: Highlighter = Highlighter {
            ps: SyntaxSet::load_defaults_newlines(),
            ts: ThemeSet::load_defaults(),
        };
    }
    let syntax = HIGHLIGHTER.ps.find_syntax_by_extension(ext)?;
    Some(syntect::html::highlighted_html_for_string(
        code,
        &HIGHLIGHTER.ps,
        syntax,
        &HIGHLIGHTER.ts.themes["InspiredGitHub"],
    ))
}
|
mod avatar;
mod badge;
mod bws;
mod common;
mod compare;
mod country_snipe_list;
mod country_snipe_stats;
mod fix_score;
mod graph;
mod leaderboard;
mod map;
mod map_search;
mod match_compare;
mod match_costs;
mod match_live;
mod medal;
mod medal_stats;
mod medals_common;
mod medals_list;
mod medals_missing;
mod most_played;
mod most_played_common;
mod nochoke;
mod osekai_medal_count;
mod osekai_medal_rarity;
mod osustats_counts;
mod osustats_globals;
mod osustats_list;
mod osutracker_countrytop;
mod osutracker_mappers;
mod osutracker_maps;
mod osutracker_mapsets;
mod osutracker_mods;
mod pinned;
mod player_snipe_list;
mod player_snipe_stats;
mod pp_missing;
mod profile;
mod profile_compare;
mod rank;
mod rank_score;
mod ranking;
mod ranking_countries;
mod ratio;
mod recent;
mod recent_list;
mod scores;
mod simulate;
mod sniped;
mod sniped_difference;
mod top;
mod top_if;
mod top_single;
mod whatif;
use std::fmt::Write;
use rosu_pp::Mods;
use rosu_v2::prelude::{Beatmap, GameMode, GameMods};
use crate::util::{datetime::sec_to_minsec, numbers::round, BeatmapExt, ScoreExt};
pub use self::{
avatar::AvatarEmbed,
badge::BadgeEmbed,
bws::BWSEmbed,
common::CommonEmbed,
compare::{CompareEmbed, NoScoresEmbed},
country_snipe_list::CountrySnipeListEmbed,
country_snipe_stats::CountrySnipeStatsEmbed,
fix_score::FixScoreEmbed,
graph::GraphEmbed,
leaderboard::LeaderboardEmbed,
map::MapEmbed,
map_search::MapSearchEmbed,
match_compare::{MatchCompareMapEmbed, MatchCompareSummaryEmbed},
match_costs::MatchCostEmbed,
match_live::{MatchLiveEmbed, MatchLiveEmbeds},
medal::MedalEmbed,
medal_stats::MedalStatsEmbed,
medals_common::{MedalsCommonEmbed, MedalsCommonUser},
medals_list::MedalsListEmbed,
medals_missing::MedalsMissingEmbed,
most_played::MostPlayedEmbed,
most_played_common::MostPlayedCommonEmbed,
nochoke::NoChokeEmbed,
osekai_medal_count::MedalCountEmbed,
osekai_medal_rarity::MedalRarityEmbed,
osustats_counts::OsuStatsCountsEmbed,
osustats_globals::OsuStatsGlobalsEmbed,
osustats_list::OsuStatsListEmbed,
osutracker_countrytop::OsuTrackerCountryTopEmbed,
osutracker_mappers::OsuTrackerMappersEmbed,
osutracker_maps::OsuTrackerMapsEmbed,
osutracker_mapsets::OsuTrackerMapsetsEmbed,
osutracker_mods::OsuTrackerModsEmbed,
pinned::PinnedEmbed,
player_snipe_list::PlayerSnipeListEmbed,
player_snipe_stats::PlayerSnipeStatsEmbed,
pp_missing::PPMissingEmbed,
profile::ProfileEmbed,
profile_compare::ProfileCompareEmbed,
rank::RankEmbed,
rank_score::RankRankedScoreEmbed,
ranking::*,
ranking_countries::RankingCountriesEmbed,
ratio::RatioEmbed,
recent::RecentEmbed,
recent_list::RecentListEmbed,
scores::ScoresEmbed,
simulate::SimulateEmbed,
sniped::SnipedEmbed,
sniped_difference::SnipedDiffEmbed,
top::{OrderAppendix, TopEmbed},
top_if::TopIfEmbed,
top_single::TopSingleEmbed,
whatif::WhatIfEmbed,
};
/// Format a star rating to two decimals with a trailing star symbol.
pub fn get_stars(stars: f32) -> String {
    let mut buf = String::with_capacity(8);
    let _ = write!(buf, "{stars:.2}★");
    buf
}
/// Prefix a non-empty mod combination with `+`; no mods renders as "".
pub fn get_mods(mods: GameMods) -> String {
    match mods.is_empty() {
        true => String::new(),
        false => format!("+{mods}"),
    }
}
/// Render `**<score combo>x**/<map combo>x`, using `-` when the map's
/// maximum combo is unknown.
pub fn get_combo(score: &dyn ScoreExt, map: &dyn BeatmapExt) -> String {
    let mut combo = format!("**{}x**/", score.max_combo());
    if let Some(amount) = map.max_combo() {
        let _ = write!(combo, "{amount}x");
    } else {
        combo.push('-');
    }
    combo
}
/// Render `**<actual>**/<max>PP`, substituting `-` for missing values.
/// The displayed maximum is never shown below the actual pp value.
pub fn get_pp(actual: Option<f32>, max: Option<f32>) -> String {
    let mut result = String::with_capacity(17);
    result.push_str("**");
    match actual {
        Some(pp) => {
            let _ = write!(result, "{pp:.2}");
        }
        None => result.push('-'),
    }
    result.push_str("**/");
    match max {
        Some(max) => {
            let shown = actual.map_or(max, |pp| pp.max(max));
            let _ = write!(result, "{shown:.2}");
        }
        None => result.push('-'),
    }
    result.push_str("PP");
    result
}
/// Key-count tag for mania: an explicit key mod wins over the map's CS.
pub fn get_keys(mods: GameMods, map: &Beatmap) -> String {
    match mods.has_key_mod() {
        Some(key_mod) => format!("[{key_mod}]"),
        None => format!("[{}K]", map.cs as u32),
    }
}
/// Hit-window bounds in ms for overall difficulty:
/// OD 0 → 80ms, OD 5 → 50ms, OD 10 → 20ms.
const OD_MIN: f32 = 80.0;
const OD_MID: f32 = 50.0;
const OD_MAX: f32 = 20.0;
/// Approach-time bounds in ms: AR 0 → 1800ms, AR 5 → 1200ms, AR 10 → 450ms.
const AR_MIN: f32 = 1800.0;
const AR_MID: f32 = 1200.0;
const AR_MAX: f32 = 450.0;
/// Effective OD after the clock rate shortens the hit window.
pub fn calculate_od(od: f32, clock_rate: f32) -> f32 {
    let ms = difficulty_range(od, OD_MIN, OD_MID, OD_MAX) / clock_rate;
    (OD_MIN - ms) / (OD_MIN - OD_MID) * 5.0
}
/// Effective AR after the clock rate shortens the approach time; the
/// inverse mapping is piecewise linear around AR 5.
pub fn calculate_ar(ar: f32, clock_rate: f32) -> f32 {
    let ms = difficulty_range(ar, AR_MIN, AR_MID, AR_MAX) / clock_rate;
    if ms > AR_MID {
        (AR_MIN - ms) / (AR_MIN - AR_MID) * 5.0
    } else {
        (AR_MID - ms) / (AR_MID - AR_MAX) * 5.0 + 5.0
    }
}
/// Map a 0–10 difficulty onto [min, mid, max] with linear interpolation
/// on either side of the midpoint at 5.
fn difficulty_range(difficulty: f32, min: f32, mid: f32, max: f32) -> f32 {
    match difficulty {
        d if d > 5.0 => mid + (max - mid) * (d - 5.0) / 5.0,
        d if d < 5.0 => mid - (mid - min) * (5.0 - d) / 5.0,
        _ => mid,
    }
}
/// The stars argument must already be adjusted for mods.
///
/// Builds the "Length / BPM / Objects / CS AR OD HP / Stars" summary line
/// for a map, applying HR/EZ stat multipliers and clock-rate rescaling.
pub fn get_map_info(map: &Beatmap, mods: GameMods, stars: f32) -> String {
    // Clock rate derived from the mod bits via the rosu_pp `Mods` trait.
    let clock_rate = mods.bits().clock_rate();
    let mut sec_total = map.seconds_total;
    let mut sec_drain = map.seconds_drain;
    let mut bpm = map.bpm;
    let mut cs = map.cs;
    let mut ar = map.ar;
    let mut od = map.od;
    let mut hp = map.hp;
    // HR scales stats up (CS by 1.3, the rest by 1.4, capped at 10);
    // EZ halves everything.
    if mods.contains(GameMods::HardRock) {
        hp = (hp * 1.4).min(10.0);
        od = (od * 1.4).min(10.0);
        ar = (ar * 1.4).min(10.0);
        cs = (cs * 1.3).min(10.0);
    } else if mods.contains(GameMods::Easy) {
        hp *= 0.5;
        od *= 0.5;
        ar *= 0.5;
        cs *= 0.5;
    }
    // A non-1.0 clock rate rescales BPM, durations, and effective AR/OD.
    if (clock_rate - 1.0).abs() > f64::EPSILON {
        let clock_rate = clock_rate as f32;
        bpm *= clock_rate;
        sec_total = (sec_total as f32 / clock_rate) as u32;
        sec_drain = (sec_drain as f32 / clock_rate) as u32;
        od = calculate_od(od, clock_rate);
        ar = calculate_ar(ar, clock_rate);
    }
    // For mania, restore the raw map AR/OD (no rate adjustment shown).
    if map.mode == GameMode::MNA {
        ar = map.ar;
        od = map.od;
    }
    let mut map_info = String::with_capacity(128);
    let _ = write!(map_info, "Length: `{}` ", sec_to_minsec(sec_total));
    // Show drain time only when it differs from the total length.
    if sec_drain != sec_total {
        let _ = write!(map_info, "(`{}`) ", sec_to_minsec(sec_drain));
    }
    let _ = write!(
        map_info,
        "BPM: `{}` Objects: `{}`\n\
        CS: `{}` AR: `{}` OD: `{}` HP: `{}` Stars: `{}`",
        round(bpm),
        map.count_objects(),
        round(cs),
        round(ar),
        round(od),
        round(hp),
        round(stars)
    );
    map_info
}
|
use crate::{
grid::records::{Records, RecordsMut},
settings::TableOption,
};
/// Set a tab size.
///
/// The size is used in order to calculate width correctly.
///
/// Default value is 4 (basically 1 '\t' equals 4 spaces).
///
/// IMPORTANT: The tab character might be not present in output,
/// it might be replaced by spaces.
///
/// # Example
///
/// ```
/// use tabled::{Table, settings::formatting::TabSize};
///
/// let text = "Some\ttext\t\twith \\tabs";
///
/// let mut table = Table::new([text]);
/// table.with(TabSize::new(4));
///
/// assert_eq!(
/// table.to_string(),
/// "+--------------------------------+\n\
/// | &str |\n\
/// +--------------------------------+\n\
/// | Some text with \\tabs |\n\
/// +--------------------------------+"
/// )
/// ```
#[derive(Debug, Default, Clone)]
pub struct TabSize(usize);
impl TabSize {
    /// Creates new [`TabSize`] object with the given width in spaces.
    pub fn new(width: usize) -> Self {
        TabSize(width)
    }
}
impl<R, D, C> TableOption<R, D, C> for TabSize
where
    for<'a> &'a R: Records,
    R: RecordsMut<String>,
{
    /// Replace every tab character in every cell with `self.0` spaces.
    fn change(self, records: &mut R, _: &mut C, _: &mut D) {
        // Two phases: first collect the rewritten texts while iterating
        // (which borrows `records` immutably), then write them back —
        // `set` needs a mutable borrow that can't coexist with iteration.
        let mut list = vec![];
        for (row, cells) in records.iter_rows().into_iter().enumerate() {
            for (col, text) in cells.into_iter().enumerate() {
                let text = text.as_ref().replace('\t', &" ".repeat(self.0));
                list.push(((row, col), text));
            }
        }
        for (pos, text) in list {
            records.set(pos, text);
        }
    }
}
|
use std::ffi::CStr;
use std::fmt;
use std::marker::PhantomData;
use std::os::raw::{c_int, c_uchar, c_void};
use std::ptr;
use std::slice;
use crate::error::{Error, Result};
use crate::panic;
/// The result of a successful SOA lookup.
#[derive(Debug)]
pub struct SOAResult {
    // Owned C allocation produced by `ares_parse_soa_reply`; freed in `Drop`.
    soa_reply: *mut c_ares_sys::ares_soa_reply,
    // Marks logical ownership of the pointed-to reply for the drop checker.
    phantom: PhantomData<c_ares_sys::ares_soa_reply>,
}
impl SOAResult {
    /// Obtain an `SOAResult` from the response to an SOA lookup.
    pub fn parse_from(data: &[u8]) -> Result<SOAResult> {
        let mut soa_reply: *mut c_ares_sys::ares_soa_reply = ptr::null_mut();
        // c-ares fills `soa_reply` only on success; on failure the
        // pointer stays null and we surface the status as an error.
        let parse_status = unsafe {
            c_ares_sys::ares_parse_soa_reply(data.as_ptr(), data.len() as c_int, &mut soa_reply)
        };
        if parse_status == c_ares_sys::ARES_SUCCESS {
            let result = SOAResult::new(soa_reply);
            Ok(result)
        } else {
            Err(Error::from(parse_status))
        }
    }
    // Invariant: only called with a pointer that a successful
    // `ares_parse_soa_reply` populated (see `parse_from`).
    fn new(soa_reply: *mut c_ares_sys::ares_soa_reply) -> Self {
        SOAResult {
            soa_reply,
            phantom: PhantomData,
        }
    }
    /// Returns the name server from this `SOAResult`.
    ///
    /// In practice this is very likely to be a valid UTF-8 string, but the underlying `c-ares`
    /// library does not guarantee this - so we leave it to users to decide whether they prefer a
    /// fallible conversion, a lossy conversion, or something else altogether.
    pub fn name_server(&self) -> &CStr {
        // SAFETY: `soa_reply` is valid until `Drop` (see `new`'s invariant).
        unsafe { CStr::from_ptr((*self.soa_reply).nsname) }
    }
    /// Returns the hostmaster from this `SOAResult`.
    ///
    /// In practice this is very likely to be a valid UTF-8 string, but the underlying `c-ares`
    /// library does not guarantee this - so we leave it to users to decide whether they prefer a
    /// fallible conversion, a lossy conversion, or something else altogether.
    pub fn hostmaster(&self) -> &CStr {
        // SAFETY: same invariant as `name_server`.
        unsafe { CStr::from_ptr((*self.soa_reply).hostmaster) }
    }
    /// Returns the serial number from this `SOAResult`.
    pub fn serial(&self) -> u32 {
        // SAFETY: same invariant as `name_server` (also for the getters below).
        unsafe { (*self.soa_reply).serial }
    }
    /// Returns the refresh time from this `SOAResult`.
    pub fn refresh(&self) -> u32 {
        unsafe { (*self.soa_reply).refresh }
    }
    /// Returns the retry time from this `SOAResult`.
    pub fn retry(&self) -> u32 {
        unsafe { (*self.soa_reply).retry }
    }
    /// Returns the expire time from this `SOAResult`.
    pub fn expire(&self) -> u32 {
        unsafe { (*self.soa_reply).expire }
    }
    /// Returns the minimum time-to-live from this `SOAResult`.
    pub fn min_ttl(&self) -> u32 {
        unsafe { (*self.soa_reply).minttl }
    }
}
impl fmt::Display for SOAResult {
    /// Render all SOA fields on one line; non-UTF-8 names show as "<not utf8>".
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        write!(
            fmt,
            "Name server: {}, Hostmaster: {}, Serial: {}, Refresh: {}, \
             Retry: {}, Expire: {}, Minimum time-to-live: {}",
            self.name_server().to_str().unwrap_or("<not utf8>"),
            self.hostmaster().to_str().unwrap_or("<not utf8>"),
            self.serial(),
            self.refresh(),
            self.retry(),
            self.expire(),
            self.min_ttl()
        )
    }
}
impl Drop for SOAResult {
    fn drop(&mut self) {
        // SAFETY: `soa_reply` came from `ares_parse_soa_reply` and is freed
        // exactly once here with the matching c-ares deallocator.
        unsafe { c_ares_sys::ares_free_data(self.soa_reply as *mut c_void) }
    }
}
// SAFETY(review): the raw pointer is owned exclusively by this struct and the
// getters never mutate the reply, so cross-thread sharing looks sound —
// confirm c-ares makes no thread-local assumptions about parsed replies.
unsafe impl Send for SOAResult {}
unsafe impl Sync for SOAResult {}
/// C trampoline for SOA queries: decodes the raw c-ares response buffer and
/// invokes the user's `FnOnce` callback with the parse result.
///
/// NOTE(review): `arg` must be a `*mut F` created by the matching query
/// setup; the `ares_callback!` macro is assumed to reclaim it and run the
/// callback exactly once — confirm against the macro definition.
pub(crate) unsafe extern "C" fn query_soa_callback<F>(
    arg: *mut c_void,
    status: c_int,
    _timeouts: c_int,
    abuf: *mut c_uchar,
    alen: c_int,
) where
    F: FnOnce(Result<SOAResult>) + Send + 'static,
{
    ares_callback!(arg as *mut F, status, abuf, alen, SOAResult::parse_from);
}
|
mod migrations;
mod store;
mod types;
pub use store::{DefaultIndexerStore, IndexerStore};
pub use types::{CellTransaction, IndexerConfig, LiveCell, TransactionPoint};
|
use super::{deployment_process::DeploymentProcess, plan::Plan, recipe::DeploymentRecipe};
use crate::config::Deployment;
use crate::util::ask_for_confirm;
use crate::wallet::{cli_types::LiveCell, Wallet};
use anyhow::{anyhow, Result};
use chrono::prelude::*;
use ckb_tool::ckb_types::core::{Capacity, TransactionView};
use std::fs;
use std::io::{Read, Write};
use std::path::PathBuf;
#[derive(Clone, Copy, Debug)]
pub struct DeployOption {
    // When true, reuse (consume) the live cells recorded by the most recent
    // migration snapshot instead of deploying everything fresh.
    pub migrate: bool,
    // Fee paid per transaction; the plan totals tx_fee * number of txs.
    pub tx_fee: Capacity,
}
/// Deployment manager:
/// 1. manages migration snapshots
/// 2. handles deploy new / rerun / migrate
pub struct Manage {
migration_dir: PathBuf,
deployment: Deployment,
}
impl Manage {
pub fn new(migration_dir: PathBuf, deployment: Deployment) -> Self {
Manage {
migration_dir,
deployment,
}
}
/// create a snapshot in migration dir
fn snapshot_recipe(&self, recipe: &DeploymentRecipe) -> Result<()> {
let now: DateTime<Utc> = Utc::now();
let snapshot_name = now.format("%Y-%m-%d-%H%M%S.json").to_string();
let mut path = self.migration_dir.clone();
path.push(snapshot_name);
let content = serde_json::to_vec(recipe)?;
fs::OpenOptions::new()
.write(true)
.create_new(true)
.open(path)?
.write_all(&content)?;
Ok(())
}
fn load_snapshot(&self, snapshot_name: String) -> Result<DeploymentRecipe> {
let mut path = self.migration_dir.clone();
path.push(snapshot_name);
let mut buf = Vec::new();
fs::File::open(path)?.read_to_end(&mut buf)?;
let recipe = serde_json::from_slice(&buf)?;
Ok(recipe)
}
fn collect_migration_live_cells(&self, wallet: &Wallet) -> Result<Vec<(String, LiveCell)>> {
// read last migration
let file_names: Vec<_> = fs::read_dir(&self.migration_dir)?
.map(|d| d.map(|d| d.file_name()))
.collect::<Result<_, _>>()?;
let last_migration_file = file_names.into_iter().max();
let mut cells = Vec::new();
if last_migration_file.is_none() {
return Ok(cells);
}
let last_migration_file = last_migration_file.unwrap();
let recipe = self.load_snapshot(last_migration_file.into_string().unwrap())?;
// query cells recipes
for cell in recipe.cell_recipes {
if let Some(tx) = wallet.query_transaction(&cell.tx_hash)? {
let output = &tx.transaction.inner.outputs[cell.index as usize];
let live_cell = LiveCell {
tx_hash: tx.transaction.hash.clone(),
index: cell.index,
capacity: output.capacity.value(),
mature: true,
};
cells.push((cell.name.clone(), live_cell));
}
}
// query dep groups recipes
for dep_group in recipe.dep_group_recipes {
if let Some(tx) = wallet.query_transaction(&dep_group.tx_hash)? {
let output = &tx.transaction.inner.outputs[dep_group.index as usize];
let live_cell = LiveCell {
tx_hash: tx.transaction.hash.clone(),
index: dep_group.index,
capacity: output.capacity.value(),
mature: true,
};
cells.push((dep_group.name.clone(), live_cell));
}
}
Ok(cells)
}
pub fn deploy(&self, wallet: Wallet, opt: DeployOption) -> Result<()> {
if !self.migration_dir.exists() {
fs::create_dir_all(&self.migration_dir)?;
println!("Create directory {:?}", self.migration_dir);
}
let mut pre_inputs = Vec::new();
let deployment = self.deployment.clone();
if opt.migrate {
pre_inputs.extend(self.collect_migration_live_cells(&wallet)?);
}
let mut process = DeploymentProcess::new(deployment, wallet, opt.tx_fee);
let (recipe, txs) = process.prepare_recipe(pre_inputs.clone())?;
if txs.is_empty() {
return Err(anyhow!("Nothing to deploy"));
}
self.output_deployment_plan(&recipe, &txs, &pre_inputs, &opt);
if ask_for_confirm("Confirm deployment?")? {
self.snapshot_recipe(&recipe)?;
let txs = process.sign_txs(txs)?;
process.execute_recipe(recipe, txs)?;
println!("Deployment complete");
} else {
println!("Cancelled");
}
Ok(())
}
fn output_deployment_plan(
&self,
recipe: &DeploymentRecipe,
txs: &[TransactionView],
pre_inputs: &[(String, LiveCell)],
opt: &DeployOption,
) {
let migrated_capacity = pre_inputs
.iter()
.map(|(_name, cell)| cell.capacity)
.sum::<u64>();
let total_occupied_capacity = txs
.iter()
.map(|tx| {
tx.outputs_with_data_iter()
.filter_map(|(output, data)| {
if data.is_empty() {
None
} else {
let data_capacity = Capacity::bytes(data.len()).expect("bytes");
Some(
output
.occupied_capacity(data_capacity)
.expect("occupied")
.as_u64(),
)
}
})
.sum::<u64>()
})
.sum::<u64>();
let new_capacity = total_occupied_capacity - migrated_capacity;
let plan = Plan::new(
migrated_capacity,
new_capacity,
total_occupied_capacity,
opt.tx_fee.as_u64() * txs.len() as u64,
recipe.to_owned(),
);
let plan = serde_yaml::to_string(&plan).unwrap();
println!("Deployment plan:");
println!("{}", plan);
}
}
|
pub use feelui_core::*;
#[cfg(feature = "baseview")]
pub use feelui_baseview::*; |
extern crate stringly_typed_rust_esosyntax;
use stringly_typed_rust_esosyntax::stringly_typed;
// NOTE(review): the macro literal appears to declare `const ANSWER: i32 = 42`
// ('ANSWER' id, 'i32' ty, '42' int, const) — confirm against the
// `stringly_typed!` grammar before relying on this reading.
stringly_typed!{"'ANSWER'id'i32'ty'42'int"const}
fn main() {
    // Prints the macro-generated constant.
    println!("The answer is {}!", ANSWER);
}
|
use vec3::Vec3;
use rand::{thread_rng, Rng};
/// Uniform random f32 in [0, 1).
///
/// NOTE(review): `gen_range(low, high)` is the pre-0.7 `rand` two-argument
/// API; newer `rand` versions take a range (`gen_range(0.0..1.0)`).
pub fn random() -> f32 {
    thread_rng().gen_range(0.0, 1.0)
}
pub fn random_in_unit_sphere() -> Vec3 {
let mut p: Vec3;
loop {
p = 2.0 * Vec3::new(random(), random(), random()) -
Vec3::new(1.0, 1.0, 1.0);
if p.squared_length() >= 1.0 {
break;
}
}
p
}
|
use super::disk::*;
// Maximum number of characters stored in a single block's data chunk
// (see `string_to_block_data_chunks`).
static MAX_DATA_SIZE: u32 = 50;
/// Lift a function `A -> B` to operate on `Option`s
/// (i.e. `Option::map` packaged behind a `Box<dyn Fn>`).
pub fn lift<'a, A: 'a, B: 'a>(f: Box<dyn Fn(A) -> B>) -> Box<dyn Fn(Option<A>) -> Option<B> + 'a> {
    Box::new(move |x| x.map(|a| f(a)))
}
/// Lift a Kleisli arrow `A -> DiskAction<B>` to accept `Option<A>`:
/// `Some(a)` runs the action and wraps its result in `Some`; `None`
/// short-circuits to an action that yields `None` without touching the disk.
pub fn lift_disk_action<'a, A: 'a, B: 'a>(
    f: Box<dyn Fn(A) -> DiskAction<'a, B>>,
) -> Box<dyn Fn(Option<A>) -> DiskAction<'a, Option<B>> + 'a> {
    Box::new(move |a: Option<A>| match a {
        Some(b) => map(f(b), Box::new(|c| Some(c))),
        None => Box::new(|disk| (None, disk)),
    })
}
/// Drop the `None`s and unwrap the `Some`s, preserving order.
pub fn remove_options<A>(b: Vec<Option<A>>) -> Vec<A> {
    // `flatten` over an iterator of `Option`s keeps exactly the `Some`
    // values — replaces the manual fold-and-push.
    b.into_iter().flatten().collect()
}
/// Split `s` into chunks of at most `MAX_DATA_SIZE` characters each.
pub fn string_to_block_data_chunks(s: String) -> Vec<String> {
    let chars: Vec<char> = s.chars().collect();
    chars
        .chunks(MAX_DATA_SIZE as usize)
        .map(|chunk| chunk.iter().collect::<String>())
        .collect()
}
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn string_to_block_data_chunks_return_expected() {
        // The input is longer than one 50-char chunk but at most two,
        // so exactly 2 chunks are expected.
        let r = string_to_block_data_chunks(
            " This is my stirn go fahst ea;lsf jasjfadklsjfal;sdfjads f".into(),
        );
        assert_eq!(r.len(), 2);
    }
}
|
pub mod producer;
pub mod producer_structs;
pub mod producer_to_game;
|
use crate::traverse::{traverse, VisitedNode};
use log::debug;
use std::fmt;
use yaml_rust::Yaml;
// Splat wildcard token. NOTE(review): its consumers are not visible in this
// chunk — confirm the recursive-match semantics at the use sites.
pub const SPLAT: &'static str = "**";
// Separator for `child==filter` expressions; use sites are outside this chunk.
pub const CHILD_FILTER_DELIM: &'static str = "==";
/// Array selector parsed from a path: either every element or an explicit
/// list of indices.
/// NOTE(review): `Star` presumably corresponds to a `[*]` selector —
/// confirm in the array-parsing code (outside this chunk).
#[derive(Debug, PartialEq)]
pub enum ArrayIndices {
    Star,
    Indices(Vec<usize>),
}
/// Path-parsing failure wrapping a human-readable message.
#[derive(Debug, Clone, PartialEq)]
pub struct ParseError(String);
impl ParseError {
    /// Build a `ParseError` from a message.
    pub fn new(msg: &str) -> ParseError {
        ParseError(msg.to_owned())
    }
}
impl fmt::Display for ParseError {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.write_str(&self.0)
    }
}
/// Token classes recognized while scanning a path expression.
#[derive(PartialEq, Debug)]
enum PathElem {
    Char,
    Dot,
    Quote,
    ArrayOpen,
    ArrayClose,
    ParenOpen,
    ParenClose,
    // End of word: no special character remains.
    EOW,
}
/// Classify a single character.
fn char_is(c: char) -> PathElem {
    match c {
        '.' => PathElem::Dot,
        '"' => PathElem::Quote,
        '[' => PathElem::ArrayOpen,
        ']' => PathElem::ArrayClose,
        '(' => PathElem::ParenOpen,
        ')' => PathElem::ParenClose,
        _ => PathElem::Char,
    }
}
/// Find the first special character in `s`, returning its kind and its
/// BYTE offset (callers slice `s` with the returned index).
///
/// Bug fix: the previous version used `chars().enumerate()`, which yields
/// CHARACTER positions; on multi-byte (non-ASCII) input those diverge from
/// byte offsets, so the callers' slicing would split or miss characters.
/// `char_indices()` yields byte offsets, consistent with the `s.len()`
/// returned for EOW.
fn next_special_char_is(s: &str) -> (PathElem, usize) {
    for (idx, c) in s.char_indices() {
        let path_elem = char_is(c);
        if path_elem != PathElem::Char {
            return (path_elem, idx);
        }
    }
    (PathElem::EOW, s.len())
}
/// Find the first occurrence of the specific character class `pe`,
/// returning whether it was found and its BYTE offset (same fix as above).
/// For `EOW` the answer is always "found, at end of string".
fn next_specific_special_char(s: &str, pe: PathElem) -> (bool, usize) {
    for (idx, c) in s.char_indices() {
        if char_is(c) == pe {
            return (true, idx);
        }
    }
    match pe {
        PathElem::EOW => (true, s.len()),
        _ => (false, 0),
    }
}
/// Splits a path expression like `a.b[0]."quoted key".(x==y)` into its
/// component segments.
///
/// Segment kinds produced:
/// * plain words between `.` separators,
/// * the contents of a `"..."` quoted word (quotes stripped),
/// * a whole `[...]` group, brackets included,
/// * a whole `(...)` group, parens included.
///
/// Errors on an unterminated quote/bracket/paren or on a closing `]`/`)`
/// that appears before its opener.
///
/// NOTE(review): all offsets here are byte indices into `path`, but the
/// helper scanners count chars — the two agree only for ASCII input; a
/// multi-byte character could mis-slice. TODO confirm inputs are ASCII.
/// NOTE(review): any text preceding an opening quote is silently dropped
/// (the `+ 1 +` skip below) — confirm this is intended.
pub fn parse_path(path: &str) -> Result<Vec<String>, ParseError> {
    let mut parsed_path: Vec<String> = vec![];
    let mut current_idx = 0;
    while current_idx < path.len() {
        match next_special_char_is(&path[current_idx..]) {
            (PathElem::Dot, relative_dot_idx) => {
                let dot_idx = current_idx + relative_dot_idx;
                // A leading dot produces no segment; just skip over it.
                if dot_idx == current_idx {
                    current_idx += 1;
                    continue;
                }
                parsed_path.push(path[current_idx..dot_idx].to_string());
                current_idx = dot_idx + 1;
            }
            (PathElem::Quote, relative_start_quote_idx) => {
                // Skip the opening quote; capture up to the closing one.
                let start_quoted_word_idx = current_idx + 1 + relative_start_quote_idx;
                let (found, relative_end_quote_idx) =
                    next_specific_special_char(&path[start_quoted_word_idx..], PathElem::Quote);
                if found {
                    let end_quote_idx = start_quoted_word_idx + relative_end_quote_idx;
                    parsed_path.push(path[start_quoted_word_idx..end_quote_idx].to_string());
                    current_idx = end_quote_idx + 1;
                } else {
                    return Err(ParseError::new(&format!(
                        "invalid path `{}`, no closing quote",
                        path
                    )));
                }
            }
            (PathElem::ArrayOpen, relative_array_open_idx) => {
                let array_open_idx = current_idx + relative_array_open_idx;
                // Text preceding `[` becomes its own segment.
                if array_open_idx != current_idx {
                    parsed_path.push(path[current_idx..array_open_idx].to_string());
                }
                let (found, relative_array_close_idx) =
                    next_specific_special_char(&path[array_open_idx..], PathElem::ArrayClose);
                if found {
                    // The pushed segment keeps its surrounding brackets.
                    let array_close_idx = array_open_idx + relative_array_close_idx;
                    parsed_path.push(path[array_open_idx..array_close_idx + 1].to_string());
                    current_idx = array_close_idx + 1;
                } else {
                    return Err(ParseError::new(&format!(
                        "invalid path `{}`, no closing array character",
                        path
                    )));
                }
            }
            (PathElem::ParenOpen, relative_paren_open_idx) => {
                let paren_open_idx = current_idx + relative_paren_open_idx;
                // Text preceding `(` becomes its own segment.
                if paren_open_idx != current_idx {
                    parsed_path.push(path[current_idx..paren_open_idx].to_string());
                }
                let (found, relative_paren_close_idx) =
                    next_specific_special_char(&path[paren_open_idx..], PathElem::ParenClose);
                if found {
                    // The pushed segment keeps its surrounding parens.
                    let paren_close_idx = paren_open_idx + relative_paren_close_idx;
                    parsed_path.push(path[paren_open_idx..paren_close_idx + 1].to_string());
                    current_idx = paren_close_idx + 1;
                } else {
                    return Err(ParseError::new(&format!(
                        "invalid path `{}`, no closing paren character",
                        path
                    )));
                }
            }
            (PathElem::ArrayClose, _) => {
                return Err(ParseError::new(&format!(
                    "invalid path `{}`, closing array character before opening",
                    path
                )));
            }
            (PathElem::ParenClose, _) => {
                return Err(ParseError::new(&format!(
                    "invalid path `{}`, closing paren character before opening",
                    path
                )));
            }
            (PathElem::EOW, _) => {
                // No special characters left: the remainder is the last segment.
                parsed_path.push(path[current_idx..].to_string());
                break;
            }
            _ => {
                return Err(ParseError::new(&format!("invalid path `{}`", path)));
            }
        }
    }
    Ok(parsed_path)
}
/// Splits a `key==value` child filter into its two halves.
///
/// Errors unless the delimiter occurs exactly once, i.e. the split yields
/// exactly two parts.
pub fn split_child_filter(filter: &str) -> Result<[&str; 2], ParseError> {
    let mut parts = filter.split(CHILD_FILTER_DELIM);
    // Exactly two parts <=> exactly one `==` delimiter in the input.
    match (parts.next(), parts.next(), parts.next()) {
        (Some(key), Some(value), None) => Ok([key, value]),
        _ => Err(ParseError::new(&format!(
            "invalid child filter: `{}`",
            filter
        ))),
    }
}
/// Resolves a child filter (e.g. `b.d==dog*`) against the elements of a
/// YAML array, returning the indices of matching elements (or `Star` for
/// the `*` wildcard).
///
/// * When `is_final_path_elem` is true, the filter value is matched
///   directly against each element's string form.
/// * Otherwise, `filter_path` is traversed inside each element and the
///   filter value is matched against the single visited node.
pub fn parse_array_child_filter(
    path_elem: &str,
    array_node: &Vec<Yaml>,
    is_final_path_elem: bool,
) -> Result<ArrayIndices, ParseError> {
    if path_elem == "*" {
        return Ok(ArrayIndices::Star);
    }
    let filter_key_and_value = crate::path::split_child_filter(path_elem)?;
    let (filter_path, filter_value) = (filter_key_and_value[0], filter_key_and_value[1]);
    // parse filter_path
    let parsed_path = parse_path(filter_path)?;
    debug!("parsed path for child filtering: {:?}", parsed_path);
    let mut indices: Vec<usize> = vec![];
    if is_final_path_elem {
        // child value filter
        debug!("running a child value filter");
        for (idx, array_elem) in array_node.iter().enumerate() {
            if matches_pattern(
                &crate::convert::convert_single_node(array_elem),
                filter_value,
            ) {
                debug!("array_elem matched child value filter: {:?}", array_elem);
                indices.push(idx);
            }
        }
        debug!("child value filtering matched indices: {:?}", indices);
    } else {
        // run a traverse search again against each node to determine if this is a valid child path
        debug!("running a child node filter");
        for (idx, array_elem) in array_node.iter().enumerate() {
            let mut visited = Vec::<VisitedNode>::new();
            traverse(
                array_elem,
                "",
                &parsed_path,
                String::new(),
                false,
                &mut visited,
            );
            // Elements where the child path resolves to anything other than
            // exactly one node are treated as non-matches.
            if visited.len() != 1 {
                debug!(
                    "array_elem did not match child node filter, continuing: {:?}",
                    array_elem
                );
                continue;
            }
            let ref visited_elem = visited[0];
            if matches_pattern(
                &crate::convert::convert_single_node(visited_elem.yml),
                filter_value, // path element for child filter
            ) {
                debug!("array_elem matched child node filter: {:?}", array_elem);
                indices.push(idx);
            }
        }
        debug!("child node filtering matched indices: {:?}", indices);
    }
    Ok(ArrayIndices::Indices(indices))
}
/// Parses an array index path element: `*` selects everything, otherwise
/// the element must parse as a single `usize` index.
pub fn parse_array_indexing_operation(path_elem: &str) -> Result<ArrayIndices, ParseError> {
    if path_elem == "*" {
        return Ok(ArrayIndices::Star);
    }
    path_elem
        .parse::<usize>()
        .map(|index| ArrayIndices::Indices(vec![index]))
        .map_err(|err| {
            ParseError(format!(
                "unable to parse array index `{:?}`, error: {:?}",
                path_elem, err
            ))
        })
}
/// Returns true when `v` matches `pattern`: exact equality, the global
/// `**` wildcard, or a trailing-`*` prefix pattern (e.g. `crab*`).
pub fn matches_pattern(v: &str, pattern: &str) -> bool {
    if pattern == SPLAT || v == pattern {
        return true;
    }
    // A trailing `*` (one or more) turns the pattern into a prefix match.
    pattern.ends_with('*') && v.starts_with(pattern.trim_end_matches('*'))
}
/// Returns true when the path element is a parenthesized child filter,
/// i.e. it both starts with `(` and ends with `)`.
pub fn is_child_filter(p: &str) -> bool {
    p.strip_prefix('(')
        .map_or(false, |rest| rest.ends_with(')'))
}
/// Converts the YAML node to its string form and tests it against the
/// value half of the `key==value` filter in `pattern`. Errors when
/// `pattern` is not a valid child filter.
pub fn is_child_filter_value_match(v: &Yaml, pattern: &str) -> Result<bool, ParseError> {
    let [_, filter_value] = split_child_filter(pattern)?;
    let node_str = crate::convert::convert_single_node(v);
    Ok(matches_pattern(&node_str, filter_value))
}
// Unit tests for the path scanner, pattern matcher, and array filters.
#[cfg(test)]
mod tests {
    use super::*;
    // --- character classification ---
    #[test]
    fn test_char_is() {
        assert_eq!(PathElem::Dot, char_is('.'));
        assert_eq!(PathElem::Quote, char_is('"'));
        assert_eq!(PathElem::ArrayOpen, char_is('['));
        assert_eq!(PathElem::ArrayClose, char_is(']'));
        assert_eq!(PathElem::ParenOpen, char_is('('));
        assert_eq!(PathElem::ParenClose, char_is(')'));
        assert_eq!(PathElem::Char, char_is('a'));
    }
    // --- scanning for the next special character (offset 4 in each case) ---
    #[test]
    fn test_next_special_char_is() {
        assert_eq!((PathElem::Dot, 4), next_special_char_is("asdf.asdf"));
        assert_eq!((PathElem::Quote, 4), next_special_char_is("asdf\"asdf"));
        assert_eq!((PathElem::ArrayOpen, 4), next_special_char_is("asdf[asdf"));
        assert_eq!((PathElem::ArrayClose, 4), next_special_char_is("asdf]asdf"));
        assert_eq!((PathElem::ParenOpen, 4), next_special_char_is("asdf(asdf"));
        assert_eq!((PathElem::ParenClose, 4), next_special_char_is("asdf)asdf"));
        assert_eq!((PathElem::EOW, 8), next_special_char_is("asdfasdf"));
    }
    // --- scanning for one specific special character ---
    #[test]
    fn test_next_specific_special_char_is() {
        assert_eq!(
            (true, 4),
            next_specific_special_char("asdf.asdf", PathElem::Dot)
        );
        assert_eq!(
            (false, 0),
            next_specific_special_char("asdf.asdf", PathElem::Quote)
        );
        assert_eq!(
            (false, 0),
            next_specific_special_char("asdfasdf", PathElem::Dot)
        );
        assert_eq!(
            (true, 4),
            next_specific_special_char("asdf\"asdf", PathElem::Quote)
        );
        assert_eq!(
            (false, 0),
            next_specific_special_char("asdf\"asdf", PathElem::Dot)
        );
        assert_eq!(
            (false, 0),
            next_specific_special_char("asdfasdf", PathElem::Quote)
        );
        assert_eq!(
            (true, 4),
            next_specific_special_char("asdf[asdf", PathElem::ArrayOpen)
        );
        assert_eq!(
            (true, 4),
            next_specific_special_char("asdf(asdf", PathElem::ParenOpen)
        );
        assert_eq!(
            (false, 0),
            next_specific_special_char("asdf[asdf", PathElem::Dot)
        );
        assert_eq!(
            (false, 0),
            next_specific_special_char("asdfasdf", PathElem::ArrayOpen)
        );
        assert_eq!(
            (true, 4),
            next_specific_special_char("asdf]asdf", PathElem::ArrayClose)
        );
        assert_eq!(
            (false, 0),
            next_specific_special_char("asdfasdf", PathElem::ParenOpen)
        );
        assert_eq!(
            (true, 4),
            next_specific_special_char("asdf)asdf", PathElem::ParenClose)
        );
        assert_eq!(
            (false, 0),
            next_specific_special_char("asdf]asdf", PathElem::Dot)
        );
        assert_eq!(
            (false, 0),
            next_specific_special_char("asdfasdf", PathElem::ArrayClose)
        );
        assert_eq!(
            (false, 0),
            next_specific_special_char("asdfasdf", PathElem::ParenClose)
        );
        // Searching for `Char` finds the first ordinary character.
        assert_eq!(
            (true, 0),
            next_specific_special_char("asdfasdf", PathElem::Char)
        );
        assert_eq!(
            (false, 0),
            next_specific_special_char("asdfasdf", PathElem::Dot)
        );
        // `EOW` always matches at the end of the string.
        assert_eq!(
            (true, 8),
            next_specific_special_char("asdfasdf", PathElem::EOW)
        );
    }
    // --- pattern matching ---
    #[test]
    fn test_matches_pattern_identical() {
        assert!(matches_pattern("rusty", "rusty"));
    }
    #[test]
    fn test_matches_pattern_splat() {
        assert!(matches_pattern("rusty", "r*"));
    }
    #[test]
    fn test_matches_pattern_wildcard() {
        assert!(matches_pattern("rusty", "**"));
    }
    #[test]
    fn test_matches_pattern_no() {
        assert!(!matches_pattern("rusty", "smooth"));
    }
    // --- child filter detection and matching ---
    #[test]
    fn test_is_child_filter_no() {
        assert!(!is_child_filter("[.==crabby]"));
        assert!(!is_child_filter("(.==crabby]"));
        assert!(!is_child_filter("[.==crabby)"));
        assert!(!is_child_filter(""));
    }
    #[test]
    fn test_is_child_filter_yes() {
        assert!(is_child_filter("(.==crabby)"));
        assert!(is_child_filter("(crabby)"));
        assert!(is_child_filter("()"));
    }
    #[test]
    fn test_is_child_filter_value_match_not_filter_errs() {
        assert_eq!(
            true,
            is_child_filter_value_match(&Yaml::String("crabby".to_string()), "crabby").is_err()
        );
    }
    #[test]
    fn test_is_child_filter_value_match_not_a_match() {
        assert!(
            !is_child_filter_value_match(&Yaml::String("crabby".to_string()), "(.==nope)").unwrap()
        );
    }
    #[test]
    fn test_is_child_filter_value_match_is_a_match() {
        assert!(
            is_child_filter_value_match(&Yaml::String("crabby".to_string()), ".==crabby").unwrap()
        );
        assert!(
            is_child_filter_value_match(&Yaml::String("crabby".to_string()), ".==crab*").unwrap()
        );
    }
    #[test]
    fn test_split_child_filter_valid() {
        let split_filter = split_child_filter(".==crabby").unwrap();
        assert_eq!(split_filter[0], ".");
        assert_eq!(split_filter[1], "crabby");
    }
    #[test]
    fn test_split_child_filter_invalid() {
        assert_eq!(true, split_child_filter(".=crabby").is_err());
        assert_eq!(true, split_child_filter("").is_err());
    }
    // --- array filters over YAML arrays ---
    #[test]
    fn test_parse_array_child_filter_star() {
        assert_eq!(
            ArrayIndices::Star,
            parse_array_child_filter("*", &vec![Yaml::Null], false).unwrap()
        );
    }
    #[test]
    fn test_parse_array_child_filter_final() {
        assert_eq!(
            ArrayIndices::Indices(vec![0, 2]),
            parse_array_child_filter(
                ".==dog*",
                &vec![
                    Yaml::String("dog".to_string()),
                    Yaml::String("cat".to_string()),
                    Yaml::String("doggerino".to_string())
                ],
                true
            )
            .unwrap()
        );
    }
    #[test]
    fn test_parse_array_child_filter_node() {
        use yaml_rust::YamlLoader;
        let docs_str = "
- b:
    a1: 1
    d: dog
- b:
    a2: 2
    d: cat
- b:
    a3: 3
    d: doggerino";
        let doc = &YamlLoader::load_from_str(&docs_str).unwrap()[0];
        let array = match doc {
            Yaml::Array(v) => v,
            _ => panic!("invalid doc, not an array"),
        };
        assert_eq!(
            ArrayIndices::Indices(vec![0, 2]),
            parse_array_child_filter("b.d==dog*", array, false).unwrap()
        );
    }
    #[test]
    fn test_parse_array_child_filter_invalid() {
        use yaml_rust::YamlLoader;
        let docs_str = "
- b";
        let doc = &YamlLoader::load_from_str(&docs_str).unwrap()[0];
        let array = match doc {
            Yaml::Array(v) => v,
            _ => panic!("invalid doc, not an array"),
        };
        assert_eq!(true, parse_array_child_filter(".=b", array, false).is_err());
    }
    // --- plain index parsing ---
    #[test]
    fn test_parse_array_indexing_operation_wildcard() {
        assert_eq!(
            ArrayIndices::Star,
            parse_array_indexing_operation("*").unwrap()
        );
    }
    #[test]
    fn test_parse_array_indexing_operation_number_path_elem() {
        assert_eq!(
            ArrayIndices::Indices(vec![4]),
            parse_array_indexing_operation("4").unwrap()
        );
    }
    #[test]
    fn test_parse_array_indexing_operation_fails_invalid() {
        assert_eq!(true, parse_array_indexing_operation("a").is_err());
    }
}
|
extern crate libc;
// FFI declarations for the subset of libsodium used by this module.
// Fix: the `#[link]` attribute key for static linkage is `kind`, not
// `style` — `style = "static"` is not a recognized `#[link]` argument and
// fails to compile.
#[link(name = "sodium", kind = "static")]
extern "C" {
    fn sodium_init() -> i32;
    fn randombytes_buf(buf: *mut u8, size: libc::size_t);
    // secretbox (xsalsa20poly1305) constants and primitives
    fn crypto_secretbox_xsalsa20poly1305_keybytes() -> libc::size_t;
    fn crypto_secretbox_xsalsa20poly1305_noncebytes() -> libc::size_t;
    fn crypto_secretbox_xsalsa20poly1305_macbytes() -> libc::size_t;
    fn crypto_secretbox_xsalsa20poly1305_boxzerobytes() -> libc::size_t;
    fn crypto_secretbox_xsalsa20poly1305_zerobytes() -> libc::size_t;
    fn crypto_secretbox_xsalsa20poly1305(
        c: *mut u8,
        m: *const u8,
        mlen: libc::c_ulonglong,
        n: *const u8,
        k: *const u8,
    ) -> libc::c_int;
    fn crypto_secretbox_xsalsa20poly1305_open(
        m: *mut u8,
        c: *const u8,
        clen: libc::c_ulonglong,
        n: *const u8,
        k: *const u8,
    ) -> libc::c_int;
    // AEAD (chacha20poly1305) constants and primitives
    fn crypto_aead_chacha20poly1305_keybytes() -> libc::size_t;
    fn crypto_aead_chacha20poly1305_nsecbytes() -> libc::size_t;
    fn crypto_aead_chacha20poly1305_npubbytes() -> libc::size_t;
    fn crypto_aead_chacha20poly1305_abytes() -> libc::size_t;
    fn crypto_aead_chacha20poly1305_encrypt(
        c: *mut u8,
        clen: *mut libc::c_ulonglong,
        m: *const u8,
        mlen: libc::c_ulonglong,
        ad: *const u8,
        adlen: libc::c_ulonglong,
        nsec: *const u8,
        npub: *const u8,
        k: *const u8,
    ) -> libc::c_int;
    fn crypto_aead_chacha20poly1305_decrypt(
        m: *mut u8,
        mlen: *mut libc::c_ulonglong,
        nsec: *mut u8,
        c: *const u8,
        clen: libc::c_ulonglong,
        ad: *const u8,
        adlen: libc::c_ulonglong,
        npub: *const u8,
        k: *const u8,
    ) -> libc::c_int;
}
/// Initializes libsodium, returning `true` on success.
/// `sodium_init()` returns -1 only on failure (per libsodium docs, 0 means
/// initialized and 1 means already initialized — confirm against the
/// linked version), so any non-negative result counts as success.
pub fn sodium_init_l() -> bool {
    // Idiom fix: `if cond { false } else { true }` collapsed to the
    // comparison itself.
    unsafe { sodium_init() != -1 }
}
use std::iter::repeat;
/// Returns `size` random bytes generated by libsodium's `randombytes_buf`.
pub fn randombytes_buf_l(size: libc::size_t) -> Vec<u8> {
    // Idiom fix: allocate the zeroed buffer directly instead of collecting
    // from `iter::repeat(0u8).take(size)`.
    let mut buf = vec![0u8; size];
    unsafe {
        // SAFETY: `buf` is a valid, writable allocation of exactly `size`
        // bytes for the duration of the call.
        randombytes_buf(buf.as_mut_ptr(), size);
    }
    buf
}
/// Key length in bytes for xsalsa20poly1305 secretbox.
pub fn keybytes_xsalsa20() -> usize {
    unsafe { crypto_secretbox_xsalsa20poly1305_keybytes() }
}
/// Nonce length in bytes. NOTE(review): the Rust name says `nsec` but this
/// wraps `..._noncebytes` — the returned value is the nonce size.
pub fn nsecbytes_xsalsa20() -> usize {
    unsafe { crypto_secretbox_xsalsa20poly1305_noncebytes() }
}
/// MAC (authentication tag) length in bytes.
pub fn macbytes_xsalsa20() -> usize {
    unsafe { crypto_secretbox_xsalsa20poly1305_macbytes() }
}
/// Leading zero padding required on ciphertext buffers (boxzerobytes).
pub fn boxzerobytes_xsalsa20() -> usize {
    unsafe { crypto_secretbox_xsalsa20poly1305_boxzerobytes() }
}
/// Leading zero padding required on plaintext buffers (zerobytes).
pub fn zerobytes_xsalsa20() -> usize {
    unsafe { crypto_secretbox_xsalsa20poly1305_zerobytes() }
}
/// Seals `mlen` bytes at `m` into `c` with nonce `n` and key `k`; returns
/// `true` when the C call returns 0.
///
/// NOTE(review): this is a *safe* fn that passes caller-supplied raw
/// pointers to C — callers must uphold libsodium's buffer contract
/// (zerobytes padding, correctly sized c/m/n/k). Consider `unsafe fn`.
pub fn crypto_secretbox_xsalsa20poly1305_l(c: *mut u8,
                                           m: *const u8,
                                           mlen: libc::c_ulonglong,
                                           n: *const u8,
                                           k: *const u8)
                                           -> bool {
    unsafe { crypto_secretbox_xsalsa20poly1305(c, m, mlen, n, k) == 0 }
}
/// Opens `clen` bytes of ciphertext at `c` into `m` with nonce `n` and key
/// `k`; returns `true` on success (0 from C), `false` on e.g. MAC failure.
///
/// NOTE(review): same raw-pointer caveat as the seal wrapper above —
/// callers must uphold libsodium's buffer contract.
pub fn crypto_secretbox_xsalsa20poly1305_open_l(m: *mut u8,
                                                c: *const u8,
                                                clen: libc::c_ulonglong,
                                                n: *const u8,
                                                k: *const u8)
                                                -> bool {
    unsafe { crypto_secretbox_xsalsa20poly1305_open(m, c, clen, n, k) == 0 }
}
/// Key length in bytes for chacha20poly1305 AEAD.
pub fn keybytes_chacha20() -> usize {
    unsafe { crypto_aead_chacha20poly1305_keybytes() }
}
/// Secret nonce length in bytes (nsec).
pub fn nsecbytes_chacha20() -> usize {
    unsafe { crypto_aead_chacha20poly1305_nsecbytes() }
}
/// Public nonce length in bytes (npub).
pub fn npubbytes_chacha20() -> usize {
    unsafe { crypto_aead_chacha20poly1305_npubbytes() }
}
/// Authentication tag length in bytes (abytes).
pub fn abytes_chacha20() -> usize {
    unsafe { crypto_aead_chacha20poly1305_abytes() }
}
/// AEAD-encrypts `mlen` bytes at `m` (with additional data `ad`) into `c`,
/// writing the produced length through `clen`; returns `true` when the C
/// call returns 0.
///
/// NOTE(review): safe fn passing raw pointers straight to C — callers own
/// the buffer-size invariants.
pub fn crypto_aead_chacha20poly1305_encrypt_l(c: *mut u8,
                                              clen: *mut libc::c_ulonglong,
                                              m: *const u8,
                                              mlen: libc::c_ulonglong,
                                              ad: *const u8,
                                              adlen: libc::c_ulonglong,
                                              nsec: *const u8,
                                              npub: *const u8,
                                              k: *const u8)
                                              -> bool {
    unsafe { crypto_aead_chacha20poly1305_encrypt(c, clen, m, mlen, ad, adlen, nsec, npub, k) == 0 }
}
/// AEAD-decrypts `clen` bytes at `c` (verifying additional data `ad`) into
/// `m`, writing the plaintext length through `mlen`; returns `true` on
/// success, `false` on verification failure.
///
/// NOTE(review): safe fn passing raw pointers straight to C — callers own
/// the buffer-size invariants.
pub fn crypto_aead_chacha20poly1305_decrypt_l(m: *mut u8,
                                              mlen: *mut libc::c_ulonglong,
                                              nsec: *mut u8,
                                              c: *const u8,
                                              clen: libc::c_ulonglong,
                                              ad: *const u8,
                                              adlen: libc::c_ulonglong,
                                              npub: *const u8,
                                              k: *const u8)
                                              -> bool {
    unsafe { crypto_aead_chacha20poly1305_decrypt(m, mlen, nsec, c, clen, ad, adlen, npub, k) == 0 }
}
|
use crate::error::{NiaServerError, NiaServerResult};
use crate::protocol::Serializable;
use nia_protocol_rust::StopListeningRequest;
/// Client request asking the server to stop listening. Carries no payload;
/// the type is a bare marker.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct NiaStopListeningRequest {}
impl NiaStopListeningRequest {
    /// Constructs the (empty) request.
    pub fn new() -> NiaStopListeningRequest {
        NiaStopListeningRequest {}
    }
}
impl
    Serializable<
        NiaStopListeningRequest,
        nia_protocol_rust::StopListeningRequest,
    > for NiaStopListeningRequest
{
    /// Converts this request into its protobuf representation.
    fn to_pb(&self) -> StopListeningRequest {
        // No payload to copy: an empty protobuf message suffices.
        // (Removed the needless `mut` binding.)
        nia_protocol_rust::StopListeningRequest::new()
    }

    /// Builds a request from its protobuf representation.
    fn from_pb(
        _object_pb: StopListeningRequest,
    ) -> NiaServerResult<NiaStopListeningRequest> {
        // The message is a bare marker; nothing to read from `_object_pb`.
        Ok(NiaStopListeningRequest::new())
    }
}
#[cfg(test)]
mod tests {
    #[allow(unused_imports)]
    use super::*;
    // Round-trips the empty request through the byte representation
    // provided by the `Serializable` blanket helpers.
    #[test]
    fn serializes_and_deserializes() {
        let expected = NiaStopListeningRequest::new();
        let bytes = expected.to_bytes().unwrap();
        let result = NiaStopListeningRequest::from_bytes(bytes).unwrap();
        assert_eq!(expected, result);
    }
}
|
use crate::error::Result;
use crate::{
compression::{create_codec, Codec},
read::{CompressedDataPage, Page, PageHeader},
};
/// Compresses `buffer` with the given codec and returns the output bytes.
/// (Parameter renamed from the misleading `decompressor`.)
fn compress_(buffer: &[u8], codec: &mut dyn Codec) -> Result<Vec<u8>> {
    let mut out = Vec::new();
    codec.compress(buffer, &mut out)?;
    Ok(out)
}
/// Compresses a V1 page: the entire buffer (levels + values) is compressed.
fn compress_v1(mut page: PageV1, codec: &mut dyn Codec) -> Result<PageV1> {
    page.buffer = compress_(&page.buffer, codec)?;
    Ok(page)
}
/// Compresses a V2 page. Per the format, the rep/def level bytes at the
/// front of the buffer stay uncompressed; only the values are compressed.
fn compress_v2(mut page: PageV2, codec: &mut dyn Codec) -> Result<PageV2> {
    // only values are compressed in v2:
    // [<rep data> <def data> <values>] -> [<rep data> <def data> <compressed_values>]
    let prefix = (page.header.repetition_levels_byte_length
        + page.header.definition_levels_byte_length) as usize;
    let compressed_values = compress_(&page.buffer[prefix..], codec)?;
    // Keep the uncompressed level prefix, then append the compressed values.
    page.buffer.truncate(prefix);
    page.buffer.extend(compressed_values);
    Ok(page)
}
/// Compresses a page in place. This only changes the page's internal
/// buffer. (Previous doc said "decompresses" — this path compresses.)
/// Pages whose compression resolves to no codec pass through unchanged.
pub fn compress(page: Page) -> Result<CompressedDataPage> {
    match page {
        Page::V1(page) => {
            let codec = create_codec(&page.compression)?;
            if let Some(mut codec) = codec {
                compress_v1(page, codec.as_mut()).map(CompressedDataPage::V1)
            } else {
                // Uncompressed codec: wrap as-is.
                Ok(CompressedDataPage::V1(page))
            }
        }
        Page::V2(page) => {
            let codec = create_codec(&page.compression)?;
            if let Some(mut codec) = codec {
                compress_v2(page, codec.as_mut()).map(CompressedDataPage::V2)
            } else {
                Ok(CompressedDataPage::V2(page))
            }
        }
    }
}
|
#[doc = "Reader of register PIDR5"]
pub type R = crate::R<u32, super::PIDR5>;
#[doc = "Reader of field `PIDR5`"]
pub type PIDR5_R = crate::R<u32, u32>;
impl R {
    #[doc = "Bits 0:31 - peripheral ID5"]
    #[inline(always)]
    pub fn pidr5(&self) -> PIDR5_R {
        // The field spans the full 32-bit register, so the mask is a no-op
        // kept for consistency with generated accessors of narrower fields.
        PIDR5_R::new((self.bits & 0xffff_ffff) as u32)
    }
}
|
//! The world to be rendered.
use crate::aabb::AABB;
use crate::hittable::{HitRecord, Hittable};
use crate::ray::Ray;
use crate::vec3;
use crate::vec3::Vec3;
use rand::prelude::*;
/// The world that needs to be rendered, with all of its objects. Every object
/// needs to implement `Hittable`. Coincidentally, this struct *also* implements
/// `Hittable`.
#[derive(Default, Debug, Clone)]
pub struct World {
    // Heterogeneous scene objects behind trait objects.
    pub objects: Vec<Box<dyn Hittable>>,
}
impl World {
    /// Create a new `World`, filled with the passed-in `objects`.
    pub fn new(objects: Vec<Box<dyn Hittable>>) -> Self {
        World { objects }
    }
    /// Add an object to the `World`. Returns `&mut self` for chaining.
    pub fn add(&mut self, object: Box<dyn Hittable>) -> &mut Self {
        self.objects.push(object);
        self
    }
}
impl Hittable for World {
    /// Returns the hit record of the closest object hit by `ray` in
    /// `(t_min, t_max)`, or `None` when nothing is hit.
    fn hit(&self, ray: &Ray, t_min: f32, t_max: f32) -> Option<HitRecord> {
        // We want to keep track of the closest-hit object. So, we intialize the
        // closest value for `t` to `t_max`.
        let mut closest_so_far = t_max;
        let mut rec: Option<HitRecord> = None;
        for object in &self.objects {
            // Each hit shrinks the search window, so later objects only
            // register if they are strictly closer.
            if let Some(obj_hit_rec) = object.hit(ray, t_min, closest_so_far) {
                closest_so_far = obj_hit_rec.t;
                rec = Some(obj_hit_rec);
            }
        }
        rec
    }
    /// Bounding box of the whole scene; `None` if the world is empty or any
    /// object has no bounding box.
    fn bounding_box(&self, t0: f32, t1: f32) -> Option<AABB> {
        if self.objects.is_empty() {
            return None;
        }
        let mut first_box = true;
        let mut temp_box: Option<AABB>;
        let mut output_box = AABB::new(vec3!(), vec3!());
        for object in &self.objects {
            temp_box = object.bounding_box(t0, t1);
            if let Some(temp_box) = temp_box {
                // First box seeds the accumulator; later boxes merge in.
                output_box = if first_box {
                    temp_box
                } else {
                    AABB::surrounding_box(output_box, temp_box)
                };
                first_box = false;
            } else {
                return None;
            }
        }
        Some(output_box)
    }
    /// Average of the objects' pdf values.
    /// NOTE(review): with zero objects `weight` is `inf` and the result is
    /// NaN (0.0 * inf) — confirm the empty world never reaches here.
    fn pdf_value(&self, origin: &Vec3, v: &Vec3) -> f32 {
        let weight = 1.0 / self.objects.len() as f32;
        let mut sum = 0.0;
        for object in &self.objects {
            sum += object.pdf_value(origin, v);
        }
        sum * weight
    }
    /// Delegates to a uniformly random object.
    /// NOTE(review): two-arg `gen_range(lo, hi)` is the pre-0.8 `rand` API;
    /// also panics on an empty `objects` list (empty range).
    fn random(&self, origin: &Vec3) -> Vec3 {
        // AAHHHH THIS IS BAD
        let mut rng = thread_rng();
        let index = rng.gen_range(0_u64, self.objects.len() as u64);
        self.objects[index as usize].random(origin)
    }
    /// Clone helper so `Box<dyn Hittable>` can be cloned.
    fn box_clone(&self) -> Box<dyn Hittable> {
        Box::new(self.clone())
    }
}
/// A convenience macro for more easily building `World`'s.
/// Each argument is boxed automatically, e.g.
/// `create_world!(sphere_a, sphere_b)`.
#[macro_export]
macro_rules! create_world {
    ($($object:expr),* $(,)?) => {
        World::new(vec![
            $(Box::new($object)),*
        ])
    };
}
|
//#![feature(plugin)]
//#![plugin(bindgen_plugin)]
//#[allow(dead_code, uppercase_variables, non_camel_case_types)]
//#[plugin(bindgen_plugin)]
//mod mysql_bindings {
// bindgen!("/usr/include/mysql/mysql.h", match="mysql.h", link="mysql");
//}
//use std::env;
//use std::fs;
//use std::path::Path;
//use std::process::Command;
/// Build script: emits linker search paths and the libraries needed by the
/// Cassandra C/C++ driver.
fn main() {
    // Fix: read the override at build-script *run* time. `option_env!`
    // captures the value present when the build script itself was
    // compiled, so later changes to the variable were ignored.
    if let Ok(datastax_dir) = std::env::var("CASSANDRA_SYS_LIB_PATH") {
        // Multiple paths may be supplied, separated by `;`.
        for p in datastax_dir.split(';') {
            println!("cargo:rustc-link-search={}", p);
        }
    }
    // Re-run this script whenever the override changes.
    println!("cargo:rerun-if-env-changed=CASSANDRA_SYS_LIB_PATH");
    // Dynamic dependencies of the static driver library.
    println!("cargo:rustc-flags=-l dylib=crypto");
    println!("cargo:rustc-flags=-l dylib=ssl");
    println!("cargo:rustc-flags=-l dylib=stdc++");
    println!("cargo:rustc-flags=-l dylib=uv");
    // Common install locations, in priority order.
    println!("cargo:rustc-link-search={}", "/usr/local/lib/x86_64-linux-gnu");
    println!("cargo:rustc-link-search={}", "/usr/local/lib64");
    println!("cargo:rustc-link-search={}", "/usr/local/lib");
    println!("cargo:rustc-link-search={}", "/usr/lib64/");
    println!("cargo:rustc-link-search={}", "/usr/lib/");
    println!("cargo:rustc-link-lib=static=cassandra_static");
}
|
/// Frequency in hertz (newtype over `u32`).
#[derive(PartialEq, PartialOrd, Clone, Copy)]
pub struct Hertz(pub u32);
/// Frequency in kilohertz.
#[derive(PartialEq, PartialOrd, Clone, Copy)]
pub struct KiloHertz(pub u32);
/// Frequency in megahertz.
#[derive(PartialEq, PartialOrd, Clone, Copy)]
pub struct MegaHertz(pub u32);
// impl From<MegaHertz> for u32 {
// fn from(value: MegaHertz) -> u32 {
// value.0
// }
// }
impl From<u32> for MegaHertz {
    /// Converts raw hertz to megahertz.
    /// NOTE: integer division — anything below 1 MHz truncates to 0.
    fn from(value: u32) -> MegaHertz {
        MegaHertz(value / 1_000_000)
    }
}
impl From<u32> for KiloHertz {
    /// Converts raw hertz to kilohertz (truncating below 1 kHz).
    fn from(value: u32) -> KiloHertz {
        KiloHertz(value / 1_000)
    }
}
impl From<u32> for Hertz {
    /// Raw value is already hertz; wrap it unchanged.
    fn from(value: u32) -> Hertz {
        Hertz(value)
    }
}
/// `KiloHertz` → `Hertz`. Implemented as `From` (idiomatic; the standard
/// blanket impl provides `Into` for free, so existing `.into()` callers
/// keep working). Multiplication may overflow `u32` for very large values,
/// same as the original.
impl From<KiloHertz> for Hertz {
    fn from(khz: KiloHertz) -> Hertz {
        Hertz(khz.0 * 1_000)
    }
}
/// `MegaHertz` → `Hertz` via `From` (blanket impl keeps `.into()` callers
/// working). May overflow `u32` above ~4294 MHz, same as the original.
impl From<MegaHertz> for Hertz {
    fn from(mhz: MegaHertz) -> Hertz {
        Hertz(mhz.0 * 1_000_000)
    }
}
/// `MegaHertz` → `KiloHertz` via `From` (blanket impl keeps `.into()`
/// callers working).
impl From<MegaHertz> for KiloHertz {
    fn from(mhz: MegaHertz) -> KiloHertz {
        KiloHertz(mhz.0 * 1_000)
    }
}
|
// C `char` equivalent. NOTE(review): `i8` matches x86/x86_64 ABIs; C
// `char` is unsigned on some targets (e.g. ARM) — confirm before reuse.
pub type c_char = i8;
// C `size_t` equivalent.
pub type size_t = usize;
// Opaque stand-in for C `void` (pre-`core::ffi::c_void` pattern): an enum
// that cannot be constructed outside this module.
#[repr(u8)]
pub enum c_void {
    // Two dummy variants so the #[repr] attribute can be used.
    #[doc(hidden)]
    __variant1,
    #[doc(hidden)]
    __variant2,
}
|
use super::MarkSweep;
use crate::plan::barriers::NoBarrier;
use crate::plan::mutator_context::Mutator;
use crate::plan::mutator_context::MutatorConfig;
use crate::plan::AllocationSemantics as AllocationType;
use crate::util::alloc::allocators::AllocatorSelector;
use crate::util::alloc::allocators::Allocators;
use crate::util::OpaquePointer;
use crate::vm::VMBinding;
use crate::Plan;
use enum_map::enum_map;
use enum_map::EnumMap;
/// Mutator-side prepare hook for the mark-sweep plan: no per-mutator work.
pub fn ms_mutator_prepare<VM: VMBinding>(_mutator: &mut Mutator<VM>, _tls: OpaquePointer) {
    // Do nothing
}
/// Mutator-side release hook for the mark-sweep plan: no per-mutator work.
pub fn ms_mutator_release<VM: VMBinding>(_mutator: &mut Mutator<VM>, _tls: OpaquePointer) {
    // Do nothing
}
lazy_static! {
    // Maps each allocation semantic to the allocator the mark-sweep plan
    // uses for it: malloc for ordinary objects, bump-pointer for
    // immortal/code/read-only, large-object space for LOS.
    pub static ref ALLOCATOR_MAPPING: EnumMap<AllocationType, AllocatorSelector> = enum_map! {
        AllocationType::Default => AllocatorSelector::Malloc(0),
        AllocationType::Immortal | AllocationType::Code | AllocationType::ReadOnly => AllocatorSelector::BumpPointer(0),
        AllocationType::Los => AllocatorSelector::LargeObject(0),
    };
}
/// Builds a mutator for the mark-sweep plan, wiring each allocator selector
/// to its backing space and installing the (no-op) prepare/release hooks.
///
/// Panics if `plan` is not actually a `MarkSweep` plan (the downcast
/// unwraps).
pub fn create_ms_mutator<VM: VMBinding>(
    mutator_tls: OpaquePointer,
    plan: &'static dyn Plan<VM = VM>,
) -> Mutator<VM> {
    let ms = plan.downcast_ref::<MarkSweep<VM>>().unwrap();
    let config = MutatorConfig {
        allocator_mapping: &*ALLOCATOR_MAPPING,
        // Pair each selector from ALLOCATOR_MAPPING with its space.
        space_mapping: box vec![
            (AllocatorSelector::Malloc(0), ms.ms_space()),
            (
                AllocatorSelector::BumpPointer(0),
                ms.common().get_immortal(),
            ),
            (AllocatorSelector::LargeObject(0), ms.common().get_los()),
        ],
        prepare_func: &ms_mutator_prepare,
        release_func: &ms_mutator_release,
    };
    Mutator {
        allocators: Allocators::<VM>::new(mutator_tls, plan, &config.space_mapping),
        // Mark-sweep needs no write barrier.
        barrier: box NoBarrier,
        mutator_tls,
        config,
        plan,
    }
}
|
use num_traits::{Float};
use rand::Rng;
use rand_distr::Distribution;
use crate::*;
use super::*;
pub use rand_distr::StandardNormal;
/// Distribution that produces normalized Moebius transformation, i.e. `det() == 1`.
pub struct Normalized;
// Sampling a Moebius transform from the standard normal distribution:
// each of the four coefficients is drawn independently.
impl<U> Distribution<Moebius<U>> for StandardNormal where StandardNormal: Distribution<U> {
    fn sample<R: Rng + ?Sized>(&self, rng: &mut R) -> Moebius<U> {
        Moebius::new(
            rng.sample(Self), rng.sample(Self),
            rng.sample(Self), rng.sample(Self),
        )
    }
}
impl<T: Float + Algebra, U: NormSqr<Output=T> + Clone> Distribution<Moebius<Construct<T, U>>> for Normalized where
    StandardNormal: Distribution<Moebius<Construct<T, U>>>,
    Construct<T, U>: Algebra<T>
{
    // Rejection sampling: redraw until the determinant is comfortably
    // nonzero, then normalize so that `det() == 1`. The `loop` is the
    // function's tail expression; `break m.normalize()` yields its value.
    fn sample<R: Rng + ?Sized>(&self, rng: &mut R) -> Moebius<Construct<T, U>> {
        loop {
            let m = rng.sample(&StandardNormal);
            if m.det().norm() > T::epsilon() {
                break m.normalize();
            }
        }
    }
}
|
#![allow(dead_code, unused_variables)]
/*
There are three main types of smart pointers:
Box<T> for allocating values on the heap
Rc<T> is a reference counting type that enables multiple ownership
Ref<T> and RefMut<T>, accessed through RefCell<T> is a type that enforces borrowing rules at runtime.
Many libraries have their own smart pointers, and there can be our own custom implementations as well.
*/
// ########################################################################
// ############################### BOX ####################################
// ########################################################################
/*
at compile time Rust needs to know how much space a type takes up. When this is uncertain, a Box<T> is useful
*/
// Recursive type
// use crate::List::{Cons, Nil};
// enum List {
// Cons(i32, List),
// Nil
// }
// the enum above won't compile because it holds itself. Nil indicates the end of the list but Rust will not know
// how much space to allocate for it resulting in an infinite size.
// the following code will compile
// enum List {
// Cons(i32, Box<List>),
// Nil
// }
// use crate::List::{Cons, Nil};
// fn main() {
// let list = Cons(1,
// Box::new(Cons(2,
// Box::new(Cons(3,
// Box::new(Nil))))) );
// }
// =========================================================
// ===== // SMART POINTERS IMPLEMENT THE DEREF TRAIT =======
// =========================================================
/*
let x = 5;
let y = &x;
*y this is how the value of y is dereferenced using the dereference operator *
Box<T> can be treated as a normal pointer
*/
// fn main() {
// let x = 5;
// let y = Box::new(5);
// println!("{}",*y); // prints 5
// }
// =========================================================
// =========== DEFINING OUR OWN SMART POINTER ==============
// =========================================================
// use std::ops::Deref;
// struct MyBox<T>(T);
// impl<T> MyBox<T> {
// fn new(x: T) -> MyBox<T> {
// MyBox(x)
// }
// }
// impl<T> Deref for MyBox<T> {
// type Target = T;
// fn deref(&self) -> &T {
// &self.0
// }
// }
// fn main() {
// let x = 10;
// let y = MyBox::new(x);
// println!("{}", *y); // prints 5
// // behind the scenes rust replaces the *y syntax with *(y.deref()) in other words *y call deref() for us
// }
// =========================================================
// =================== THE DROP TRAIT ======================
// =========================================================
/*
this trait lets us customize what happens when a value is about to go out of scope.
*/
// struct CustomSmartPointer {
// data: String,
// }
// impl Drop for CustomSmartPointer {
// fn drop(&mut self) { // this method can't be called directly
// println!("Dropping CustomSmartPointer with data `{}`!", self.data);
// }
// }
// fn main() {
// let c = CustomSmartPointer { data: String::from("my stuff") };
// // let d = CustomSmartPointer { data: String::from("other stuff") };
// println!("CustomSmartPointers created.");
// // ======== dropping a value before it goes out of scope ===== //
// std::mem::drop(c); // drop the value earlier than end of scope
// println!("CustomSmartPointer dropped before the end of main.");
// }
// ########################################################################
// ############## Rc<T> the Reference Counted Smart Pointer ##############
// ########################################################################
/*
Rc stands for reference counting. It allows for multiple ownership by keeping track of the number
of references to a value which determines whether or not a value is still in use.
*/
// A cons list whose tail is an `Rc`, so several lists can share a suffix.
enum List {
    Cons(i32, Rc<List>),
    Nil,
}
use crate::List::{Cons, Nil};
use std::rc::Rc;
fn main() {
    // `a` is the shared tail: 5 -> 10 -> Nil. Count starts at 1.
    let a = Rc::new(Cons(5, Rc::new(Cons(10, Rc::new(Nil)))));
    println!("count after creating a = {}", Rc::strong_count(&a));
    // `Rc::clone` bumps the refcount; no deep copy of the list happens.
    // (`b` and `c` are intentionally unused — they exist to show counts.)
    let b = Cons(3, Rc::clone(&a));
    println!("count after creating b = {}", Rc::strong_count(&a));
    {
        let c = Cons(4, Rc::clone(&a));
        println!("count after creating c = {}", Rc::strong_count(&a));
    }
    // `c` was dropped at the end of the inner scope, so the count decreases.
    println!("count after c goes out of scope = {}", Rc::strong_count(&a));
}
/*
Via immutable references, Rc<T> allows you to share data between multiple parts of your program for reading only.
If Rc<T> allowed you to have multiple mutable references too, you might violate one of the borrowing rules:
-- multiple mutable borrows to the same place can cause data races and inconsistencies.
*/
|
use crate::errors::SortOrderError;
use anyhow::Result;
use regex::Regex;
use std::collections::HashMap;
use std::fs;
use std::path::MAIN_SEPARATOR;
/// Sort direction for the `Sort` source.
#[derive(PartialEq)]
pub enum SortOrder {
    Asc,
    Desc,
}
/// Where renaming data comes from: a regex (with a depth and optional max
/// depth), an explicit mapping loaded from JSON, or a sort directive.
pub enum Source {
    Regex(Regex, usize, Option<usize>),
    Map(Vec<(String, String)>),
    Sort(SortOrder),
}
impl Source {
    /// Builds a `Regex` source. When `depth` is not given, it defaults to
    /// the number of path separators in the pattern plus one.
    /// Errors if `pattern` is not a valid regex.
    pub fn new_regex(
        pattern: &str,
        depth: Option<usize>,
        max_depth: Option<usize>,
    ) -> Result<Self> {
        Ok(Self::Regex(
            Regex::new(pattern)?,
            depth.unwrap_or(pattern.chars().filter(|c| *c == MAIN_SEPARATOR).count() + 1),
            max_depth,
        ))
    }
    /// Builds a `Map` source from a JSON file containing a string-to-string
    /// object. Errors on I/O failure or malformed JSON.
    pub fn new_map(filename: &str) -> Result<Self> {
        let contents = fs::read_to_string(filename)?;
        Ok(Self::Map(serde_json::from_str(contents.as_str()).map(
            |map: HashMap<String, String>| map.into_iter().collect(),
        )?))
    }
    /// Builds a `Sort` source from `"asc"`/`"desc"` (case-insensitive);
    /// any other string yields a `SortOrderError`.
    pub fn new_sort(order: &str) -> Result<Self> {
        Ok(Self::Sort(match order.to_lowercase().as_str() {
            "asc" => SortOrder::Asc,
            "desc" => SortOrder::Desc,
            _ => return Err(SortOrderError::new(order).into()),
        }))
    }
}
|
extern crate rustc_serialize;
use std::str::FromStr;
/// A pet in the petstore model (rustc-serialize encodable/decodable).
#[derive(Clone, RustcEncodable, RustcDecodable)]
pub struct Pet {
    pub id: u32,
    category: Category,
    name: String,
    photo_urls: Vec<String>,
    tags: Vec<Tag>,
    pub status: Status
}
/// A pet's category (id + name).
#[derive(Clone, RustcEncodable, RustcDecodable)]
pub struct Category {
    id: u32,
    name: String
}
/// A free-form tag attached to a pet.
#[derive(Clone, RustcEncodable, RustcDecodable)]
pub struct Tag {
    id: u32,
    name: String
}
/// Availability status of a pet.
#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq)]
pub enum Status {
    Available,
    Pending,
    Sold
}
impl FromStr for Status {
type Err = ();
fn from_str(s: &str) -> Result<Status, ()> {
match s {
"Available" => Ok(Status::Available),
"Pending" => Ok(Status::Pending),
"Sold" => Ok(Status::Sold),
_ => Err(()),
}
}
} |
use crate::TableSchema;
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};
/// Table implementation.
/// Table implementation.
#[derive(Serialize, Deserialize, Clone)]
pub struct Table {
    /// Table name.
    pub name: String,
    /// Table id (derived by hashing the name — see `get_table_id`).
    pub id: u64,
    /// Table schema.
    pub schema: TableSchema,
}
impl Table {
    // TODO: Replace hash of name with hash of absolute file path?
    /// Creates a new table with the given name and schema; the id is
    /// derived from the name via `get_table_id`.
    ///
    /// # Arguments
    ///
    /// * `name` - Name of table.
    /// * `schema` - Schema of the table.
    pub fn new(name: String, schema: TableSchema) -> Self {
        let id = Self::get_table_id(&name);
        Table { name, id, schema }
    }
    /// Derives a table id by hashing the table name with the standard
    /// `DefaultHasher`.
    ///
    /// # Arguments
    ///
    /// * `name` - Name of table to get the id for.
    pub fn get_table_id(name: &str) -> u64 {
        let mut state = DefaultHasher::new();
        name.hash(&mut state);
        state.finish()
    }
}
|
use super::*;
/// Builds the `Expression` hooks (bootstrap, typecheck, codegen) for this
/// builtin. (The `boostrap_compiler` field name — sic — matches the shared
/// `Expression` struct defined elsewhere, so it cannot be renamed here.)
pub fn expression() -> Expression {
    Expression {
        boostrap_compiler,
        typecheck,
        codegen,
    }
}
/// No compiler bootstrapping needed for this expression (intentional no-op).
fn boostrap_compiler(_compiler: &mut Compiler) {}
/// Typecheck hook for `let`: allocates and returns a fresh type variable.
///
/// NOTE(review): `args` is never read here — per the comment below, the
/// type annotator special-cases `let` rather than calling this hook.
fn typecheck(
resolver: &mut TypeResolver<TypecheckType>,
_: &TypevarFunction,
args: &Vec<TypeVar>,
) -> GenericResult<TypeVar> {
let type_var = resolver.create_type_var();
// this is not invoked, instead there
// is custom code for this one use case in the
// type annotator.
Ok(type_var)
}
/// Codegen for `let`: evaluates the value expression, allocates (or reuses)
/// a local slot named by the symbol, stores the value, and yields the slot.
///
/// Expects exactly two args: a symbol token and a value expression.
pub fn codegen(context: &mut Context, args: &[Token]) -> CodegenResult<Object> {
if args.len() != 2 {
return Err(CodegenError::new(&format!(
"let function should only have two arguments. found {}: {:?}",
args.len(),
args
)));
};
// First arg must be a symbol naming the binding.
let var_name = match &args[0] {
Token::Symbol(ref s) => s.clone(),
t => {
return Err(CodegenError::new(&format!(
"expected a symbol for the first argument. found {}",
t,
)));
}
};
// Generate code for the value expression before touching the scope.
let target = gen_token(context, &args[1])?;
// Reuse an existing local with this name, or allocate a stack slot (alloca)
// typed after the value. NOTE(review): `or_insert` evaluates its argument
// eagerly, so the object counter is bumped and an alloca instruction is
// emitted even when the local already exists — `or_insert_with` would avoid
// that; confirm whether re-binding the same name should re-allocate.
let result_object = context
.scope
.locals
.entry(*var_name.clone())
.or_insert({
let object = context.function.objects;
context.function.objects += 1;
context.function.basic_blocks[context.block].add_instruction(
LLVMInstruction::BuildAlloca {
llvm_type: context.compiler.llvm.types.get(&target.object_type),
target: object,
},
);
Object::new(object, target.object_type.clone())
})
.clone();
// Store the computed value into the binding's slot.
context.add_instruction(LLVMInstruction::BuildStore {
source: target.index,
target: result_object.index,
});
Ok(result_object.clone())
}
|
use libc::c_char;
use utils::logger::{EnabledCB, FlushCB, LibvcxLogger, LibvcxDefaultLogger, LogCB, LOGGER_STATE, CVoid};
use utils::cstring::CStringUtils;
use utils::error::SUCCESS;
use log::LevelFilter;
use error::prelude::*;
/// Set default logger implementation.
///
/// Allows library user use `env_logger` logger as default implementation.
/// More details about `env_logger` and its customization can be found here: https://crates.io/crates/env_logger
///
/// #Params
/// pattern: (optional) pattern that corresponds with the log messages to show.
///
/// NOTE: You should specify either `pattern` parameter or `RUST_LOG` environment variable to init logger.
///
/// #Returns
/// u32 error code
#[no_mangle]
pub extern fn vcx_set_default_logger(pattern: *const c_char) -> u32 {
info!("vcx_set_default_logger >>>");
// Converts the raw C string into Option<String>; rejects invalid input
// with InvalidConfiguration before touching the logger.
check_useful_opt_c_str!(pattern, VcxErrorKind::InvalidConfiguration);
trace!("vcx_set_default_logger(pattern: {:?})", pattern);
match LibvcxDefaultLogger::init(pattern) {
Ok(()) => {
debug!("Logger Successfully Initialized");
SUCCESS.code_num
}
Err(ec) => {
error!("Logger Failed To Initialize: {}", ec);
ec.into()
}
}
}
/// Set custom logger implementation.
///
/// Allows library user to provide custom logger implementation as set of handlers.
///
/// #Params
/// context: pointer to some logger context that will be available in logger handlers.
/// enabled: (optional) "enabled" operation handler - calls to determines if a log record would be logged. (false positive if not specified)
/// log: "log" operation handler - calls to logs a record.
/// flush: (optional) "flush" operation handler - calls to flushes buffered records (in case of crash or signal).
///
/// #Returns
/// u32 Error Code
#[no_mangle]
pub extern fn vcx_set_logger(context: *const CVoid,
                             enabled: Option<EnabledCB>,
                             log: Option<LogCB>,
                             flush: Option<FlushCB>) -> u32 {
    info!("vcx_set_logger >>>");
    // Fixed the trace message: it previously opened a parenthesis that was
    // never closed ("vcx_set_logger( context: ...").
    trace!("vcx_set_logger(context: {:?}, enabled: {:?}, log: {:?}, flush: {:?})",
           context, enabled, log, flush);
    // `log` is the only mandatory handler; bail out if it is missing.
    check_useful_c_callback!(log, VcxErrorKind::InvalidOption);
    // No max level supplied by this entry point (see vcx_set_logger_with_max_lvl).
    let res = LibvcxLogger::init(context, enabled, log, flush, None);
    match res {
        Ok(()) => {
            debug!("Logger Successfully Initialized");
            SUCCESS.code_num
        }
        Err(ec) => {
            error!("Logger Failed To Initialize: {}", ec);
            ec.into()
        }
    }
}
/// Set custom logger implementation.
///
/// Allows library user to provide custom logger implementation as set of handlers.
///
/// # Arguments
/// * `context` - pointer to some logger context that will be available in logger handlers.
/// * `enabled` - (optional) "enabled" operation handler - calls to determines if a log record would be logged. (false positive if not specified)
/// * `log` - "log" operation handler - calls to logs a record.
/// * `flush` - (optional) "flush" operation handler - calls to flushes buffered records (in case of crash or signal).
/// * `max_lvl` - Maximum log level represented as u32.
/// Possible values are from 0 to 5 inclusive: 0 - Off, 1 - Error, 2 - Warn, 3 - Info, 4 - Debug, 5 - Trace
///
/// # Returns
/// On success returns `ErrorCode::Success`
/// ErrorCode::CommonInvalidParam3 is returned in case of `log` callback is missed
/// ErrorCode::CommonInvalidParam5 is returned in case of `max_lvl` value is out of range [0-5]
#[no_mangle]
pub extern fn vcx_set_logger_with_max_lvl(context: *const CVoid,
enabled: Option<EnabledCB>,
log: Option<LogCB>,
flush: Option<FlushCB>,
max_lvl: u32) -> u32 {
trace!("vcx_set_logger_with_max_lvl >>> context: {:?}, enabled: {:?}, log: {:?}, flush: {:?}, max lvl {}", context, enabled, log, flush, max_lvl);
// `log` is the only mandatory handler.
check_useful_c_callback!(log, VcxErrorKind::InvalidOption);
// Reject levels above LevelFilter::max() (Trace = 5); see the doc comment.
check_u32_less_or_eq!(max_lvl, LevelFilter::max() as usize as u32, VcxErrorKind::InvalidOption);
let result = LibvcxLogger::init(context, enabled, log, flush, Some(max_lvl));
let res = match result {
Ok(()) => {
debug!("Logger Successfully Initialized");
SUCCESS.code_num
}
Err(ec) => {
error!("Logger Failed To Initialize: {}", ec);
ec.into()
}
};
trace!("vcx_set_logger_with_max_lvl: <<< res: {:?}", res);
res
}
///
/// Set maximum log level
///
/// # Arguments
/// * `max_lvl` - Maximum log level represented as u32.
/// Possible values are from 0 to 5 inclusive: 0 - Off, 1 - Error, 2 - Warn, 3 - Trace, 4 - Debug, 5 - Trace
///
/// # Return
/// On success returns `ErrorCode::Success`
/// ErrorCode::CommonInvalidParam1 is returned in case of `max_lvl` value is out of range [0-5]
#[no_mangle]
pub extern fn vcx_set_log_max_lvl(max_lvl: u32) -> u32 {
trace!("vcx_set_log_max_lvl >>> max_lvl: {}", max_lvl);
check_u32_less_or_eq!(max_lvl, LevelFilter::max() as usize as u32, VcxErrorKind::InvalidOption);
let result = LibvcxLogger::set_max_level(max_lvl);
let res = match result {
Ok(_) => {
debug!("Max loglvl successfully set");
SUCCESS.code_num
}
Err(ec) => {
error!("Logger Failed To Initialize: {}", ec);
ec.into()
}
};
trace!("vcx_set_log_max_lvl: <<< res: {:?}", res);
res
}
/// Get the currently used logger.
///
/// NOTE: if logger is not set dummy implementation would be returned.
///
/// #Params
/// `context_p` - Reference that will contain logger context.
/// `enabled_cb_p` - Reference that will contain pointer to enable operation handler.
/// `log_cb_p` - Reference that will contain pointer to log operation handler.
/// `flush_cb_p` - Reference that will contain pointer to flush operation handler.
///
/// #Returns
/// Error code
///
/// This is tested in wrapper tests (python3)
#[no_mangle]
pub extern fn vcx_get_logger(context_p: *mut *const CVoid,
enabled_cb_p: *mut Option<EnabledCB>,
log_cb_p: *mut Option<LogCB>,
flush_cb_p: *mut Option<FlushCB>) -> u32 {
info!("vcx_get_logger >>>");
trace!("vcx_get_logger >>> context_p: {:?}, enabled_cb_p: {:?}, log_cb_p: {:?}, flush_cb_p: {:?}", context_p, enabled_cb_p, log_cb_p, flush_cb_p);
// SAFETY: the out-pointers are dereferenced without null checks — callers
// must pass valid, writable pointers. NOTE(review): consider returning an
// error code for null pointers instead of risking UB.
unsafe {
let (context, enabled_cb, log_cb, flush_cb) = LOGGER_STATE.get();
*context_p = context;
*enabled_cb_p = enabled_cb;
*log_cb_p = log_cb;
*flush_cb_p = flush_cb;
}
let res = SUCCESS.code_num;
trace!("vcx_get_logger: <<< res: {:?}", res);
res
}
|
mod test;
use std::collections::HashMap;
use std::fmt;
/// An optional, owned (boxed) link to an adjacent cell.
pub type Neighbour = Option<Box<Cell>>;
// Cell
/// A single maze cell with optional links to its four neighbours.
#[derive(Eq, PartialEq, Clone, Debug)]
pub struct Cell {
/// Row position within the grid (-1 for the default/uninitialized cell).
pub row: i32,
/// Column position within the grid.
pub column: i32,
pub north: Neighbour,
pub south: Neighbour,
pub east: Neighbour,
pub west: Neighbour,
/// Passage map keyed by the linked cell's `identifier()` string.
pub connections: HashMap<String, bool>,
}
impl Default for Cell {
// Sentinel default: (-1, -1) position, no neighbours, no connections.
fn default() -> Cell {
Cell { row: -1, column: -1, north: None, south: None, east: None, west: None, connections: HashMap::new() }
}
}
impl fmt::Display for Cell {
// Renders the cell as "(row, column)".
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "({}, {})", self.row, self.column)
}
}
impl Cell {
/// Creates a cell at (`row`, `column`) with no neighbours or connections.
pub fn new(row: i32, column: i32) -> Cell {
Cell { row: row, column: column, connections: HashMap::new(), ..Default::default() }
}
/// Returns the "row-column" string used as a key in `connections`.
pub fn identifier(&self) -> String {
format!("{}-{}", self.row, self.column)
}
/// Records a link from `self` to `cell`.
///
/// NOTE(review): `cell` is taken by value, so when `bidirectional` is true
/// the reverse link is inserted into a local copy that is dropped on
/// return — the caller's cell never receives it. Taking `&mut Cell` would
/// fix this but changes the public signature.
pub fn link_cell(&mut self, mut cell: Cell, bidirectional: bool) {
self.connections.insert(cell.identifier(), true);
if bidirectional {
cell.connections.insert(self.identifier(), true);
}
}
/// Returns true if `self` has recorded a link to `cell`.
///
/// NOTE(review): consumes both cells; `&self` and `&Cell` would suffice.
pub fn is_linked_to(self, cell: Cell) -> bool {
self.connections.contains_key(&cell.identifier())
}
}
// Grid
/// A rectangular maze grid holding `rows` x `columns` cells.
#[derive(Eq, PartialEq)]
pub struct Grid {
/// Number of rows (-1 until set by `Grid::new`).
pub rows: i32,
/// Number of columns.
pub columns: i32,
/// Cells indexed by row, then column.
pub cells: Vec<Vec<Cell>>
}
impl Default for Grid {
// Sentinel default: -1 dimensions and no cells; `Grid::new` overwrites these.
fn default() -> Grid {
Grid { rows: -1, columns: -1, cells: Vec::new() }
}
}
impl Grid {
    /// Initializes a Grid.
    ///
    /// Upon initialization, a grid will have its cells generated according
    /// to the rows and columns values given to it.
    pub fn new(rows: i32, columns: i32) -> Grid {
        let mut grid = Grid { rows, columns, ..Default::default() };
        grid.initialize_cells();
        grid.connect_cells();
        grid
    }
    /// Return an iterator for the grid.
    pub fn iter(&self) -> GridIter {
        GridIter { grid: self, row: 0, column: 0 }
    }
    /// Returns a clone of the cell at (`row`, `column`), or `None` when
    /// either index falls outside the grid bounds.
    pub fn retrieve_cell(&self, row: i32, column: i32) -> Option<Cell> {
        if (0..self.rows).contains(&row) && (0..self.columns).contains(&column) {
            Some(self.cells[row as usize][column as usize].clone())
        } else {
            None
        }
    }
    /// Generates cells for an instance of Grid.
    ///
    /// Cells are stored row-major (outer Vec is rows, inner Vec is columns),
    /// each cell initialized with its own row/column coordinates.
    fn initialize_cells(&mut self) {
        self.cells = (0..self.rows)
            .map(|row| {
                (0..self.columns)
                    .map(|column| Cell::new(row, column))
                    .collect()
            })
            .collect();
    }
    /// Placeholder: linking of adjacent cells is not implemented yet.
    fn connect_cells(&self) {
        println!("Connecting cells");
    }
}
// Grid Iterator
/// Iterator state over a borrowed `Grid`: the current (row, column) cursor.
pub struct GridIter<'a> {
grid: &'a Grid,
row: i32,
column: i32,
}
impl<'a> Iterator for GridIter<'a> {
    type Item = Cell;
    /// Yields every cell exactly once in row-major order (row 0 left to
    /// right, then row 1, ...), returning `None` once the grid is exhausted.
    ///
    /// The previous implementation advanced `row` and `column` together and
    /// wrapped each independently, which walked a diagonal, skipped most
    /// cells, and never returned `None` for square grids.
    fn next(&mut self) -> Option<Cell> {
        if self.row >= self.grid.rows {
            return None;
        }
        let cell = self.grid.retrieve_cell(self.row, self.column);
        self.column += 1;
        if self.column >= self.grid.columns {
            self.column = 0;
            self.row += 1;
        }
        cell
    }
}
|
//! Defines multihash codes for Subspace DSN.
use std::error::Error;
use subspace_core_primitives::PieceIndex;
/// Type alias for libp2p Multihash. Constant 64 was copied from libp2p protocols
/// (presumably the maximum digest size in bytes — confirm against libp2p docs).
pub type Multihash = libp2p::multihash::Multihash<64>;
/// Start of Subspace Network multicodec namespace
/// https://github.com/multiformats/multicodec/blob/master/table.csv
const SUBSPACE_MULTICODEC_NAMESPACE_START: u64 = 0xb39910;
/// Subspace Network multihash codes.
// Fieldless enum with a u64 discriminant: deriving `Copy`, `Eq` and `Hash` is
// free and lets callers pass codes by value without cloning, consistent with
// the by-value `From<MultihashCode> for u64` conversion.
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
#[repr(u64)]
pub enum MultihashCode {
    /// Piece index code.
    PieceIndex = SUBSPACE_MULTICODEC_NAMESPACE_START,
}
impl From<MultihashCode> for u64 {
#[inline]
// Returns the raw multicodec value via the enum's u64 discriminant.
fn from(code: MultihashCode) -> Self {
code as u64
}
}
impl TryFrom<u64> for MultihashCode {
    type Error = Box<dyn Error>;
    /// Maps a raw multicodec value back to a known code, rejecting any
    /// unrecognized value with a boxed string error.
    #[inline]
    fn try_from(value: u64) -> Result<Self, Self::Error> {
        if value == MultihashCode::PieceIndex as u64 {
            Ok(MultihashCode::PieceIndex)
        } else {
            Err("Unexpected multihash code".into())
        }
    }
}
/// Helper trait for converting to multihash.
pub trait ToMultihash {
/// Convert to multihash by the default multihash code for this type.
fn to_multihash(&self) -> Multihash;
/// Convert to multihash by the specified multihash code.
fn to_multihash_by_code(&self, code: MultihashCode) -> Multihash;
}
impl ToMultihash for PieceIndex {
fn to_multihash(&self) -> Multihash {
// The default code for piece indices is `MultihashCode::PieceIndex`.
self.to_multihash_by_code(MultihashCode::PieceIndex)
}
fn to_multihash_by_code(&self, code: MultihashCode) -> Multihash {
// `wrap` only fails when the digest exceeds the 64-byte capacity; the
// piece-index byte encoding is small, so the expect below documents why
// this cannot happen.
Multihash::wrap(u64::from(code), &self.to_bytes())
.expect("Input never exceeds allocated size; qed")
}
}
|
use crate::{BoxFuture, CtxTransaction, Result};
use std::fmt::Debug;
/// A storable entity with optional change-tracking hooks.
pub trait Entity: Send + Sync + 'static {
/// Key type identifying an instance of this entity.
type Key: Send + Sync;
/// Context value threaded through the tracking hooks.
type TrackCtx: Debug + Sync;
/// Hook invoked on insert/upsert; `_old` carries the replaced value, if
/// any. The default is a no-op that immediately resolves to `Ok(())`.
fn track_insert<'a>(
&'a self,
_key: &'a Self::Key,
_old: Option<&'a Self>,
_ctx: &'a mut CtxTransaction,
_track: &'a Self::TrackCtx,
) -> BoxFuture<'a, Result<()>> {
box_future_ok()
}
/// Hook invoked on removal. The default is a no-op that immediately
/// resolves to `Ok(())`.
fn track_remove<'a>(
&'a self,
_key: &'a Self::Key,
_ctx: &'a mut CtxTransaction,
_track: &'a Self::TrackCtx,
) -> BoxFuture<'a, Result<()>> {
box_future_ok()
}
}
#[cfg(feature = "cache")]
// A cached island delegates key/tracking types to the wrapped entity and
// keeps the default no-op tracking hooks.
impl<T> Entity for cache::CacheIsland<T>
where
T: Entity,
{
type Key = T::Key;
type TrackCtx = T::TrackCtx;
}
// An optional entity delegates key/tracking types to the wrapped entity and
// keeps the default no-op tracking hooks.
impl<T> Entity for Option<T>
where
T: Entity,
{
type Key = T::Key;
type TrackCtx = T::TrackCtx;
}
// Shared helper: an already-completed boxed future resolving to Ok(()).
fn box_future_ok() -> BoxFuture<'static, Result<()>> {
Box::pin(std::future::ready(Ok(())))
}
|
#[cfg(feature = "python")]
pub mod py;
mod test;
use crate::math::inverse_newton_raphson;
/// The Lennard-Jones link potential freely-jointed chain (Lennard-Jones-FJC) model thermodynamics in the isometric ensemble.
pub mod isometric;
/// The Lennard-Jones link potential freely-jointed chain (Lennard-Jones-FJC) model thermodynamics in the isotensional ensemble.
pub mod isotensional;
/// The structure of the Lennard-Jones-FJC model thermodynamics.
///
/// Construct via `LENNARDJONESFJC::init`; the embedded `isometric` and
/// `isotensional` sub-models are built from the same parameters.
pub struct LENNARDJONESFJC
{
/// The mass of each hinge in the chain in units of kg/mol.
pub hinge_mass: f64,
/// The length of each link in the chain in units of nm.
pub link_length: f64,
/// The number of links in the chain.
pub number_of_links: u8,
/// The stiffness of each link in the chain in units of J/(mol⋅nm^2).
pub link_stiffness: f64,
/// The thermodynamic functions of the model in the isometric ensemble.
pub isometric: self::isometric::LENNARDJONESFJC,
/// The thermodynamic functions of the model in the isotensional ensemble.
pub isotensional: self::isotensional::LENNARDJONESFJC
}
/// The implemented functionality of the Lennard-Jones-FJC model thermodynamics.
impl LENNARDJONESFJC
{
/// Initializes and returns an instance of the Lennard-Jones-FJC model thermodynamics.
///
/// # Arguments
/// * `number_of_links` - The number of links in the chain.
/// * `link_length` - The length of each link, in nm.
/// * `hinge_mass` - The mass of each hinge, in kg/mol.
/// * `link_stiffness` - The stiffness of each link, in J/(mol⋅nm^2).
pub fn init(number_of_links: u8, link_length: f64, hinge_mass: f64, link_stiffness: f64) -> Self
{
LENNARDJONESFJC
{
hinge_mass,
link_length,
number_of_links,
link_stiffness,
isometric: self::isometric::LENNARDJONESFJC::init(number_of_links, link_length, hinge_mass, link_stiffness),
isotensional: self::isotensional::LENNARDJONESFJC::init(number_of_links, link_length, hinge_mass, link_stiffness)
}
}
}
/// Solves for the nondimensional Lennard-Jones link stretch at a given
/// nondimensional force by Newton-Raphson inversion.
fn nondimensional_link_stretch(nondimensional_link_stiffness: &f64, nondimensional_force: &f64) -> f64
{
    // Invert g(x) = x^-7 - x^-13 (derivative g'(x) = 13x^-14 - 7x^-8) at the
    // target value 6*f/k, starting from x = 1 with tolerance 1e-6 and at
    // most 100 iterations.
    let target = 6.0 * nondimensional_force / nondimensional_link_stiffness;
    let residual = |x: &f64| x.powi(-7) - x.powi(-13);
    let slope = |x: &f64| 13.0 * x.powi(-14) - 7.0 * x.powi(-8);
    inverse_newton_raphson(&target, &residual, &slope, &1.0, &1e-6, &100)
}
|
use nix::poll::*;
use nix::unistd::{write, pipe};
#[test]
fn test_poll() {
// Create a pipe; the read end starts with no data available.
let (r, w) = pipe().unwrap();
let mut fds = [PollFd::new(r, POLLIN, EventFlags::empty())];
// With nothing written yet, poll times out (100 ms) with zero ready fds.
let nfds = poll(&mut fds, 100).unwrap();
assert_eq!(nfds, 0);
assert!(!fds[0].revents().unwrap().contains(POLLIN));
// After writing one byte, the read end reports POLLIN readiness.
write(w, b".").unwrap();
let nfds = poll(&mut fds, 100).unwrap();
assert_eq!(nfds, 1);
assert!(fds[0].revents().unwrap().contains(POLLIN));
}
|
//! This modules contains both the `static_loader` and `ArcLoader`
//! implementations, as well as the `Loader` trait. Which provides a loader
//! agnostic interface.
#[cfg(feature = "handlebars")]
mod handlebars;
#[cfg(feature = "tera")]
mod tera;
mod shared;
use std::collections::HashMap;
use fluent_bundle::concurrent::FluentBundle;
use fluent_bundle::{FluentArgs, FluentResource, FluentValue};
use fluent_langneg::negotiate_languages;
pub use unic_langid::{langid, langids, LanguageIdentifier};
mod arc_loader;
mod static_loader;
pub use arc_loader::{ArcLoader, ArcLoaderBuilder};
pub use static_loader::StaticLoader;
/// A loader capable of looking up Fluent keys given a language.
pub trait Loader {
/// Look up `text_id` for `lang` in Fluent, without arguments.
fn lookup(&self, lang: &LanguageIdentifier, text_id: &str) -> String {
self.lookup_complete::<&str>(lang, text_id, None)
}
/// Look up `text_id` for `lang` with `args` in Fluent.
fn lookup_with_args<T: AsRef<str>>(
&self,
lang: &LanguageIdentifier,
text_id: &str,
args: &HashMap<T, FluentValue>,
) -> String {
self.lookup_complete(lang, text_id, Some(args))
}
/// Look up `text_id` for `lang` in Fluent, using any `args` if provided.
/// This is the only required method; the convenience methods above
/// delegate to it.
fn lookup_complete<T: AsRef<str>>(
&self,
lang: &LanguageIdentifier,
text_id: &str,
args: Option<&HashMap<T, FluentValue>>,
) -> String;
/// Returns an Iterator over the locales that are present.
fn locales(&self) -> Box<dyn Iterator<Item = &LanguageIdentifier> + '_>;
}
// Delegation impl so `Arc<impl Loader>` is itself a `Loader`.
impl<L> Loader for std::sync::Arc<L>
where
L: Loader,
{
fn lookup_complete<T: AsRef<str>>(
&self,
lang: &LanguageIdentifier,
text_id: &str,
args: Option<&HashMap<T, FluentValue>>,
) -> String {
L::lookup_complete(self, lang, text_id, args)
}
fn locales(&self) -> Box<dyn Iterator<Item = &LanguageIdentifier> + '_> {
L::locales(self)
}
}
// Delegation impl so a shared reference to a `Loader` is itself a `Loader`.
impl<'a, L> Loader for &'a L
where
L: Loader,
{
fn lookup_complete<T: AsRef<str>>(
&self,
lang: &LanguageIdentifier,
text_id: &str,
args: Option<&HashMap<T, FluentValue>>,
) -> String {
L::lookup_complete(self, lang, text_id, args)
}
fn locales(&self) -> Box<dyn Iterator<Item = &LanguageIdentifier> + '_> {
L::locales(self)
}
}
/// A `Loader` agnostic container type with optional trait implementations
/// for integrating with different libraries.
pub struct FluentLoader<L> {
/// The wrapped loader implementation.
loader: L,
/// Optional default language; per `with_default_lang`, currently only the
/// Tera integration consults it.
#[allow(unused)]
default_lang: Option<LanguageIdentifier>,
}
impl<L> FluentLoader<L> {
/// Create a new `FluentLoader`.
pub fn new(loader: L) -> Self {
Self {
loader,
default_lang: None,
}
}
/// Set default language for this `FluentLoader`.
/// Template engines can use this value when rendering translations.
/// So far this feature is only implemented for Tera.
pub fn with_default_lang(self, lang: LanguageIdentifier) -> Self {
Self {
loader: self.loader,
default_lang: Some(lang),
}
}
}
/// Constructs a map of languages with a list of potential fallback languages.
pub fn build_fallbacks(
locales: &[LanguageIdentifier],
) -> HashMap<LanguageIdentifier, Vec<LanguageIdentifier>> {
let mut map = HashMap::new();
for locale in locales.iter() {
// For each locale, negotiate an ordered list of compatible locales
// from the full set to serve as its fallback chain.
map.insert(
locale.to_owned(),
negotiate_languages(
&[locale],
locales,
None,
fluent_langneg::NegotiationStrategy::Filtering,
)
.into_iter()
.cloned()
.collect::<Vec<_>>(),
);
}
map
}
/// Creates a new static `FluentBundle` for `lang` using `resources`. Optionally
/// shared resources can be specified with `core_resource` and the bundle can
/// be customized with `customizer`.
fn create_bundle(
lang: LanguageIdentifier,
resources: &'static [FluentResource],
core_resource: Option<&'static FluentResource>,
customizer: &impl Fn(&mut FluentBundle<&'static FluentResource>),
) -> FluentBundle<&'static FluentResource> {
// The bundle is created for a single locale.
let mut bundle: FluentBundle<&'static FluentResource> = FluentBundle::new(vec![lang]);
// Add the shared/core resource first, then the language-specific ones.
if let Some(core) = core_resource {
bundle
.add_resource(core)
.expect("Failed to add core resource to bundle");
}
for res in resources {
bundle
.add_resource(res)
.expect("Failed to add FTL resources to the bundle.");
}
// Give the caller a chance to tweak bundle options (e.g. isolation).
customizer(&mut bundle);
bundle
}
/// Maps from map of languages containing a list of resources to a map of
/// languages containing a `FluentBundle` of those resources.
pub fn build_bundles(
resources: &'static HashMap<LanguageIdentifier, Vec<FluentResource>>,
core_resource: Option<&'static FluentResource>,
customizer: impl Fn(&mut FluentBundle<&'static FluentResource>),
) -> HashMap<LanguageIdentifier, FluentBundle<&'static FluentResource>> {
let mut bundles = HashMap::new();
// Build one bundle per language from that language's resource list.
for (k, ref v) in resources.iter() {
bundles.insert(
k.clone(),
create_bundle(k.clone(), &v, core_resource, &customizer),
);
}
bundles
}
/// Converts an optional string-keyed map into optional `FluentArgs`,
/// cloning each value and borrowing each key for `'map`.
fn map_to_fluent_args<'map, T: AsRef<str>>(
    map: Option<&'map HashMap<T, FluentValue>>,
) -> Option<FluentArgs<'map>> {
    // Preserve "no args" as `None`: the previous version returned
    // `Some(FluentArgs::new())` even when no map was supplied, losing the
    // distinction between "no arguments" and "empty arguments".
    map.map(|map| {
        let mut args = FluentArgs::new();
        for (key, value) in map {
            args.add(key.as_ref(), value.clone());
        }
        args
    })
}
|
use crate::bond::deposit_farm_share;
use crate::contract::{handle, init, query};
use crate::mock_querier::{mock_dependencies, WasmMockQuerier};
use crate::state::read_config;
use cosmwasm_std::testing::{mock_env, MockApi, MockStorage, MOCK_CONTRACT_ADDR};
use cosmwasm_std::{
from_binary, to_binary, CosmosMsg, Decimal, Extern, HumanAddr, Uint128, WasmMsg,
};
use cw20::{Cw20HandleMsg, Cw20ReceiveMsg};
use mirror_protocol::gov::HandleMsg as MirrorGovHandleMsg;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use spectrum_protocol::gov::HandleMsg as GovHandleMsg;
use spectrum_protocol::mirror_farm::{
ConfigInfo, Cw20HookMsg, HandleMsg, HandleMsg as MirrorStakingHandleMsg, PoolItem,
PoolsResponse, QueryMsg, StateInfo,
};
use std::fmt::Debug;
const SPEC_GOV: &str = "spec_gov";
const SPEC_TOKEN: &str = "spec_token";
const MIR_GOV: &str = "mir_gov";
const MIR_TOKEN: &str = "mir_token";
const MIR_STAKING: &str = "mir_staking";
const TERRA_SWAP: &str = "terra_swap";
const TEST_CREATOR: &str = "creator";
const USER1: &str = "user1";
const USER2: &str = "user2";
const USER3: &str = "user3";
const MIR_LP: &str = "mir_lp";
const SPY_TOKEN: &str = "spy_token";
const SPY_LP: &str = "spy_lp";
/// Local test copy of the contract's reward-info query response — presumably
/// mirrors the `mirror_farm` response type; used only to deserialize queries.
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)]
pub struct RewardInfoResponse {
/// Address whose rewards are reported.
pub staker_addr: HumanAddr,
/// One entry per bonded asset.
pub reward_infos: Vec<RewardInfoResponseItem>,
}
/// Per-asset reward entry within `RewardInfoResponse`.
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)]
pub struct RewardInfoResponseItem {
/// Token of the bonded asset.
pub asset_token: HumanAddr,
/// Total bonded amount (auto + stake portions).
pub bond_amount: Uint128,
pub auto_bond_amount: Uint128,
pub stake_bond_amount: Uint128,
pub pending_farm_reward: Uint128,
pub pending_spec_reward: Uint128,
pub accum_spec_share: Uint128,
}
#[test]
fn test() {
// The sub-tests below share mutable deps and build on each other's
// state, so they must run in exactly this order.
let mut deps = mock_dependencies(20, &[]);
deps.querier.with_balance_percent(100);
let _ = test_config(&mut deps);
test_register_asset(&mut deps);
test_bond(&mut deps);
test_deposit_fee(&mut deps);
test_staked_reward(&mut deps);
}
/// Exercises init, config/state queries, and owner-gated `update_config`.
/// Returns the (updated) config for later sub-tests.
fn test_config(deps: &mut Extern<MockStorage, MockApi, WasmMockQuerier>) -> ConfigInfo {
// test init & read config & read state
let env = mock_env(TEST_CREATOR, &[]);
let mut config = ConfigInfo {
owner: HumanAddr::from(TEST_CREATOR),
spectrum_gov: HumanAddr::from(SPEC_GOV),
spectrum_token: HumanAddr::from(SPEC_TOKEN),
mirror_gov: HumanAddr::from(MIR_GOV),
mirror_token: HumanAddr::from(MIR_TOKEN),
mirror_staking: HumanAddr::from(MIR_STAKING),
terraswap_factory: HumanAddr::from(TERRA_SWAP),
platform: Option::None,
controller: Option::None,
base_denom: "uusd".to_string(),
community_fee: Decimal::zero(),
platform_fee: Decimal::zero(),
controller_fee: Decimal::zero(),
deposit_fee: Decimal::zero(),
lock_start: 0u64,
lock_end: 0u64,
};
// success init
let res = init(deps, env.clone(), config.clone());
assert!(res.is_ok());
// read config
let msg = QueryMsg::config {};
let res: ConfigInfo = from_binary(&query(deps, msg).unwrap()).unwrap();
assert_eq!(res, config.clone());
// read state
let msg = QueryMsg::state {};
let res: StateInfo = from_binary(&query(deps, msg).unwrap()).unwrap();
assert_eq!(
res,
StateInfo {
previous_spec_share: Uint128::zero(),
total_farm_share: Uint128::zero(),
total_weight: 0u32,
spec_share_index: Decimal::zero(),
}
);
// alter config, validate owner: a non-owner sender must be rejected
let env = mock_env(SPEC_GOV, &[]);
let msg = HandleMsg::update_config {
owner: Some(HumanAddr::from(SPEC_GOV)),
platform: None,
controller: None,
community_fee: None,
platform_fee: None,
controller_fee: None,
deposit_fee: None,
lock_start: None,
lock_end: None,
};
let res = handle(deps, env.clone(), msg.clone());
assert!(res.is_err());
// success: the creator may transfer ownership to SPEC_GOV
let env = mock_env(TEST_CREATOR, &[]);
let res = handle(deps, env.clone(), msg);
assert!(res.is_ok());
let msg = QueryMsg::config {};
let res: ConfigInfo = from_binary(&query(deps, msg).unwrap()).unwrap();
config.owner = HumanAddr::from(SPEC_GOV);
assert_eq!(res, config.clone());
config
}
/// Registers the MIR and SPY assets (owner-only) and checks pool bookkeeping.
fn test_register_asset(deps: &mut Extern<MockStorage, MockApi, WasmMockQuerier>) {
// no permission: the creator is no longer owner after test_config
let env = mock_env(TEST_CREATOR, &[]);
let msg = HandleMsg::register_asset {
asset_token: HumanAddr::from(MIR_TOKEN),
staking_token: HumanAddr::from(MIR_LP),
weight: 1u32,
auto_compound: true,
};
let res = handle(deps, env.clone(), msg.clone());
assert!(res.is_err());
// success
let env = mock_env(SPEC_GOV, &[]);
let res = handle(deps, env.clone(), msg);
assert!(res.is_ok());
// query pool info: a fresh pool starts with all-zero accounting
let msg = QueryMsg::pools {};
let res: PoolsResponse = from_binary(&query(deps, msg).unwrap()).unwrap();
assert_eq!(
res,
PoolsResponse {
pools: vec![PoolItem {
asset_token: HumanAddr::from(MIR_TOKEN),
staking_token: HumanAddr::from(MIR_LP),
weight: 1u32,
auto_compound: true,
farm_share: Uint128::zero(),
state_spec_share_index: Decimal::zero(),
stake_spec_share_index: Decimal::zero(),
auto_spec_share_index: Decimal::zero(),
farm_share_index: Decimal::zero(),
total_stake_bond_amount: Uint128::zero(),
total_stake_bond_share: Uint128::zero(),
total_auto_bond_share: Uint128::zero(),
reinvest_allowance: Uint128::zero(),
}]
}
);
// vault2
let msg = HandleMsg::register_asset {
asset_token: HumanAddr::from(SPY_TOKEN),
staking_token: HumanAddr::from(SPY_LP),
weight: 2u32,
auto_compound: true,
};
let res = handle(deps, env.clone(), msg);
assert!(res.is_ok());
// read state: total weight = 1 (MIR) + 2 (SPY)
let msg = QueryMsg::state {};
let res: StateInfo = from_binary(&query(deps, msg).unwrap()).unwrap();
assert_eq!(res.total_weight, 3u32);
}
fn test_bond(deps: &mut Extern<MockStorage, MockApi, WasmMockQuerier>) {
// bond err
let env = mock_env(TEST_CREATOR, &[]);
let msg = HandleMsg::receive(Cw20ReceiveMsg {
sender: HumanAddr::from(USER1),
amount: Uint128::from(10000u128),
msg: Some(
to_binary(&Cw20HookMsg::bond {
staker_addr: None,
asset_token: HumanAddr::from(MIR_TOKEN),
compound_rate: Some(Decimal::percent(60)),
})
.unwrap(),
),
});
let res = handle(deps, env.clone(), msg.clone());
assert!(res.is_err());
// bond success
let env = mock_env(MIR_LP, &[]);
let res = handle(deps, env.clone(), msg);
assert!(res.is_ok());
let config = read_config(&deps.storage).unwrap();
deposit_farm_share(
deps,
&config,
vec![(HumanAddr::from(MIR_TOKEN), Uint128::from(1000u128))],
)
.unwrap();
deps.querier.with_token_balances(&[
(
&HumanAddr::from(MIR_STAKING),
&[
(&HumanAddr::from(MIR_TOKEN), &Uint128::from(12000u128)),
(&HumanAddr::from(SPY_TOKEN), &Uint128::from(5000u128)),
],
),
(
&HumanAddr::from(MIR_GOV),
&[(
&HumanAddr::from(MOCK_CONTRACT_ADDR),
&Uint128::from(1000u128),
)],
),
(
&HumanAddr::from(SPEC_GOV),
&[(
&HumanAddr::from(MOCK_CONTRACT_ADDR),
&Uint128::from(2700u128),
)],
),
]);
// query balance
let msg = QueryMsg::reward_info {
staker_addr: HumanAddr::from(USER1),
asset_token: None,
height: 0u64,
};
let res: RewardInfoResponse = from_binary(&query(deps, msg).unwrap()).unwrap();
assert_eq!(
res.reward_infos,
vec![RewardInfoResponseItem {
asset_token: HumanAddr::from(MIR_TOKEN),
pending_farm_reward: Uint128::from(1000u128),
pending_spec_reward: Uint128::from(900u128),
bond_amount: Uint128::from(12000u128),
auto_bond_amount: Uint128::from(8000u128),
stake_bond_amount: Uint128::from(4000u128),
accum_spec_share: Uint128::from(900u128),
},]
);
// bond SPY
let env = mock_env(SPY_LP, &[]);
let msg = HandleMsg::receive(Cw20ReceiveMsg {
sender: HumanAddr::from(USER1),
amount: Uint128::from(4000u128),
msg: Some(
to_binary(&Cw20HookMsg::bond {
staker_addr: None,
asset_token: HumanAddr::from(SPY_TOKEN),
compound_rate: Some(Decimal::percent(50)),
})
.unwrap(),
),
});
let res = handle(deps, env.clone(), msg.clone());
assert!(res.is_ok());
// unbond
let env = mock_env(USER1, &[]);
let msg = HandleMsg::unbond {
asset_token: HumanAddr::from(MIR_TOKEN),
amount: Uint128::from(3000u128),
};
let res = handle(deps, env.clone(), msg);
assert!(res.is_ok());
assert_eq!(
res.unwrap().messages,
[
CosmosMsg::Wasm(WasmMsg::Execute {
contract_addr: HumanAddr::from(MIR_STAKING),
send: vec![],
msg: to_binary(&MirrorStakingHandleMsg::unbond {
amount: Uint128::from(3000u128),
asset_token: HumanAddr::from(MIR_TOKEN),
})
.unwrap(),
}),
CosmosMsg::Wasm(WasmMsg::Execute {
contract_addr: HumanAddr::from(MIR_LP),
send: vec![],
msg: to_binary(&Cw20HandleMsg::Transfer {
recipient: HumanAddr::from(USER1),
amount: Uint128::from(3000u128),
})
.unwrap(),
}),
]
);
// withdraw
let msg = HandleMsg::withdraw { asset_token: None };
let res = handle(deps, env.clone(), msg);
assert!(res.is_ok());
assert_eq!(
res.unwrap().messages,
vec![
CosmosMsg::Wasm(WasmMsg::Execute {
contract_addr: HumanAddr::from(SPEC_GOV),
send: vec![],
msg: to_binary(&GovHandleMsg::withdraw {
amount: Some(Uint128::from(2700u128)),
})
.unwrap(),
}),
CosmosMsg::Wasm(WasmMsg::Execute {
contract_addr: HumanAddr::from(SPEC_TOKEN),
send: vec![],
msg: to_binary(&Cw20HandleMsg::Transfer {
recipient: HumanAddr::from(USER1),
amount: Uint128::from(2700u128),
})
.unwrap(),
}),
CosmosMsg::Wasm(WasmMsg::Execute {
contract_addr: HumanAddr::from(MIR_GOV),
send: vec![],
msg: to_binary(&MirrorGovHandleMsg::WithdrawVotingTokens {
amount: Some(Uint128::from(1000u128)),
})
.unwrap(),
}),
CosmosMsg::Wasm(WasmMsg::Execute {
contract_addr: HumanAddr::from(MIR_TOKEN),
send: vec![],
msg: to_binary(&Cw20HandleMsg::Transfer {
recipient: HumanAddr::from(USER1),
amount: Uint128::from(1000u128),
})
.unwrap(),
}),
]
);
deposit_farm_share(
deps,
&config,
vec![
(HumanAddr::from(MIR_TOKEN), Uint128::from(500u128)),
(HumanAddr::from(SPY_TOKEN), Uint128::from(1000u128)),
],
)
.unwrap();
deps.querier.with_token_balances(&[
(
&HumanAddr::from(MIR_STAKING),
&[
(&HumanAddr::from(MIR_TOKEN), &Uint128::from(10000u128)),
(&HumanAddr::from(SPY_TOKEN), &Uint128::from(6000u128)),
],
),
(
&HumanAddr::from(MIR_GOV),
&[(
&HumanAddr::from(MOCK_CONTRACT_ADDR),
&Uint128::from(3000u128),
)],
),
(
&HumanAddr::from(SPEC_GOV),
&[(
&HumanAddr::from(MOCK_CONTRACT_ADDR),
&Uint128::from(1800u128),
)],
),
]);
// query balance
let msg = QueryMsg::reward_info {
staker_addr: HumanAddr::from(USER1),
asset_token: None,
height: 0u64,
};
let res: RewardInfoResponse = from_binary(&query(deps, msg).unwrap()).unwrap();
assert_eq!(
res.reward_infos,
vec![
RewardInfoResponseItem {
asset_token: HumanAddr::from(MIR_TOKEN),
pending_farm_reward: Uint128::from(998u128),
pending_spec_reward: Uint128::from(599u128),
bond_amount: Uint128::from(10000u128),
auto_bond_amount: Uint128::from(7000u128),
stake_bond_amount: Uint128::from(3000u128),
accum_spec_share: Uint128::from(1499u128),
},
RewardInfoResponseItem {
asset_token: HumanAddr::from(SPY_TOKEN),
pending_farm_reward: Uint128::from(2000u128),
pending_spec_reward: Uint128::from(1200u128),
bond_amount: Uint128::from(6000u128),
auto_bond_amount: Uint128::from(4000u128),
stake_bond_amount: Uint128::from(2000u128),
accum_spec_share: Uint128::from(3000u128),
},
]
);
// bond user2
let env = mock_env(MIR_LP, &[]);
let msg = HandleMsg::receive(Cw20ReceiveMsg {
sender: HumanAddr::from(USER2),
amount: Uint128::from(5000u128),
msg: Some(
to_binary(&Cw20HookMsg::bond {
staker_addr: None,
asset_token: HumanAddr::from(MIR_TOKEN),
compound_rate: None,
})
.unwrap(),
),
});
let res = handle(deps, env.clone(), msg);
assert!(res.is_ok());
let env = mock_env(SPY_LP, &[]);
let msg = HandleMsg::receive(Cw20ReceiveMsg {
sender: HumanAddr::from(USER2),
amount: Uint128::from(4000u128),
msg: Some(
to_binary(&Cw20HookMsg::bond {
staker_addr: None,
asset_token: HumanAddr::from(SPY_TOKEN),
compound_rate: None,
})
.unwrap(),
),
});
let res = handle(deps, env.clone(), msg);
assert!(res.is_ok());
deposit_farm_share(
deps,
&config,
vec![
(HumanAddr::from(MIR_TOKEN), Uint128::from(4000u128)),
(HumanAddr::from(SPY_TOKEN), Uint128::from(7200u128)),
],
)
.unwrap();
deps.querier.with_token_balances(&[
(
&HumanAddr::from(MIR_STAKING),
&[
(&HumanAddr::from(MIR_TOKEN), &Uint128::from(16000u128)),
(&HumanAddr::from(SPY_TOKEN), &Uint128::from(12000u128)),
],
),
(
&HumanAddr::from(MIR_GOV),
&[(
&HumanAddr::from(MOCK_CONTRACT_ADDR),
&Uint128::from(14200u128),
)],
),
(
&HumanAddr::from(SPEC_GOV),
&[(
&HumanAddr::from(MOCK_CONTRACT_ADDR),
&Uint128::from(16200u128),
)],
),
]);
/*
UNIT: 100
MIR balance: 142 = existing MIR: 30 + new MIR: 112
new MIR: 112 (MIR pool = 40, SPY pool = 72)
new MIR on MIR pool: 40 (USER1 = 15, USER2 = 25) from stake USER1 = 30 & USER2 = 50
new MIR on SPY pool: 72 (USER1 = 24, USER2 = 48) from stake USER1 = 20 & USER2 = 40
USER1 MIR total: 25 (existing MIR = 10, new on MIR pool = 15)
USER1 SPY total: 44 (existing MIR = 20, new on SPY pool = 24)
USER2 MIR total: 25 (new on MIR pool = 25)
USER2 SPY total: 48 (new on SPY pool = 48)
existing SPEC: 18 (MIR pool = 6, SPY pool = 12)
SPEC balance: 162 - existing SPEC: 18 = new SPEC: 144
new SPEC: 144 (MIR pool = 48, SPY pool = 96) from weight MIR pool = 1 & SPY pool = 2
new SPEC on MIR pool: 48 (USER1 = 33, USER2 = 15) from bond USER1 = 110 & USER2 = 50
new SPEC on SPY pool: 96 (USER1 = 64, USER2 = 32) from bond USER1 = 80 & USER2 = 40
USER1 MIR total: 39 (existing MIR = 6, new on MIR pool = 33)
USER1 SPY total: 76 (existing MIR = 12, new on SPY pool = 64)
USER2 MIR total: 15 (new on MIR pool = 15)
USER2 SPY total: 32 (new on SPY pool = 32)
*/
// query balance1
let msg = QueryMsg::reward_info {
staker_addr: HumanAddr::from(USER1),
asset_token: None,
height: 0u64,
};
let res: RewardInfoResponse = from_binary(&query(deps, msg).unwrap()).unwrap();
assert_eq!(
res.reward_infos,
vec![
RewardInfoResponseItem {
asset_token: HumanAddr::from(MIR_TOKEN),
pending_farm_reward: Uint128::from(2498u128),
pending_spec_reward: Uint128::from(3899u128),
bond_amount: Uint128::from(11000u128),
auto_bond_amount: Uint128::from(8000u128),
stake_bond_amount: Uint128::from(3000u128),
accum_spec_share: Uint128::from(4799u128),
},
RewardInfoResponseItem {
asset_token: HumanAddr::from(SPY_TOKEN),
pending_farm_reward: Uint128::from(4400u128),
pending_spec_reward: Uint128::from(7600u128),
bond_amount: Uint128::from(8000u128),
auto_bond_amount: Uint128::from(6000u128),
stake_bond_amount: Uint128::from(2000u128),
accum_spec_share: Uint128::from(9400u128),
},
]
);
// query balance2
let msg = QueryMsg::reward_info {
staker_addr: HumanAddr::from(USER2),
asset_token: None,
height: 0u64,
};
let res: RewardInfoResponse = from_binary(&query(deps, msg).unwrap()).unwrap();
assert_eq!(
res.reward_infos,
vec![
RewardInfoResponseItem {
asset_token: HumanAddr::from(MIR_TOKEN),
pending_farm_reward: Uint128::from(2500u128),
pending_spec_reward: Uint128::from(1500u128),
bond_amount: Uint128::from(5000u128),
auto_bond_amount: Uint128::from(0u128),
stake_bond_amount: Uint128::from(5000u128),
accum_spec_share: Uint128::from(1500u128),
},
RewardInfoResponseItem {
asset_token: HumanAddr::from(SPY_TOKEN),
pending_farm_reward: Uint128::from(4800u128),
pending_spec_reward: Uint128::from(3200u128),
bond_amount: Uint128::from(4000u128),
auto_bond_amount: Uint128::from(0u128),
stake_bond_amount: Uint128::from(4000u128),
accum_spec_share: Uint128::from(3200u128),
},
]
);
}
/// Scenario: enable a 20% deposit fee, bond USER3 into both the MIR and SPY
/// pools, then verify the fee skimmed from USER3's deposit is redistributed
/// pro-rata to the existing stakers (USER1, USER2).
/// NOTE(review): the expected amounts assume each user's bond is derived from
/// his share of the mock staking balances (96000 MIR-LP / 72000 SPY-LP) —
/// confirm against the contract's bond accounting.
fn test_deposit_fee(deps: &mut Extern<MockStorage, MockApi, WasmMockQuerier>) {
    // alter config, deposit fee: only SPEC_GOV (the owner) may update config
    let env = mock_env(SPEC_GOV, &[]);
    let msg = HandleMsg::update_config {
        owner: None,
        platform: None,
        controller: None,
        community_fee: None,
        platform_fee: None,
        controller_fee: None,
        deposit_fee: Some(Decimal::percent(20)),
        lock_start: None,
        lock_end: None,
    };
    let res = handle(deps, env.clone(), msg.clone());
    assert!(res.is_ok());
    // bond user3: 80000 MIR-LP at 50% auto-compound (half auto, half stake)
    let env = mock_env(MIR_LP, &[]);
    let msg = HandleMsg::receive(Cw20ReceiveMsg {
        sender: HumanAddr::from(USER3),
        amount: Uint128::from(80000u128),
        msg: Some(
            to_binary(&Cw20HookMsg::bond {
                staker_addr: None,
                asset_token: HumanAddr::from(MIR_TOKEN),
                compound_rate: Some(Decimal::percent(50)),
            })
            .unwrap(),
        ),
    });
    let res = handle(deps, env.clone(), msg);
    assert!(res.is_ok());
    // bond user3: 60000 SPY-LP at 50% auto-compound
    let env = mock_env(SPY_LP, &[]);
    let msg = HandleMsg::receive(Cw20ReceiveMsg {
        sender: HumanAddr::from(USER3),
        amount: Uint128::from(60000u128),
        msg: Some(
            to_binary(&Cw20HookMsg::bond {
                staker_addr: None,
                asset_token: HumanAddr::from(SPY_TOKEN),
                compound_rate: Some(Decimal::percent(50)),
            })
            .unwrap(),
        ),
    });
    let res = handle(deps, env.clone(), msg);
    assert!(res.is_ok());
    // mock balances after the two bonds (+80000 MIR-LP, +60000 SPY-LP in staking);
    // gov balances unchanged, so no new farm/SPEC rewards accrue in this step
    deps.querier.with_token_balances(&[
        (
            &HumanAddr::from(MIR_STAKING),
            &[
                (&HumanAddr::from(MIR_TOKEN), &Uint128::from(96000u128)),
                (&HumanAddr::from(SPY_TOKEN), &Uint128::from(72000u128)),
            ],
        ),
        (
            &HumanAddr::from(MIR_GOV),
            &[(
                &HumanAddr::from(MOCK_CONTRACT_ADDR),
                &Uint128::from(14200u128),
            )],
        ),
        (
            &HumanAddr::from(SPEC_GOV),
            &[(
                &HumanAddr::from(MOCK_CONTRACT_ADDR),
                &Uint128::from(16200u128),
            )],
        ),
    ]);
    // query balance1: USER1's bond grows by his share of USER3's deposit fee,
    // pending rewards are unchanged from the previous step
    let msg = QueryMsg::reward_info {
        staker_addr: HumanAddr::from(USER1),
        asset_token: None,
        height: 0u64,
    };
    let res: RewardInfoResponse = from_binary(&query(deps, msg).unwrap()).unwrap();
    assert_eq!(
        res.reward_infos,
        vec![
            RewardInfoResponseItem {
                asset_token: HumanAddr::from(MIR_TOKEN),
                pending_farm_reward: Uint128::from(2498u128),
                pending_spec_reward: Uint128::from(3899u128),
                bond_amount: Uint128::from(13200u128),
                auto_bond_amount: Uint128::from(9600u128),
                stake_bond_amount: Uint128::from(3600u128),
                accum_spec_share: Uint128::from(4799u128),
            },
            RewardInfoResponseItem {
                asset_token: HumanAddr::from(SPY_TOKEN),
                pending_farm_reward: Uint128::from(4400u128),
                pending_spec_reward: Uint128::from(7600u128),
                bond_amount: Uint128::from(9600u128),
                auto_bond_amount: Uint128::from(7200u128),
                stake_bond_amount: Uint128::from(2400u128),
                accum_spec_share: Uint128::from(9400u128),
            },
        ]
    );
    // query balance2: USER2 is stake-only, also gains a share of the fee
    let msg = QueryMsg::reward_info {
        staker_addr: HumanAddr::from(USER2),
        asset_token: None,
        height: 0u64,
    };
    let res: RewardInfoResponse = from_binary(&query(deps, msg).unwrap()).unwrap();
    assert_eq!(
        res.reward_infos,
        vec![
            RewardInfoResponseItem {
                asset_token: HumanAddr::from(MIR_TOKEN),
                pending_farm_reward: Uint128::from(2500u128),
                pending_spec_reward: Uint128::from(1500u128),
                bond_amount: Uint128::from(6000u128),
                auto_bond_amount: Uint128::from(0u128),
                stake_bond_amount: Uint128::from(6000u128),
                accum_spec_share: Uint128::from(1500u128),
            },
            RewardInfoResponseItem {
                asset_token: HumanAddr::from(SPY_TOKEN),
                pending_farm_reward: Uint128::from(4800u128),
                pending_spec_reward: Uint128::from(3200u128),
                bond_amount: Uint128::from(4800u128),
                auto_bond_amount: Uint128::from(0u128),
                stake_bond_amount: Uint128::from(4800u128),
                accum_spec_share: Uint128::from(3200u128),
            },
        ]
    );
    // query balance3: USER3's bond is net of the 20% fee
    // (per-pool totals sum to the mock staking balances:
    //  13200 + 6000 + 76800 = 96000 MIR-LP; 9600 + 4800 + 57600 = 72000 SPY-LP)
    let msg = QueryMsg::reward_info {
        staker_addr: HumanAddr::from(USER3),
        asset_token: None,
        height: 0u64,
    };
    let res: RewardInfoResponse = from_binary(&query(deps, msg).unwrap()).unwrap();
    assert_eq!(
        res.reward_infos,
        vec![
            RewardInfoResponseItem {
                asset_token: HumanAddr::from(MIR_TOKEN),
                pending_farm_reward: Uint128::zero(),
                pending_spec_reward: Uint128::zero(),
                bond_amount: Uint128::from(76800u128),
                auto_bond_amount: Uint128::from(38400u128),
                stake_bond_amount: Uint128::from(38400u128),
                accum_spec_share: Uint128::zero(),
            },
            RewardInfoResponseItem {
                asset_token: HumanAddr::from(SPY_TOKEN),
                pending_farm_reward: Uint128::zero(),
                pending_spec_reward: Uint128::zero(),
                bond_amount: Uint128::from(57600u128),
                auto_bond_amount: Uint128::from(28800u128),
                stake_bond_amount: Uint128::from(28800u128),
                accum_spec_share: Uint128::zero(),
            },
        ]
    );
}
/// Scenario: USER1 fully unbonds from the MIR pool, USER2 withdraws all
/// pending rewards, then the mock gov stake appreciates by 20%
/// (`with_balance_percent(120)`) and the resulting pending rewards are
/// checked for all three users.
fn test_staked_reward(deps: &mut Extern<MockStorage, MockApi, WasmMockQuerier>) {
    // unbond user1: full 13200 MIR bond; expect an unbond call to the staking
    // contract followed by an LP transfer back to the user
    let env = mock_env(USER1, &[]);
    let msg = HandleMsg::unbond {
        asset_token: HumanAddr::from(MIR_TOKEN),
        amount: Uint128::from(13200u128),
    };
    let res = handle(deps, env.clone(), msg);
    assert!(res.is_ok());
    assert_eq!(
        res.unwrap().messages,
        [
            CosmosMsg::Wasm(WasmMsg::Execute {
                contract_addr: HumanAddr::from(MIR_STAKING),
                send: vec![],
                msg: to_binary(&MirrorStakingHandleMsg::unbond {
                    amount: Uint128::from(13200u128),
                    asset_token: HumanAddr::from(MIR_TOKEN),
                })
                .unwrap(),
            }),
            CosmosMsg::Wasm(WasmMsg::Execute {
                contract_addr: HumanAddr::from(MIR_LP),
                send: vec![],
                msg: to_binary(&Cw20HandleMsg::Transfer {
                    recipient: HumanAddr::from(USER1),
                    amount: Uint128::from(13200u128),
                })
                .unwrap(),
            }),
        ]
    );
    // withdraw for user2: expect SPEC withdrawn from SPEC_GOV then transferred,
    // and MIR withdrawn from MIR_GOV then transferred (4700 SPEC = 1500 + 3200,
    // 7300 MIR = 2500 + 4800 pending across the two pools)
    let env = mock_env(USER2, &[]);
    let msg = HandleMsg::withdraw { asset_token: None };
    let res = handle(deps, env.clone(), msg);
    assert!(res.is_ok());
    assert_eq!(
        res.unwrap().messages,
        vec![
            CosmosMsg::Wasm(WasmMsg::Execute {
                contract_addr: HumanAddr::from(SPEC_GOV),
                send: vec![],
                msg: to_binary(&GovHandleMsg::withdraw {
                    amount: Some(Uint128::from(4700u128)),
                })
                .unwrap(),
            }),
            CosmosMsg::Wasm(WasmMsg::Execute {
                contract_addr: HumanAddr::from(SPEC_TOKEN),
                send: vec![],
                msg: to_binary(&Cw20HandleMsg::Transfer {
                    recipient: HumanAddr::from(USER2),
                    amount: Uint128::from(4700u128),
                })
                .unwrap(),
            }),
            CosmosMsg::Wasm(WasmMsg::Execute {
                contract_addr: HumanAddr::from(MIR_GOV),
                send: vec![],
                msg: to_binary(&MirrorGovHandleMsg::WithdrawVotingTokens {
                    amount: Some(Uint128::from(7300u128)),
                })
                .unwrap(),
            }),
            CosmosMsg::Wasm(WasmMsg::Execute {
                contract_addr: HumanAddr::from(MIR_TOKEN),
                send: vec![],
                msg: to_binary(&Cw20HandleMsg::Transfer {
                    recipient: HumanAddr::from(USER2),
                    amount: Uint128::from(7300u128),
                })
                .unwrap(),
            }),
        ]
    );
    // simulate gov stake appreciation: every staked balance reads 20% higher
    deps.querier.with_balance_percent(120);
    deps.querier.with_token_balances(&[
        (
            &HumanAddr::from(MIR_STAKING),
            &[
                (&HumanAddr::from(MIR_TOKEN), &Uint128::from(90000u128)),
                (&HumanAddr::from(SPY_TOKEN), &Uint128::from(72000u128)),
            ],
        ),
        (
            &HumanAddr::from(MIR_GOV),
            &[(
                &HumanAddr::from(MOCK_CONTRACT_ADDR),
                &Uint128::from(9200u128),
            )],
        ),
        (
            &HumanAddr::from(SPEC_GOV),
            &[(
                &HumanAddr::from(MOCK_CONTRACT_ADDR),
                &Uint128::from(24600u128), //+9000 +20%
            )],
        ),
    ]);
    // query balance1 (still earn gov income even there is no bond)
    let msg = QueryMsg::reward_info {
        staker_addr: HumanAddr::from(USER1),
        asset_token: None,
        height: 0u64,
    };
    let res: RewardInfoResponse = from_binary(&query(deps, msg).unwrap()).unwrap();
    assert_eq!(
        res.reward_infos,
        vec![
            RewardInfoResponseItem {
                asset_token: HumanAddr::from(MIR_TOKEN),
                pending_farm_reward: Uint128::from(3330u128), //+33%
                pending_spec_reward: Uint128::from(4678u128), //+20%
                bond_amount: Uint128::from(0u128),
                auto_bond_amount: Uint128::from(0u128),
                stake_bond_amount: Uint128::from(0u128),
                accum_spec_share: Uint128::from(4799u128),
            },
            RewardInfoResponseItem {
                asset_token: HumanAddr::from(SPY_TOKEN),
                pending_farm_reward: Uint128::from(5866u128), //+33%
                pending_spec_reward: Uint128::from(10080u128), //+800+20%
                bond_amount: Uint128::from(9600u128),
                auto_bond_amount: Uint128::from(7200u128),
                stake_bond_amount: Uint128::from(2400u128),
                accum_spec_share: Uint128::from(10200u128),
            },
        ]
    );
    // query balance2: farm rewards were just withdrawn (now 0); only newly
    // accrued SPEC (inflated 20%) is pending
    let msg = QueryMsg::reward_info {
        staker_addr: HumanAddr::from(USER2),
        asset_token: None,
        height: 0u64,
    };
    let res: RewardInfoResponse = from_binary(&query(deps, msg).unwrap()).unwrap();
    assert_eq!(
        res.reward_infos,
        vec![
            RewardInfoResponseItem {
                asset_token: HumanAddr::from(MIR_TOKEN),
                pending_farm_reward: Uint128::from(0u128),
                pending_spec_reward: Uint128::from(240u128), //+200+20%
                bond_amount: Uint128::from(6000u128),
                auto_bond_amount: Uint128::from(0u128),
                stake_bond_amount: Uint128::from(6000u128),
                accum_spec_share: Uint128::from(1700u128),
            },
            RewardInfoResponseItem {
                asset_token: HumanAddr::from(SPY_TOKEN),
                pending_farm_reward: Uint128::from(0u128),
                pending_spec_reward: Uint128::from(480u128), //+400+20%
                bond_amount: Uint128::from(4800u128),
                auto_bond_amount: Uint128::from(0u128),
                stake_bond_amount: Uint128::from(4800u128),
                accum_spec_share: Uint128::from(3600u128),
            },
        ]
    );
    // query balance3: USER3's auto-bond portion grows after USER1's unbond
    let msg = QueryMsg::reward_info {
        staker_addr: HumanAddr::from(USER3),
        asset_token: None,
        height: 0u64,
    };
    let res: RewardInfoResponse = from_binary(&query(deps, msg).unwrap()).unwrap();
    assert_eq!(
        res.reward_infos,
        vec![
            RewardInfoResponseItem {
                asset_token: HumanAddr::from(MIR_TOKEN),
                pending_farm_reward: Uint128::zero(),
                pending_spec_reward: Uint128::from(3358u128), //+2799+20%
                bond_amount: Uint128::from(84000u128),
                auto_bond_amount: Uint128::from(45600u128),
                stake_bond_amount: Uint128::from(38400u128),
                accum_spec_share: Uint128::from(2799u128),
            },
            RewardInfoResponseItem {
                asset_token: HumanAddr::from(SPY_TOKEN),
                pending_farm_reward: Uint128::zero(),
                pending_spec_reward: Uint128::from(5760u128), //+4800+20%
                bond_amount: Uint128::from(57600u128),
                auto_bond_amount: Uint128::from(28800u128),
                stake_bond_amount: Uint128::from(28800u128),
                accum_spec_share: Uint128::from(4800u128),
            },
        ]
    );
}
|
use crate::errors::*;
use crate::types::*;
use uuid::Uuid;
use serde::de::{Deserialize, Deserializer};
use std::fmt::Debug;
/// TRAIT | Describes the type of a URL linking to an internal Telegram entity
pub trait TDTMeUrlType: Debug + RObject {}
/// Describes the type of a URL linking to an internal Telegram entity
#[derive(Debug, Clone, Serialize)]
#[serde(untagged)]
pub enum TMeUrlType {
    /// Placeholder variant so the enum has a `Default`; never produced by deserialization
    #[doc(hidden)]
    _Default(()),
    /// A chat invite link
    ChatInvite(TMeUrlTypeChatInvite),
    /// A URL linking to a sticker set
    StickerSet(TMeUrlTypeStickerSet),
    /// A URL linking to a public supergroup or channel
    Supergroup(TMeUrlTypeSupergroup),
    /// A URL linking to a user
    User(TMeUrlTypeUser),
}
impl Default for TMeUrlType {
    fn default() -> Self {
        TMeUrlType::_Default(())
    }
}
impl<'de> Deserialize<'de> for TMeUrlType {
    fn deserialize<D>(deserializer: D) -> Result<TMeUrlType, D::Error>
    where
        D: Deserializer<'de>,
    {
        use serde::de::Error;
        // Dispatches on the incoming "@type" tag to the matching variant's
        // own Deserialize impl.
        rtd_enum_deserialize!(
            TMeUrlType,
            (tMeUrlTypeChatInvite, ChatInvite);
            (tMeUrlTypeStickerSet, StickerSet);
            (tMeUrlTypeSupergroup, Supergroup);
            (tMeUrlTypeUser, User);
        )(deserializer)
    }
}
impl RObject for TMeUrlType {
    // Delegates every RObject method to the wrapped variant; the `_Default`
    // placeholder answers with sentinel values ("-1" / None).
    #[doc(hidden)]
    fn td_name(&self) -> &'static str {
        match self {
            TMeUrlType::ChatInvite(t) => t.td_name(),
            TMeUrlType::StickerSet(t) => t.td_name(),
            TMeUrlType::Supergroup(t) => t.td_name(),
            TMeUrlType::User(t) => t.td_name(),
            _ => "-1",
        }
    }
    #[doc(hidden)]
    fn extra(&self) -> Option<String> {
        match self {
            TMeUrlType::ChatInvite(t) => t.extra(),
            TMeUrlType::StickerSet(t) => t.extra(),
            TMeUrlType::Supergroup(t) => t.extra(),
            TMeUrlType::User(t) => t.extra(),
            _ => None,
        }
    }
    fn to_json(&self) -> RTDResult<String> {
        Ok(serde_json::to_string(self)?)
    }
}
impl TMeUrlType {
    /// Deserializes an instance from its JSON representation.
    pub fn from_json<S: AsRef<str>>(json: S) -> RTDResult<Self> {
        Ok(serde_json::from_str(json.as_ref())?)
    }
    /// Returns true for the hidden placeholder variant.
    #[doc(hidden)]
    pub fn _is_default(&self) -> bool {
        matches!(self, TMeUrlType::_Default(_))
    }
}
impl AsRef<TMeUrlType> for TMeUrlType {
    fn as_ref(&self) -> &TMeUrlType {
        self
    }
}
/// A chat invite link
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
pub struct TMeUrlTypeChatInvite {
    #[doc(hidden)]
    #[serde(rename(serialize = "@type", deserialize = "@type"))]
    td_name: String,
    #[doc(hidden)]
    #[serde(rename(serialize = "@extra", deserialize = "@extra"))]
    extra: Option<String>,
    /// Chat invite link info
    info: ChatInviteLinkInfo,
}
impl RObject for TMeUrlTypeChatInvite {
    #[doc(hidden)]
    fn td_name(&self) -> &'static str {
        "tMeUrlTypeChatInvite"
    }
    #[doc(hidden)]
    fn extra(&self) -> Option<String> {
        self.extra.clone()
    }
    fn to_json(&self) -> RTDResult<String> {
        Ok(serde_json::to_string(self)?)
    }
}
impl TDTMeUrlType for TMeUrlTypeChatInvite {}
impl TMeUrlTypeChatInvite {
    /// Deserializes an instance from its JSON representation.
    pub fn from_json<S: AsRef<str>>(json: S) -> RTDResult<Self> {
        Ok(serde_json::from_str(json.as_ref())?)
    }
    /// Creates a builder pre-populated with the TDLib `@type` tag and a
    /// fresh UUID as the `@extra` correlation id.
    pub fn builder() -> RTDTMeUrlTypeChatInviteBuilder {
        // Struct literal instead of default-then-reassign
        // (avoids clippy::field_reassign_with_default).
        let inner = TMeUrlTypeChatInvite {
            td_name: "tMeUrlTypeChatInvite".to_string(),
            extra: Some(Uuid::new_v4().to_string()),
            ..Default::default()
        };
        RTDTMeUrlTypeChatInviteBuilder { inner }
    }
    /// Chat invite link info
    pub fn info(&self) -> &ChatInviteLinkInfo {
        &self.info
    }
}
#[doc(hidden)]
pub struct RTDTMeUrlTypeChatInviteBuilder {
    inner: TMeUrlTypeChatInvite,
}
impl RTDTMeUrlTypeChatInviteBuilder {
    /// Finalizes the builder, cloning the accumulated value.
    pub fn build(&self) -> TMeUrlTypeChatInvite {
        self.inner.clone()
    }
    pub fn info<T: AsRef<ChatInviteLinkInfo>>(&mut self, info: T) -> &mut Self {
        self.inner.info = info.as_ref().clone();
        self
    }
}
impl AsRef<TMeUrlTypeChatInvite> for TMeUrlTypeChatInvite {
    fn as_ref(&self) -> &TMeUrlTypeChatInvite {
        self
    }
}
impl AsRef<TMeUrlTypeChatInvite> for RTDTMeUrlTypeChatInviteBuilder {
    fn as_ref(&self) -> &TMeUrlTypeChatInvite {
        &self.inner
    }
}
/// A URL linking to a sticker set
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
pub struct TMeUrlTypeStickerSet {
    #[doc(hidden)]
    #[serde(rename(serialize = "@type", deserialize = "@type"))]
    td_name: String,
    #[doc(hidden)]
    #[serde(rename(serialize = "@extra", deserialize = "@extra"))]
    extra: Option<String>,
    /// Identifier of the sticker set
    sticker_set_id: isize,
}
impl RObject for TMeUrlTypeStickerSet {
    #[doc(hidden)]
    fn td_name(&self) -> &'static str {
        "tMeUrlTypeStickerSet"
    }
    #[doc(hidden)]
    fn extra(&self) -> Option<String> {
        self.extra.clone()
    }
    fn to_json(&self) -> RTDResult<String> {
        Ok(serde_json::to_string(self)?)
    }
}
impl TDTMeUrlType for TMeUrlTypeStickerSet {}
impl TMeUrlTypeStickerSet {
    /// Deserializes an instance from its JSON representation.
    pub fn from_json<S: AsRef<str>>(json: S) -> RTDResult<Self> {
        Ok(serde_json::from_str(json.as_ref())?)
    }
    /// Creates a builder pre-populated with the TDLib `@type` tag and a
    /// fresh UUID as the `@extra` correlation id.
    pub fn builder() -> RTDTMeUrlTypeStickerSetBuilder {
        // Struct literal instead of default-then-reassign
        // (avoids clippy::field_reassign_with_default).
        let inner = TMeUrlTypeStickerSet {
            td_name: "tMeUrlTypeStickerSet".to_string(),
            extra: Some(Uuid::new_v4().to_string()),
            ..Default::default()
        };
        RTDTMeUrlTypeStickerSetBuilder { inner }
    }
    /// Identifier of the sticker set
    pub fn sticker_set_id(&self) -> isize {
        self.sticker_set_id
    }
}
#[doc(hidden)]
pub struct RTDTMeUrlTypeStickerSetBuilder {
    inner: TMeUrlTypeStickerSet,
}
impl RTDTMeUrlTypeStickerSetBuilder {
    /// Finalizes the builder, cloning the accumulated value.
    pub fn build(&self) -> TMeUrlTypeStickerSet {
        self.inner.clone()
    }
    pub fn sticker_set_id(&mut self, sticker_set_id: isize) -> &mut Self {
        self.inner.sticker_set_id = sticker_set_id;
        self
    }
}
impl AsRef<TMeUrlTypeStickerSet> for TMeUrlTypeStickerSet {
    fn as_ref(&self) -> &TMeUrlTypeStickerSet {
        self
    }
}
impl AsRef<TMeUrlTypeStickerSet> for RTDTMeUrlTypeStickerSetBuilder {
    fn as_ref(&self) -> &TMeUrlTypeStickerSet {
        &self.inner
    }
}
/// A URL linking to a public supergroup or channel
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
pub struct TMeUrlTypeSupergroup {
    #[doc(hidden)]
    #[serde(rename(serialize = "@type", deserialize = "@type"))]
    td_name: String,
    #[doc(hidden)]
    #[serde(rename(serialize = "@extra", deserialize = "@extra"))]
    extra: Option<String>,
    /// Identifier of the supergroup or channel
    supergroup_id: i64,
}
impl RObject for TMeUrlTypeSupergroup {
    #[doc(hidden)]
    fn td_name(&self) -> &'static str {
        "tMeUrlTypeSupergroup"
    }
    #[doc(hidden)]
    fn extra(&self) -> Option<String> {
        self.extra.clone()
    }
    fn to_json(&self) -> RTDResult<String> {
        Ok(serde_json::to_string(self)?)
    }
}
impl TDTMeUrlType for TMeUrlTypeSupergroup {}
impl TMeUrlTypeSupergroup {
    /// Deserializes an instance from its JSON representation.
    pub fn from_json<S: AsRef<str>>(json: S) -> RTDResult<Self> {
        Ok(serde_json::from_str(json.as_ref())?)
    }
    /// Creates a builder pre-populated with the TDLib `@type` tag and a
    /// fresh UUID as the `@extra` correlation id.
    pub fn builder() -> RTDTMeUrlTypeSupergroupBuilder {
        // Struct literal instead of default-then-reassign
        // (avoids clippy::field_reassign_with_default).
        let inner = TMeUrlTypeSupergroup {
            td_name: "tMeUrlTypeSupergroup".to_string(),
            extra: Some(Uuid::new_v4().to_string()),
            ..Default::default()
        };
        RTDTMeUrlTypeSupergroupBuilder { inner }
    }
    /// Identifier of the supergroup or channel
    pub fn supergroup_id(&self) -> i64 {
        self.supergroup_id
    }
}
#[doc(hidden)]
pub struct RTDTMeUrlTypeSupergroupBuilder {
    inner: TMeUrlTypeSupergroup,
}
impl RTDTMeUrlTypeSupergroupBuilder {
    /// Finalizes the builder, cloning the accumulated value.
    pub fn build(&self) -> TMeUrlTypeSupergroup {
        self.inner.clone()
    }
    pub fn supergroup_id(&mut self, supergroup_id: i64) -> &mut Self {
        self.inner.supergroup_id = supergroup_id;
        self
    }
}
impl AsRef<TMeUrlTypeSupergroup> for TMeUrlTypeSupergroup {
    fn as_ref(&self) -> &TMeUrlTypeSupergroup {
        self
    }
}
impl AsRef<TMeUrlTypeSupergroup> for RTDTMeUrlTypeSupergroupBuilder {
    fn as_ref(&self) -> &TMeUrlTypeSupergroup {
        &self.inner
    }
}
/// A URL linking to a user
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
pub struct TMeUrlTypeUser {
    #[doc(hidden)]
    #[serde(rename(serialize = "@type", deserialize = "@type"))]
    td_name: String,
    #[doc(hidden)]
    #[serde(rename(serialize = "@extra", deserialize = "@extra"))]
    extra: Option<String>,
    /// Identifier of the user
    user_id: i64,
}
impl RObject for TMeUrlTypeUser {
    #[doc(hidden)]
    fn td_name(&self) -> &'static str {
        "tMeUrlTypeUser"
    }
    #[doc(hidden)]
    fn extra(&self) -> Option<String> {
        self.extra.clone()
    }
    fn to_json(&self) -> RTDResult<String> {
        Ok(serde_json::to_string(self)?)
    }
}
impl TDTMeUrlType for TMeUrlTypeUser {}
impl TMeUrlTypeUser {
    /// Deserializes an instance from its JSON representation.
    pub fn from_json<S: AsRef<str>>(json: S) -> RTDResult<Self> {
        Ok(serde_json::from_str(json.as_ref())?)
    }
    /// Creates a builder pre-populated with the TDLib `@type` tag and a
    /// fresh UUID as the `@extra` correlation id.
    pub fn builder() -> RTDTMeUrlTypeUserBuilder {
        // Struct literal instead of default-then-reassign
        // (avoids clippy::field_reassign_with_default).
        let inner = TMeUrlTypeUser {
            td_name: "tMeUrlTypeUser".to_string(),
            extra: Some(Uuid::new_v4().to_string()),
            ..Default::default()
        };
        RTDTMeUrlTypeUserBuilder { inner }
    }
    /// Identifier of the user
    pub fn user_id(&self) -> i64 {
        self.user_id
    }
}
#[doc(hidden)]
pub struct RTDTMeUrlTypeUserBuilder {
    inner: TMeUrlTypeUser,
}
impl RTDTMeUrlTypeUserBuilder {
    /// Finalizes the builder, cloning the accumulated value.
    pub fn build(&self) -> TMeUrlTypeUser {
        self.inner.clone()
    }
    pub fn user_id(&mut self, user_id: i64) -> &mut Self {
        self.inner.user_id = user_id;
        self
    }
}
impl AsRef<TMeUrlTypeUser> for TMeUrlTypeUser {
    fn as_ref(&self) -> &TMeUrlTypeUser {
        self
    }
}
impl AsRef<TMeUrlTypeUser> for RTDTMeUrlTypeUserBuilder {
    fn as_ref(&self) -> &TMeUrlTypeUser {
        &self.inner
    }
}
|
// This file is part of rdma-core. It is subject to the license terms in the COPYRIGHT file found in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/rdma-core/master/COPYRIGHT. No part of rdma-core, including this file, may be copied, modified, propagated, or distributed except according to the terms contained in the COPYRIGHT file.
// Copyright © 2016 The developers of rdma-core. See the COPYRIGHT file in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/rdma-core/master/COPYRIGHT.
// Raw FFI declarations for the (Open)SHMEM byte-granularity put/get routines.
// Parameter names unified to `source` for consistency across the four
// declarations (names on extern fns are documentation only — no ABI impact).
extern "C"
{
    /// Blocking get: copy `nelems` bytes from `source` on PE `pe` into local `dest`.
    pub fn shmem_getmem(dest: *mut c_void, source: *const c_void, nelems: usize, pe: c_int);
    /// Non-blocking (`_nbi`) variant of `shmem_getmem`.
    pub fn shmem_getmem_nbi(dest: *mut c_void, source: *const c_void, nelems: usize, pe: c_int);
    /// Blocking put: copy `nelems` bytes from local `source` to `dest` on PE `pe`.
    pub fn shmem_putmem(dest: *mut c_void, source: *const c_void, nelems: usize, pe: c_int);
    /// Non-blocking (`_nbi`) variant of `shmem_putmem`.
    pub fn shmem_putmem_nbi(dest: *mut c_void, source: *const c_void, nelems: usize, pe: c_int);
}
|
use std::iter::{Enumerate, ExactSizeIterator};
use std::fmt::Display;
use std::io::Write;
use std::io;
/// Prints each `(index, item)` pair of `iterator` as `index: item`, one per line.
///
/// Always returns `None`; the `Option<usize>` return type lets callers treat
/// "no selection made" uniformly with selection-returning helpers.
pub fn list<I, T>(iterator: Enumerate<I>) -> Option<usize>
where I: ExactSizeIterator + Iterator<Item = T>, T: Display {
    iterator.for_each(|(index, item)| println!("{}: {}", index, item));
    None
}
/// Prints an interactive prompt marker (`"> "`) and flushes stdout so the
/// marker appears before the program blocks waiting for input.
///
/// `nl_before` / `nl_after` control whether a newline is emitted before /
/// after the marker.
pub fn prompt(nl_before: bool, nl_after: bool) {
    if nl_before {
        // println!() over print!("\n") — same output, idiomatic
        // (clippy::print_with_newline).
        println!();
    }
    print!("> ");
    if nl_after {
        println!();
    }
    // Explicit flush: stdout is line-buffered and "> " has no trailing newline.
    io::stdout().flush().expect("IO ERROR");
}
|
use pyo3::prelude::*;
use pyo3::exceptions;
use std::fs::File;
use std::io::{BufReader, BufRead};
use std::collections::HashMap;
use regex::Regex;
use crate::alignment::{SeqMatrix, new_seqmatrix};
lazy_static! {
static ref WHITESPACE_REGEX: Regex = Regex::new(r"\s+").unwrap();
}
// FASTA file readers
/// Parses the FASTA file at `path` into four parallel vectors returned in a
/// map under the keys "ids", "descriptions", "sequences" and "comments".
///
/// A record starts at a `>` header line: the first whitespace-delimited token
/// becomes the id, the remainder (if any) the description. Following
/// non-header lines are concatenated into the record's sequence. Records with
/// an empty sequence are silently dropped.
///
/// Returns `Err` with a human-readable message if the file cannot be opened
/// or a line cannot be read.
pub fn fasta_to_hashmap(path: &str) -> Result<HashMap<String, Vec<String>>, String> {
    // Open the path in read-only mode, returns `io::Result<File>`
    let f = match File::open(path) {
        Err(_) => {
            return Err(format!(
                "encountered an error while trying to open file {:?}", path))
        },
        Ok(x) => x
    };
    let f = BufReader::new(f);
    // Declare variables: parallel per-record vectors
    let mut ids: Vec<String> = Vec::new();
    let mut descriptions: Vec<String> = Vec::new();
    let mut sequences: Vec<String> = Vec::new();
    let mut comments: Vec<String> = Vec::new();
    // Declare temp variables holding the record currently being accumulated
    let mut id = String::new();
    let mut description = String::new();
    let mut sequence = String::new();
    // Match regexp
    for line in f.lines() {
        let line = match line {
            Err(_) => {
                return Err(format!(
                    "encountered an error while reading file {:?}", path))
            },
            Ok(x) => x.trim().to_string()
        };
        if line.starts_with(">") {
            // A new header: flush the previous record (if it had any sequence)
            if sequence.len() > 0 {
                ids.push(id.clone());
                descriptions.push(description.clone());
                sequences.push(sequence.clone());
            }
            // Split the header into id and optional description (max 2 parts)
            let matches: Vec<&str> = WHITESPACE_REGEX
                .splitn(line.trim_start_matches(">"), 2)
                .collect();
            id = matches[0].to_string();
            description = match matches.len() {
                l if l == 2 => matches[1].to_string(),
                _ => String::new(),
            };
            sequence.clear();
            // Handle comment line \;
        } else if line.starts_with(";") {
            // NOTE(review): this overwrites the pending record's id with the
            // comment text — looks suspicious; confirm this is intended.
            id = line.trim_start_matches(";").to_string();
            comments.push(line);
        } else {
            sequence.push_str(&line);
        }
    }
    // Flush the final record (loop only flushes on seeing the next header)
    if sequence.len() > 0 {
        ids.push(id.clone());
        descriptions.push(description.clone());
        sequences.push(sequence.clone());
    }
    let map: HashMap<String, Vec<String>> = [
        ("ids".to_string(), ids),
        ("descriptions".to_string(), descriptions),
        ("sequences".to_string(), sequences),
        ("comments".to_string(), comments),
    ].iter().cloned().collect();
    Ok(map)
}
#[pyfunction]
/// fasta_to_dict(data_str, /)
/// --
///
/// Reads FASTA file and creates a list of Record objects.
fn fasta_to_dict(path: &str) -> PyResult<(SeqMatrix, HashMap<String, Vec<String>>)> {
    // Parse the file; surface open/read failures to Python as IOError.
    let mut parsed = match fasta_to_hashmap(path) {
        Ok(map) => map,
        Err(e) => return Err(exceptions::IOError::py_err(e)),
    };
    // "sequences" is always inserted by fasta_to_hashmap, so remove() succeeds.
    let sequences = parsed.remove("sequences").unwrap();
    // Invalid sequence data surfaces as ValueError.
    match new_seqmatrix(sequences) {
        Ok(seq_matrix) => Ok((seq_matrix, parsed)),
        Err(e) => Err(exceptions::ValueError::py_err(e)),
    }
}
// TODO: Make readers for other file types: PHYLIP, NEXUS
// Register python functions to PyO3
/// Registers this module's functions as the Python extension module `readers`.
#[pymodinit]
fn readers(_py: Python, m: &PyModule) -> PyResult<()> {
    m.add_function(wrap_function!(fasta_to_dict))?;
    Ok(())
}
extern crate bodyparser;
extern crate exonum;
extern crate iron;
extern crate router;
extern crate serde;
extern crate serde_json;
use exonum::api::Api;
use exonum::blockchain::Transaction;
use exonum::messages::Message;
use hyper::header::ContentType;
use iron::headers::AccessControlAllowOrigin;
use iron::prelude::*;
use iron::status as istatus;
use router::Router;
use currency::api::error::ApiError;
use currency::transactions::{
AddAssets, DeleteAssets, Exchange, ExchangeIntermediary, Trade, TradeIntermediary, Transfer,
BidOffer, AskOffer, EXCHANGE_ID, EXCHANGE_INTERMEDIARY_ID, TRADE_ID, TRADE_INTERMEDIARY_ID,
TransferWithFeesPayer, TRANSFER_FEES_PAYER_ID,
};
/// Iron API handler exposing hex serializations of currency transactions.
#[derive(Clone)]
pub struct HexApi {}
/// Untagged request wrapper: serde attempts each transaction variant in
/// declaration order until one matches the JSON body.
#[serde(untagged)]
#[derive(Clone, Serialize, Deserialize)]
enum TransactionRequest {
    Transfer(Transfer),
    TransferWithFeesPayer(TransferWithFeesPayer),
    AddAssets(AddAssets),
    DeleteAssets(DeleteAssets),
    Trade(Trade),
    TradeIntermediary(TradeIntermediary),
    Exchange(Exchange),
    ExchangeIntermediary(ExchangeIntermediary),
    BidOffer(BidOffer),
    AskOffer(AskOffer),
}
/// Unwraps the request into a boxed `Transaction` trait object.
// Implemented as `From` rather than `Into`: the std blanket impl then
// provides `Into<Box<Transaction>>` for free, so existing `.into()` call
// sites keep working unchanged.
impl From<TransactionRequest> for Box<Transaction> {
    fn from(request: TransactionRequest) -> Box<Transaction> {
        match request {
            TransactionRequest::Transfer(trans) => Box::new(trans),
            TransactionRequest::TransferWithFeesPayer(trans) => Box::new(trans),
            TransactionRequest::AddAssets(trans) => Box::new(trans),
            TransactionRequest::DeleteAssets(trans) => Box::new(trans),
            TransactionRequest::Trade(trans) => Box::new(trans),
            TransactionRequest::TradeIntermediary(trans) => Box::new(trans),
            TransactionRequest::Exchange(trans) => Box::new(trans),
            TransactionRequest::ExchangeIntermediary(trans) => Box::new(trans),
            TransactionRequest::BidOffer(trans) => Box::new(trans),
            TransactionRequest::AskOffer(trans) => Box::new(trans),
        }
    }
}
/// JSON response body carrying a hex-encoded byte string.
#[derive(PartialEq, Eq, Serialize, Deserialize, Clone, Debug)]
pub struct HexResponse {
    /// Lowercase hexadecimal rendering of the requested bytes.
    pub hex: String,
}
/// `Ok(Some(_))` — hex produced; `Ok(None)` — nothing to render for this
/// transaction type; `Err(_)` — empty or malformed request body.
pub type HexApiResponse = Result<Option<HexResponse>, ApiError>;
impl HexApi {
    /// Renders `bytes` as a lowercase hex string, two digits per byte.
    pub fn hex_string(bytes: Vec<u8>) -> String {
        // Collect the per-byte "{:02x}" fragments directly into the result.
        bytes.iter().map(|byte| format!("{:02x}", byte)).collect()
    }
}
impl Api for HexApi {
    /// Wires two POST endpoints onto `router`:
    /// - `/v1/hex/transactions`       — hex of the full transaction body
    /// - `/v1/hex/transactions/offer` — hex of the embedded offer, for the
    ///   transaction types that carry one
    fn wire(&self, router: &mut Router) {
        // Handler: hex-encode the raw body bytes of any supported transaction.
        let hex_transaction = move |request: &mut Request| -> IronResult<Response> {
            let body: HexApiResponse = match request.get::<bodyparser::Struct<TransactionRequest>>()
            {
                Ok(Some(transaction)) => {
                    let tx: Box<Transaction> = transaction.into();
                    let hex = Self::hex_string(tx.raw().body().to_vec());
                    Ok(Some(HexResponse { hex }))
                }
                Ok(None) => Err(ApiError::EmptyRequestBody),
                Err(_) => Err(ApiError::IncorrectRequest),
            };
            // Error variants pick their own HTTP status; success is 200 OK.
            let mut res = Response::with((
                body.clone()
                    .err()
                    .map(|e| e.to_status())
                    .unwrap_or(istatus::Ok),
                serde_json::to_string_pretty(&body).unwrap(),
            ));
            res.headers.set(ContentType::json());
            res.headers.set(AccessControlAllowOrigin::Any);
            Ok(res)
        };
        // Handler: hex-encode only the "offer" part of the transaction.
        // Types without an offer (or a failed re-parse from raw) yield
        // Ok(None), i.e. 200 with a null payload.
        let hex_tx_offer = move |request: &mut Request| -> IronResult<Response> {
            let body: HexApiResponse = match request.get::<bodyparser::Struct<TransactionRequest>>()
            {
                Ok(Some(transaction)) => {
                    let transaction: Box<Transaction> = transaction.into();
                    let raw_ = transaction.raw().clone();
                    // Dispatch on the message-type id to recover the concrete
                    // transaction and extract its offer bytes.
                    let vec_hash: Option<Vec<u8>> = match transaction.raw().message_type() {
                        TRANSFER_FEES_PAYER_ID => match TransferWithFeesPayer::from_raw(raw_) {
                            Ok(transfer) => Some(transfer.offer_raw()),
                            Err(_) => None,
                        },
                        EXCHANGE_ID => match Exchange::from_raw(raw_) {
                            Ok(exchange) => Some(exchange.offer_raw()),
                            Err(_) => None,
                        },
                        EXCHANGE_INTERMEDIARY_ID => match ExchangeIntermediary::from_raw(raw_) {
                            Ok(exchange) => Some(exchange.offer_raw()),
                            Err(_) => None,
                        },
                        TRADE_ID => match Trade::from_raw(raw_) {
                            Ok(trade) => Some(trade.offer_raw()),
                            Err(_) => None,
                        },
                        TRADE_INTERMEDIARY_ID => match TradeIntermediary::from_raw(raw_) {
                            Ok(trade) => Some(trade.offer_raw()),
                            Err(_) => None,
                        },
                        _ => None,
                    };
                    match vec_hash {
                        Some(vec) => {
                            let hex = Self::hex_string(vec);
                            Ok(Some(HexResponse { hex }))
                        }
                        None => Ok(None),
                    }
                }
                Ok(None) => Err(ApiError::EmptyRequestBody),
                Err(_) => Err(ApiError::IncorrectRequest),
            };
            let mut res = Response::with((
                body.clone()
                    .err()
                    .map(|e| e.to_status())
                    .unwrap_or(istatus::Ok),
                serde_json::to_string_pretty(&body).unwrap(),
            ));
            res.headers.set(ContentType::json());
            res.headers.set(AccessControlAllowOrigin::Any);
            Ok(res)
        };
        router.post("/v1/hex/transactions", hex_transaction, "hash_transaction");
        router.post("/v1/hex/transactions/offer", hex_tx_offer, "hash_offer");
    }
}
|
use errors::TextDiagnostic;
use rowan::TreeRoot;
use crate::language::LanguageId;
use crate::syntax_kind_set::IterableSyntaxKindSet;
use crate::syntax_kind::SyntaxKind;
use smol_str::SmolStr;
use rowan::WalkEvent;
pub mod algo;
pub mod syntax;
pub mod syntax_text;
|
use chrono::{Timelike, Utc};
/// Returns one of two YouTube API keys embedded at compile time (`env!`),
/// selected by the parity of the current UTC hour — presumably to spread
/// request quota across keys (confirm with the call sites).
pub fn youtube_api_key() -> &'static str {
    match Utc::now().hour() % 2 {
        0 => env!("YOUTUBE_API_KEY0"),
        _ => env!("YOUTUBE_API_KEY1"),
    }
}
|
/// Demonstrates `FnOnce` bounds on a local helper function.
fn main() {
    // Takes ownership of an `FnOnce` closure and invokes it exactly once;
    // the closure cannot be used again afterwards.
    // The bound is stated once, in the where-clause only — previously it was
    // duplicated in the generic parameter list as well
    // (clippy::type_repetition_in_bounds).
    fn consume_with_relish<F>(func: F)
    where
        F: FnOnce() -> String,
    {
        println!("Consumed: {}", func());
        println!("Super Happy I did it!");
    }
    // An `FnOnce` closure consumes the values it captures from its
    // environment, so it can be called at most once — hence this print sits
    // after the helper's scope.
    // NOTE(review): `consume_with_relish` is declared but never called here;
    // kept as-is to preserve the program's output.
    println!("Super Happy I did it!");
}
use crate::query::{parse, Query, OrderOperator};
use err_derive::Error;
use std::any::type_name;
use std::collections::HashMap;
use std::str::FromStr;
use nom::Err;
type UtcDateTime = chrono::DateTime<chrono::Utc>;
#[derive(Debug, Error, PartialEq)]
pub enum Error {
#[error(display = "invalid value for field \"{}\", expect {}", field, accepted_type)]
InvalidValue {
field: String,
accepted_type: String,
},
#[error(display = "syntax error at position {}", pos)]
ParseError {
pos: usize,
},
#[error(display = "unsupported operation \"{}\" on field \"{}\"", required_operation, field)]
UnsupportedOperation {
field: String,
required_operation: String,
},
#[error(display = "unknown filed \"{}\"", field)]
UnknownField {
field: String,
},
#[error(display = "empty operation \"{}\" on wildcard field", required_operation)]
EmptyWildcardOperation {
required_operation: String,
}
}
/// Result alias specialized to this module's [`Error`].
pub type Result<T> = std::result::Result<T, Error>;
/// Per-field configuration controlling how a query expression on that field
/// is validated and rendered (presumably into SQL, given the quote-doubling
/// in the escapers — confirm against the query renderer).
pub struct FieldConfig {
    /// Field name as written in the query language.
    pub field: String,
    /// Optional output name to emit instead of `field`.
    pub rename: Option<String>,
    /// Human-readable type name for error messages; falls back to the Rust type name.
    pub type_name: Option<String>,
    /// Whether wildcard use is allowed on this field.
    pub wildcard: bool,
    /// Whether ordering comparisons are allowed.
    pub partial_order: bool,
    /// Whether partial-equality operations are allowed.
    pub partial_equal: bool,
    /// Whether comparisons should be rendered with LIKE.
    pub use_like: bool,
    /// Optional value validator/escaper; `None` means no escaping is applied.
    pub escape_handler: Option<EscapeHandler>,
}
/// Validates a raw query value for a field and returns its escaped rendering.
pub type EscapeHandler = Box<dyn Fn(&str, &FieldConfig) -> Result<String> + Sync + Send>;
pub fn escape_quoted_with_converter<T: FromStr>(
converter: impl Fn(&T) -> String + 'static + Sync + Send,
) -> EscapeHandler {
Box::new(move |input: &str, config: &FieldConfig| {
let value: T = input.parse()
.map_err(|_| Error::InvalidValue {
field: config.field.clone(),
accepted_type: config.type_name.clone().unwrap_or_else(|| type_name::<T>().into()),
})?;
Ok(format!("'{}'", converter(&value).replace("'", "''")))
})
}
/// Quoted handler that renders the parsed value with its `ToString` impl.
pub fn escape_quoted<T: FromStr + ToString + 'static>() -> EscapeHandler {
    escape_quoted_with_converter(|value: &T| value.to_string())
}
pub fn escape_unquoted_with_converter<T: FromStr>(
converter: impl Fn(&T) -> String + 'static + Sync + Send,
) -> EscapeHandler {
Box::new(move |input: &str, config: &FieldConfig| {
let value: T = input.parse()
.map_err(|_| Error::InvalidValue {
field: config.field.clone(),
accepted_type: config.type_name.clone().unwrap_or_else(|| type_name::<T>().into()),
})?;
Ok(converter(&value))
})
}
/// Unquoted handler that renders the parsed value with its `ToString` impl.
pub fn escape_unquoted<T: FromStr + ToString + 'static>() -> EscapeHandler {
    escape_unquoted_with_converter(|value: &T| value.to_string())
}
impl FieldConfig {
    /// Creates a field with every capability disabled and no custom escaping.
    pub fn new(field: &str) -> Self {
        Self {
            field: field.into(),
            rename: None,
            type_name: None,
            wildcard: false,
            partial_order: false,
            partial_equal: false,
            use_like: false,
            escape_handler: None,
        }
    }
    /// Numeric field: comparable and equatable, rendered unquoted, values
    /// validated by parsing as `T`.
    pub fn new_number_field<T: FromStr + ToString + 'static>(field: &str, rename: Option<String>) -> Self {
        Self {
            field: field.into(),
            rename,
            type_name: None,
            wildcard: false,
            partial_order: true,
            partial_equal: true,
            use_like: false,
            escape_handler: Some(escape_unquoted::<T>()),
        }
    }
    /// Timestamp field: values must parse as RFC 3339 date-times and are
    /// re-rendered in RFC 3339 inside single quotes.
    pub fn new_date_time_field(field: &str, rename: Option<String>) -> Self {
        Self {
            field: field.into(),
            rename,
            type_name: Some("DateTime".into()),
            wildcard: false,
            partial_order: true,
            partial_equal: true,
            use_like: false,
            escape_handler: Some(escape_quoted_with_converter(UtcDateTime::to_rfc3339)),
        }
    }
    /// Free-text field: wildcard-searchable, comparable, and matched with
    /// a case-insensitive ILIKE substring pattern.
    pub fn new_string_field(field: &str, rename: Option<String>) -> Self {
        Self {
            field: field.into(),
            rename,
            type_name: None,
            wildcard: true,
            partial_order: true,
            partial_equal: true,
            use_like: true,
            escape_handler: None,
        }
    }
    /// Sets the SQL column expression to emit instead of the field name.
    pub fn rename(mut self, rename: &str) -> Self {
        self.rename = Some(rename.into());
        self
    }
    /// Sets the type name shown in `Error::InvalidValue` messages.
    pub fn type_name(mut self, type_name: &str) -> Self {
        self.type_name = Some(type_name.into());
        self
    }
    /// Includes this field when a query targets the wildcard field.
    pub fn wildcard(mut self) -> Self {
        self.wildcard = true;
        self
    }
    /// Allows ordering comparisons on this field.
    pub fn partial_order(mut self) -> Self {
        self.partial_order = true;
        self
    }
    /// Allows equality comparisons on this field.
    pub fn partial_equal(mut self) -> Self {
        self.partial_equal = true;
        self
    }
    /// Renders equality as a case-insensitive ILIKE substring match.
    pub fn use_like(mut self) -> Self {
        self.use_like = true;
        self
    }
    /// Replaces the value validator/escaper.
    pub fn escape_handler(mut self, func: EscapeHandler) -> Self {
        self.escape_handler = Some(func);
        self
    }
    /// Validates and escapes a raw value for SQL embedding.
    pub fn escape(&self, input: &str) -> Result<String> {
        // Only build the default quoted-string handler when no custom handler
        // is configured. The previous `unwrap_or(&escape_quoted::<String>())`
        // evaluated its argument eagerly, allocating a fresh boxed handler on
        // every call even when a custom handler was present.
        match &self.escape_handler {
            Some(handler) => handler(input, self),
            None => escape_quoted::<String>()(input, self),
        }
    }
}
/// Collection of [`FieldConfig`]s, keyed by query-visible field name,
/// used to translate parsed queries into SQL predicates.
pub struct QueryConfig {
    // Maps the query-facing field name to its configuration.
    fields: HashMap<String, FieldConfig>,
}
impl Default for QueryConfig {
    // Equivalent to `QueryConfig::new()`: an empty configuration.
    fn default() -> Self {
        Self::new()
    }
}
impl QueryConfig {
pub fn new() -> Self {
Self {
fields: HashMap::new(),
}
}
pub fn field(mut self, field: FieldConfig) -> Self {
self.fields.insert(field.field.clone(), field);
self
}
pub fn check_sortable(&self, field: &str) -> Result<String> {
let config = self.fields.get(field)
.ok_or_else(|| Error::UnknownField { field: field.into() })?;
if !config.partial_order {
return Err(Error::UnsupportedOperation {
field: field.into(),
required_operation: "order".into(),
});
}
Ok(config.rename.clone().unwrap_or_else(|| field.into()))
}
pub fn map_field(&self, field: &str) -> Result<String> {
let config = self.fields.get(field)
.ok_or_else(|| Error::UnknownField { field: field.into() })?;
Ok(config.rename.clone().unwrap_or_else(|| field.into()))
}
pub fn map_field_value(&self, field: &str, value: &str) -> Result<(String, String)> {
let config = self.fields.get(field)
.ok_or_else(|| Error::UnknownField { field: field.into() })?;
Ok((config.rename.clone().unwrap_or_else(|| field.into()), config.escape(value)?))
}
pub fn parse_to_postgres(&self, input: &str) -> Result<String> {
Ok(parse(input)
.map_err(|err| match err {
Err::Incomplete(..) => Error::ParseError { pos: input.len() },
Err::Error((rest, ..)) | Err::Failure((rest, ..))
=> Error::ParseError { pos: input.len() - rest.len() },
})?
.1
.as_ref()
.map(|x| self.query_to_postgres(x))
.transpose()?
.unwrap_or_else(|| "TRUE".into()))
}
pub fn query_to_postgres(&self, query: &Query) -> Result<String> {
let result = match query {
Query::Or { queries } => if queries.is_empty() { "TRUE".into() } else {
queries.iter()
.map(|x| self.query_to_postgres(x))
.collect::<Result<Vec<_>>>()?
.join(" OR ")
}
Query::And { queries } => if queries.is_empty() { "FALSE".into() } else {
queries.iter()
.map(|x| self.query_to_postgres(x))
.collect::<Result<Vec<_>>>()?
.join(" AND ")
}
Query::Not { query } => format!("NOT {}", self.query_to_postgres(query)?),
Query::Equal { field, value } => match field {
Some(field) => {
let config = self.fields.get(field)
.ok_or_else(|| Error::UnknownField { field: field.clone() })?;
if !config.partial_equal {
return Err(Error::UnsupportedOperation {
field: field.clone(),
required_operation: "equal".into(),
});
}
let rename = config.rename.as_ref().unwrap_or(field);
match value {
Some(value) => {
let value = config.escape(value)?;
if config.use_like {
let value = value
.replace("^", "^^")
.replace("%", "^%")
.replace("_", "^_");
format!("{} ILIKE '%{}%' ESCAPE '^'", rename, &value[1..value.len() - 1])
} else {
format!("{} = {}", rename, value)
}
}
None => format!("{} IS NULL", rename),
}
}
None => {
let queries = self.fields.values()
.filter(|x| x.wildcard && x.partial_equal)
.map(|config| {
let rename = config.rename.as_ref().unwrap_or(&config.field);
match value {
Some(value) => {
let value = config.escape(value)?;
Ok(if config.use_like {
let value = value
.replace("^", "^^")
.replace("%", "^%")
.replace("_", "^_");
format!("{} ILIKE '%{}%' ESCAPE '^'", rename, &value[1..value.len() - 1])
} else {
format!("{} = {}", rename, value)
})
}
None => Ok(format!("{} IS NULL", rename)),
}
})
.flat_map(Result::ok)
.collect::<Vec<_>>();
if queries.is_empty() {
return Err(Error::EmptyWildcardOperation {
required_operation: "equal".into(),
});
}
queries.join(" OR ")
}
}
Query::Order { field, operator, value } => {
let operator = match operator {
OrderOperator::Lte => "<=",
OrderOperator::Gte => ">=",
OrderOperator::Lt => "<",
OrderOperator::Gt => ">",
};
match field {
Some(field) => {
let config = self.fields.get(field)
.ok_or_else(|| Error::UnknownField { field: field.clone() })?;
if !config.partial_order {
return Err(Error::UnsupportedOperation {
field: field.clone(),
required_operation: "order".into(),
});
}
let value = config.escape(value)?;
let rename = config.rename.as_ref().unwrap_or(field);
format!("{} {} {}", rename, operator, value)
}
None => {
let queries = self.fields.values()
.filter(|x| x.wildcard && x.partial_order)
.map(|config| {
let value = config.escape(value)?;
let rename = config.rename.as_ref().unwrap_or(&config.field);
Ok(format!("{} {} {}", rename, operator, value))
})
.filter_map(Result::ok)
.collect::<Vec<_>>();
if queries.is_empty() {
return Err(Error::EmptyWildcardOperation {
required_operation: "order".into(),
});
}
queries.join(" OR ")
}
}
}
};
Ok(format!("({})", result))
}
}
#[cfg(test)]
mod tests {
    use super::*;
    // Default handler: single quotes are doubled (SQL string escaping) and
    // the whole value is wrapped in quotes.
    #[test]
    pub fn escape_to_string_test() {
        assert_eq!(
            FieldConfig::new("").escape("abc'inject"),
            Ok("'abc''inject'".into()));
    }
    // Date-time handler: valid RFC 3339 input is normalised and quoted;
    // unparseable input reports the configured type name.
    #[test]
    pub fn escape_to_chrono_test() {
        assert_eq!(
            FieldConfig::new("")
                .escape_handler(escape_quoted_with_converter(UtcDateTime::to_rfc3339))
                .escape("2014-11-28T12:00:09Z"),
            Ok("'2014-11-28T12:00:09+00:00'".into()));
        assert_eq!(
            FieldConfig::new("a")
                .escape_handler(escape_quoted_with_converter(UtcDateTime::to_rfc3339))
                .type_name("DateTime")
                .escape("abc"),
            Err(Error::InvalidValue {
                field: "a".into(),
                accepted_type: "DateTime".into(),
            }));
    }
    // Numeric handler: values render unquoted; a negative value fails for an
    // unsigned type and the error falls back to `std::any::type_name`.
    #[test]
    pub fn escape_to_number_test() {
        assert_eq!(
            FieldConfig::new("")
                .escape_handler(escape_unquoted::<i32>())
                .escape("-1"),
            Ok("-1".into()));
        assert_eq!(
            FieldConfig::new("")
                .escape_handler(escape_unquoted::<u32>())
                .escape("-1"),
            Err(Error::InvalidValue {
                field: "".into(),
                accepted_type: "u32".into(),
            }));
    }
    // End-to-end: parse query strings and render PostgreSQL predicates,
    // covering explicit fields, empty input, wildcard order, wildcard equal
    // (HashMap iteration order is unspecified, hence the two accepted
    // orderings), and ILIKE metacharacter escaping.
    #[test]
    pub fn generator_test1() {
        let generator = QueryConfig::new()
            .field(FieldConfig::new("id")
                .wildcard()
                .partial_equal()
                .partial_order()
                .escape_handler(escape_unquoted::<i32>())
            )
            .field(FieldConfig::new("text")
                .wildcard()
                .rename("\"text\"")
                .use_like()
                .partial_equal()
            );
        assert_eq!(
            generator.parse_to_postgres("id > 1 and (id < 1 id: 1)"),
            Ok("((id > 1) AND ((id < 1) OR (id = 1)))".into())
        );
        assert_eq!(
            generator.parse_to_postgres("\n"),
            Ok("TRUE".into())
        );
        assert_eq!(
            generator.parse_to_postgres("* > 2"),
            Ok("(id > 2)".into())
        );
        let result = generator.parse_to_postgres("1");
        assert!(
            result == Ok("(id = 1 OR \"text\" ILIKE '%1%' ESCAPE '^')".into())
                || result == Ok("(\"text\" ILIKE '%1%' ESCAPE '^' OR id = 1)".into())
        );
        assert_eq!(
            generator.parse_to_postgres("ab%c"),
            Ok("(\"text\" ILIKE '%ab^%c%' ESCAPE '^')".into())
        );
    }
}
|
use proc_macro2::Span;
/// Errors raised while converting between `syn`/`proc_macro2` syntax trees
/// and this crate's expression representation. Each variant carries the
/// `Span` where the problem was detected, for diagnostics.
#[derive(Debug)]
pub enum Error {
    /// Encountered an expression kind this crate cannot handle.
    UnsupportedExpr(Span),
    // UnsupportedMethod(Span),
    // UnsupportedStatement(Span),
    // UnsuportedClosureArgument(Span),
    // BlockMustHaveOneStatement(Span),
    // BadAttribute(Span),
    /// A looked-up item was not found.
    NotFound(Span),
    CouldNotConvertToExpression(Span),
    CouldNotConvertFromExpression(Span),
    /// Input failed to parse; see the `From<syn::Error>` conversion below.
    CouldNotParse(Span),
    // NOTE(review): variant name is misspelled ("Evaulate" -> "Evaluate");
    // renaming is a breaking change for callers, so it is kept as-is.
    CouldNotEvaulate(Span),
    WrongNumberOfTerms(Span),
}
/// Convenience alias for results carrying this module's [`Error`].
pub type Result<T> = std::result::Result<T, Error>;
impl From<syn::Error> for Error {
    // Collapses any `syn` parse error into `CouldNotParse`. The original
    // error's span is discarded and the call site's span is used instead.
    fn from(_: syn::Error) -> Self {
        Error::CouldNotParse(Span::call_site())
    }
}
|
use std::io::{BufRead, BufReader, BufWriter, Read, Seek, SeekFrom, Write};
use std::fmt;
use crate::kv::Result;
/// Buffered reader over a log file that tracks the absolute byte offset of
/// everything read or consumed through it.
#[derive(Debug)]
pub struct LogReader<T: Read> {
    pub reader: BufReader<T>,
    pub pos: u64, // the position of the log
}
impl<T: Read + Seek> LogReader<T> {
    /// Wraps `reader` in a buffered reader, recording its current offset as
    /// the starting position.
    pub fn new(mut reader: T) -> Result<Self> {
        let start = reader.seek(SeekFrom::Current(0))?;
        let reader = BufReader::new(reader);
        Ok(LogReader { reader, pos: start })
    }
}
impl<T: Read + Seek> Read for LogReader<T> {
    /// Reads into `buf`, advancing the tracked log position by the number of
    /// bytes actually read.
    fn read(&mut self, buf: &mut [u8]) -> std::io::Result<usize> {
        let n = self.reader.read(buf)?;
        self.pos += n as u64;
        Ok(n)
    }
}
impl<T: Read + Seek> Seek for LogReader<T> {
    /// Seeks the underlying buffered reader and records the resulting offset.
    fn seek(&mut self, pos: SeekFrom) -> std::io::Result<u64> {
        let new_pos = self.reader.seek(pos)?;
        self.pos = new_pos;
        Ok(new_pos)
    }
}
impl<T: Read + Seek> BufRead for LogReader<T> {
    fn fill_buf(&mut self) -> std::io::Result<&[u8]> {
        self.reader.fill_buf()
    }
    fn consume(&mut self, amt: usize) {
        // @TODO: This might be incorrect
        // NOTE(review): `pos` advances by `amt` unconditionally; the tracked
        // position stays correct only if callers consume no more than what
        // `fill_buf` previously returned — confirm all call sites honour that.
        self.pos += amt as u64;
        self.reader.consume(amt);
    }
}
/// Buffered writer for a log file that tracks the absolute byte offset of
/// everything written through it, plus the file's name for diagnostics.
pub struct LogWriter<T: Write> {
    pub writer: BufWriter<T>,
    pub filename: String,
    pub pos: u64,
}
impl<T: Write + Seek> LogWriter<T> {
    /// Creates a buffered log writer positioned at the end of `writer`
    /// (append mode), remembering that offset as the starting position.
    pub fn new(mut writer: T, filename: String) -> Result<Self> {
        let end = writer.seek(SeekFrom::End(0))?;
        let writer = BufWriter::new(writer);
        Ok(LogWriter { writer, filename, pos: end })
    }
}
impl<T: Write + Seek> Write for LogWriter<T> {
    /// Writes `buf` through the buffer, advancing the tracked position by the
    /// number of bytes accepted.
    fn write(&mut self, buf: &[u8]) -> std::io::Result<usize> {
        let n = self.writer.write(buf)?;
        self.pos += n as u64;
        Ok(n)
    }
    /// Flushes buffered bytes down to the underlying writer.
    fn flush(&mut self) -> std::io::Result<()> {
        self.writer.flush()
    }
}
impl<T: Write> fmt::Debug for LogWriter<T> {
    /// Debug-formats the writer. The inner `BufWriter` is represented by a
    /// placeholder because `T` is not required to implement `Debug`.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Previously reported the struct as "Point" (a copy-paste from an
        // example) and omitted `filename`; report the real type and fields.
        f.debug_struct("LogWriter")
            .field("writer", &"writer")
            .field("filename", &self.filename)
            .field("pos", &self.pos)
            .finish()
    }
}
impl<T: Write + Seek> Seek for LogWriter<T> {
    /// Seeks the underlying buffered writer and records the resulting offset.
    fn seek(&mut self, pos: SeekFrom) -> std::io::Result<u64> {
        let new_pos = self.writer.seek(pos)?;
        self.pos = new_pos;
        Ok(new_pos)
    }
}
|
#[cfg(feature = "sputnik")]
/// Abstraction over [Sputnik EVM](https://github.com/rust-blockchain/evm)
pub mod sputnik;
/// Abstraction over [evmodin](https://github.com/rust-blockchain/evm)
#[cfg(feature = "evmodin")]
pub mod evmodin;
mod blocking_provider;
pub use blocking_provider::BlockingProvider;
use ethers::{
abi::{Detokenize, Function, Tokenize},
core::types::{Address, U256},
prelude::{decode_function_data, encode_function_data, Bytes},
};
use dapp_utils::get_func;
use eyre::Result;
// TODO: Any reason this should be an async trait?
/// Low-level abstraction layer for interfacing with various EVMs. Once instantiated, one
/// only needs to specify the transaction parameters
pub trait Evm<State> {
    /// The returned reason type from an EVM (Success / Revert/ Stopped etc.)
    type ReturnReason: std::fmt::Debug + PartialEq;
    /// Whether a return reason should be considered successful
    fn is_success(reason: &Self::ReturnReason) -> bool;
    /// Whether a return reason should be considered failing
    fn is_fail(reason: &Self::ReturnReason) -> bool;
    /// Sets the provided contract bytecode at the corresponding addresses
    fn initialize_contracts<I: IntoIterator<Item = (Address, Bytes)>>(&mut self, contracts: I);
    /// Gets a reference to the current state of the EVM
    fn state(&self) -> &State;
    /// Resets the EVM's state to the provided value
    fn reset(&mut self, state: State);
    /// Executes the specified EVM call against the state
    // TODO: Should we just make this take a `TransactionRequest` or other more
    // ergonomic type?
    /// ABI-encodes `args` for `func`, dispatches through [`Evm::call_raw`],
    /// and ABI-decodes the return data as `D`. Returns the decoded value,
    /// the EVM's return reason, and the gas used.
    fn call<D: Detokenize, T: Tokenize>(
        &mut self,
        from: Address,
        to: Address,
        func: &Function,
        args: T, // derive arbitrary for Tokenize?
        value: U256,
    ) -> Result<(D, Self::ReturnReason, u64)> {
        let calldata = encode_function_data(func, args)?;
        // `Function::constant` is deprecated in ethers but still consulted
        // here alongside the state mutability to decide on a static call.
        #[allow(deprecated)]
        let is_static = func.constant ||
            matches!(
                func.state_mutability,
                ethers::abi::StateMutability::View | ethers::abi::StateMutability::Pure
            );
        let (retdata, status, gas) = self.call_raw(from, to, calldata, value, is_static)?;
        let retdata = decode_function_data(func, retdata, false)?;
        Ok((retdata, status, gas))
    }
    /// Executes a raw calldata call; implementors provide this primitive and
    /// get the typed [`Evm::call`] wrapper for free.
    fn call_raw(
        &mut self,
        from: Address,
        to: Address,
        calldata: Bytes,
        value: U256,
        is_static: bool,
    ) -> Result<(Bytes, Self::ReturnReason, u64)>;
    /// Runs the `setUp()` function call to instantiate the contract's state
    fn setup(&mut self, address: Address) -> Result<()> {
        let (_, _, _) = self.call::<(), _>(
            Address::zero(),
            address,
            &get_func("function setUp() external").unwrap(),
            (),
            0.into(),
        )?;
        // debug_assert_eq!(status, ExitReason::Succeed(ExitSucceed::Stopped));
        Ok(())
    }
    /// Runs the `failed()` function call to inspect the test contract's state and
    /// see whether the `failed` state var is set. This is to allow compatibility
    /// with dapptools-style DSTest smart contracts to preserve emiting of logs
    fn failed(&mut self, address: Address) -> Result<bool> {
        let (failed, _, _) = self.call::<bool, _>(
            Address::zero(),
            address,
            &get_func("function failed() returns (bool)").unwrap(),
            (),
            0.into(),
        )?;
        Ok(failed)
    }
    /// Given a smart contract address, the result type and whether it's expected to fail,
    /// it returns the test's success status
    fn check_success(
        &mut self,
        address: Address,
        reason: &Self::ReturnReason,
        should_fail: bool,
    ) -> bool {
        if should_fail {
            if Self::is_success(reason) {
                // The call itself succeeded, so the test only "failed as
                // expected" if the DSTest `failed` flag was set.
                self.failed(address).unwrap_or(false)
            } else if Self::is_fail(reason) {
                true
            } else {
                // Neither success nor failure (e.g. an unexpected halt);
                // log it and treat as a test failure.
                tracing::error!(?reason);
                false
            }
        } else {
            Self::is_success(reason)
        }
    }
    // TODO: Should we add a "deploy contract" function as well, or should we assume that
    // the EVM is instantiated with a DB that includes any needed contracts?
}
// Test helpers which are generic over EVM implementation
#[cfg(test)]
mod test_helpers {
    use super::*;
    use dapp_solc::SolcBuilder;
    use ethers::{prelude::Lazy, utils::CompiledContract};
    use std::collections::HashMap;
    /// All contracts under ./testdata, compiled once and shared by the tests.
    pub static COMPILED: Lazy<HashMap<String, CompiledContract>> =
        Lazy::new(|| SolcBuilder::new("./testdata/*.sol", &[], &[]).unwrap().build_all().unwrap());
    /// Deploys `compiled` at `addr`, sets a greeting and reads it back,
    /// asserting that both calls succeeded.
    pub fn can_call_vm_directly<S, E: Evm<S>>(
        mut evm: E,
        addr: Address,
        compiled: &CompiledContract,
    ) {
        evm.initialize_contracts(vec![(addr, compiled.runtime_bytecode.clone())]);
        let (_, status1, _) = evm
            .call::<(), _>(
                Address::zero(),
                addr,
                &get_func("function greet(string greeting) external").unwrap(),
                "hi".to_owned(),
                0.into(),
            )
            .unwrap();
        let (retdata, status2, _) = evm
            .call::<String, _>(
                Address::zero(),
                addr,
                &get_func("function greeting() public view returns (string)").unwrap(),
                (),
                0.into(),
            )
            .unwrap();
        assert_eq!(retdata, "hi");
        // A fixed-size array avoids the needless `vec!` heap allocation
        // (clippy::useless_vec).
        for reason in &[status1, status2] {
            assert!(evm.check_success(addr, reason, false));
        }
    }
    /// Runs a dapptools-style solidity unit test: `setUp()` followed by
    /// `testGreeting()`, asserting both calls succeeded.
    pub fn solidity_unit_test<S, E: Evm<S>>(
        mut evm: E,
        addr: Address,
        compiled: &CompiledContract,
    ) {
        evm.initialize_contracts(vec![(addr, compiled.runtime_bytecode.clone())]);
        // call the setup function to deploy the contracts inside the test
        let (_, status1, _) = evm
            .call::<(), _>(
                Address::zero(),
                addr,
                &get_func("function setUp() external").unwrap(),
                (),
                0.into(),
            )
            .unwrap();
        let (_, status2, _) = evm
            .call::<(), _>(
                Address::zero(),
                addr,
                &get_func("function testGreeting()").unwrap(),
                (),
                0.into(),
            )
            .unwrap();
        // Same useless_vec fix as above.
        for reason in &[status1, status2] {
            assert!(evm.check_success(addr, reason, false));
        }
        // TODO: Add testFail
    }
}
|
use clap::{crate_authors, crate_description, crate_name, crate_version, App, AppSettings, Arg};
use std::error::Error;
pub mod config;
pub mod logging;
pub mod subcommands;
/// CLI entry point: builds the argument parser, loads configuration,
/// initialises logging, then dispatches to the selected subcommand.
pub async fn execute() -> Result<(), Box<dyn Error>> {
    let matches = subcommands::setup(app()).get_matches();
    let config = config::setup(&matches).unwrap();
    logging::setup(&config).unwrap();
    subcommands::execute(&config, matches).await?;
    Ok(())
}
/// Builds the base clap application: crate metadata, help shown when no
/// arguments are given, and a global `-d`/`--debug` flag.
fn app() -> App<'static> {
    let debug_flag = Arg::new("debug")
        .short('d')
        .long("debug")
        .about("Turn debugging information on");
    App::new(crate_name!())
        .about(crate_description!())
        .version(crate_version!())
        .author(crate_authors!())
        .setting(AppSettings::ArgRequiredElseHelp)
        .arg(debug_flag)
}
|
use std::mem;
use std::ptr::null_mut;
use bincode;
use flate2;
use libc::c_void;
use bw;
use dat;
use entity_serialize::{self, deserialize_entity, entity_serializable, EntitySerializable};
use save::{fread, fwrite, fread_num, fwrite_num, SaveError, LoadError, print_text};
use sprites::{
sprite_to_id_current_mapping,
sprite_from_id_current_mapping,
lone_sprite_from_id_current_mapping,
lone_sprite_to_id_current_mapping,
};
// Magic number written at the start of the custom unit save chunk.
const UNIT_SAVE_MAGIC: u16 = 0xffed;
// 16 megabytes, should be more than enough, both compressed and without.
const UNIT_SAVE_MAX_SIZE: u32 = 0x1_000_000;
// Marker type implementing the entity-pointer (de)serialization hooks for
// units (see the `SaveEntityPointer`/`LoadEntityPointer` impls below).
struct ConvertUnits;
// units.dat ids of unit types that need type-specific handling when
// (de)serializing their unit-specific data areas.
const GHOST: u16 = 0x01;
const CARRIER: u16 = 0x48;
const WARBRINGER: u16 = 0x51;
const GANTRITHOR: u16 = 0x52;
const REAVER: u16 = 0x53;
const SCARAB: u16 = 0x55;
const INTERCEPTOR: u16 = 0x49;
const NUCLEAR_SILO: u16 = 0x6c;
const PYLON: u16 = 0x9c;
const MINERAL_FIELD1: u16 = 0xb0;
const MINERAL_FIELD2: u16 = 0xb1;
const MINERAL_FIELD3: u16 = 0xb2;
const VESPENE_GEYSER: u16 = 0xbc;
impl entity_serialize::SaveEntityPointer for ConvertUnits {
    type Pointer = bw::Unit;
    // Converts a raw unit pointer into a stable id for serialization.
    fn pointer_to_id(&self, val: *mut bw::Unit) -> Result<u32, SaveError> {
        Ok(unit_to_id(val) as u32)
    }
}
impl entity_serialize::LoadEntityPointer for ConvertUnits {
    type Pointer = bw::Unit;
    // Restores a raw unit pointer from a previously serialized id.
    fn id_to_pointer(&self, val: u32) -> Result<*mut bw::Unit, LoadError> {
        unit_from_id(val as u16)
    }
}
/// Snapshot of the global unit-list heads/tails, stored as unit ids
/// (rather than pointers) so they survive (de)serialization.
#[derive(Serialize, Deserialize)]
struct SaveGlobals {
    first_active: u16,
    last_active: u16,
    first_hidden: u16,
    last_hidden: u16,
    first_dying: u16,
    last_dying: u16,
    first_revealer: u16,
    last_revealer: u16,
    first_free: u16,
    last_free: u16,
    first_invisible: u16,
    // Per-player first-unit list heads (0xc player slots).
    player_units: [u16; 0xc],
}
/// Serializable form of a unit's AI pointer: each variant stores the element
/// index into the corresponding static `bw` ai array (guard/worker/building/
/// military), or `NoAi` for a null pointer.
#[derive(Serialize, Deserialize, Debug, Copy, Clone)]
enum UnitAiSerializable {
    NoAi,
    Guard(u16),
    Worker(u16),
    Building(u16),
    Military(u16),
}
impl UnitAiSerializable {
    /// Converts a raw `UnitAi` pointer into its index-based serializable form.
    ///
    /// The `ty` field discriminates which of the four static ai arrays the
    /// pointer belongs to; the element index is recovered by pointer-offset
    /// arithmetic against the array's first element.
    unsafe fn new(ai: *mut bw::UnitAi) -> Result<UnitAiSerializable, SaveError> {
        use self::UnitAiSerializable::*;
        if ai == null_mut() {
            Ok(NoAi)
        } else {
            match (*ai).ty {
                1 => {
                    let ptr: *mut bw::GuardAi = &mut bw::guard_ais[0];
                    let val = (ai as usize - ptr as usize) / mem::size_of::<bw::GuardAi>();
                    // NOTE(review): 1000 appears to be the ai array length;
                    // confirm against the `bw` array declarations.
                    assert!(val < 1000);
                    Ok(Guard(val as u16))
                }
                2 => {
                    let ptr: *mut bw::WorkerAi = &mut bw::worker_ais[0];
                    let val = (ai as usize - ptr as usize) / mem::size_of::<bw::WorkerAi>();
                    assert!(val < 1000);
                    Ok(Worker(val as u16))
                }
                3 => {
                    let ptr: *mut bw::BuildingAi = &mut bw::building_ais[0];
                    let val = (ai as usize - ptr as usize) / mem::size_of::<bw::BuildingAi>();
                    assert!(val < 1000);
                    Ok(Building(val as u16))
                }
                4 => {
                    let ptr: *mut bw::MilitaryAi = &mut bw::military_ais[0];
                    let val = (ai as usize - ptr as usize) / mem::size_of::<bw::MilitaryAi>();
                    assert!(val < 1000);
                    Ok(Military(val as u16))
                }
                _ => Err(SaveError::InvalidUnitAi((*ai).ty)),
            }
        }
    }
    /// Inverse of `new`: converts the stored index back into a pointer into
    /// the matching static ai array. Indices from corrupt saves are rejected
    /// with `LoadError::Corrupted` rather than indexing out of bounds.
    unsafe fn to_pointer(self) -> Result<*mut bw::UnitAi, LoadError> {
        use self::UnitAiSerializable::*;
        match self {
            NoAi => Ok(null_mut()),
            Guard(val) => {
                let val = val as usize;
                if val >= 1000 {
                    Err(LoadError::Corrupted(format!("Invalid unit ai {:?}", self)))
                } else {
                    Ok(&mut bw::guard_ais[val] as *mut bw::GuardAi as *mut bw::UnitAi)
                }
            }
            Worker(val) => {
                let val = val as usize;
                if val >= 1000 {
                    Err(LoadError::Corrupted(format!("Invalid unit ai {:?}", self)))
                } else {
                    Ok(&mut bw::worker_ais[val] as *mut bw::WorkerAi as *mut bw::UnitAi)
                }
            }
            Building(val) => {
                let val = val as usize;
                if val >= 1000 {
                    Err(LoadError::Corrupted(format!("Invalid unit ai {:?}", self)))
                } else {
                    Ok(&mut bw::building_ais[val] as *mut bw::BuildingAi as *mut bw::UnitAi)
                }
            }
            Military(val) => {
                let val = val as usize;
                if val >= 1000 {
                    Err(LoadError::Corrupted(format!("Invalid unit ai {:?}", self)))
                } else {
                    Ok(&mut bw::military_ais[val] as *mut bw::MilitaryAi as *mut bw::UnitAi)
                }
            }
        }
    }
}
/// The 0x10-byte unit-specific data area, with any embedded unit pointers
/// rewritten in place as serializable unit ids. Which byte offsets contain
/// pointers depends on the unit type.
#[derive(Serialize, Deserialize, Clone)]
struct UnitSpecificSerializable([u8; 0x10]);
impl UnitSpecificSerializable {
    /// Rewrites the pointer slots of `data` from `*mut bw::Unit` to 32-bit
    /// unit ids, based on the unit type's layout.
    unsafe fn new(
        mut data: [u8; 0x10],
        unit_id: u16,
        is_building: bool,
    ) -> Result<UnitSpecificSerializable, SaveError> {
        let ptr = data.as_mut_ptr();
        if has_hangar(unit_id) {
            // Hangar units: two child-unit pointers at offsets 0 and 4.
            *(ptr.offset(0) as *mut u32) =
                unit_to_id(*(ptr.offset(0) as *const *mut bw::Unit)) as u32;
            *(ptr.offset(4) as *mut u32) =
                unit_to_id(*(ptr.offset(4) as *const *mut bw::Unit)) as u32;
        } else if unit_id == INTERCEPTOR || unit_id == SCARAB {
            // Child units: parent/links at offsets 0, 4, and 8.
            *(ptr.offset(0) as *mut u32) =
                unit_to_id(*(ptr.offset(0) as *const *mut bw::Unit)) as u32;
            *(ptr.offset(4) as *mut u32) =
                unit_to_id(*(ptr.offset(4) as *const *mut bw::Unit)) as u32;
            *(ptr.offset(8) as *mut u32) =
                unit_to_id(*(ptr.offset(8) as *const *mut bw::Unit)) as u32;
        } else if is_building {
            *(ptr.offset(0) as *mut u32) =
                unit_to_id(*(ptr.offset(0) as *const *mut bw::Unit)) as u32;
        } else if is_worker(unit_id) {
            *(ptr.offset(0) as *mut u32) =
                unit_to_id(*(ptr.offset(0) as *const *mut bw::Unit)) as u32;
            *(ptr.offset(8) as *mut u32) =
                unit_to_id(*(ptr.offset(8) as *const *mut bw::Unit)) as u32;
        }
        Ok(UnitSpecificSerializable(data))
    }
    /// Inverse of `new`: rewrites the id slots back to unit pointers.
    ///
    /// NOTE(review): ids are written as u32 above but read back here as u16.
    /// This relies on unit ids fitting in 16 bits and on little-endian byte
    /// order — confirm this asymmetry is intentional.
    unsafe fn deserialize(
        mut self,
        unit_id: u16,
        is_building: bool
    ) -> Result<[u8; 0x10], LoadError> {
        let ptr = self.0.as_mut_ptr();
        if has_hangar(unit_id) {
            *(ptr.offset(0) as *mut *mut bw::Unit) =
                unit_from_id(*(ptr.offset(0) as *const u16))?;
            *(ptr.offset(4) as *mut *mut bw::Unit) =
                unit_from_id(*(ptr.offset(4) as *const u16))?;
        } else if unit_id == INTERCEPTOR || unit_id == SCARAB {
            *(ptr.offset(0) as *mut *mut bw::Unit) =
                unit_from_id(*(ptr.offset(0) as *const u16))?;
            *(ptr.offset(4) as *mut *mut bw::Unit) =
                unit_from_id(*(ptr.offset(4) as *const u16))?;
            *(ptr.offset(8) as *mut *mut bw::Unit) =
                unit_from_id(*(ptr.offset(8) as *const u16))?;
        } else if is_building {
            *(ptr.offset(0) as *mut *mut bw::Unit) =
                unit_from_id(*(ptr.offset(0) as *const u16))?;
        } else if is_worker(unit_id) {
            *(ptr.offset(0) as *mut *mut bw::Unit) =
                unit_from_id(*(ptr.offset(0) as *const u16))?;
            *(ptr.offset(8) as *mut *mut bw::Unit) =
                unit_from_id(*(ptr.offset(8) as *const u16))?;
        }
        Ok(self.0)
    }
}
/// The 0xc-byte second unit-specific data area, with embedded pointers
/// (units, and for ghosts/pylons their aura sprites) rewritten in place as
/// serializable ids. Offsets depend on the unit type.
#[derive(Serialize, Deserialize, Clone)]
struct UnitSpecificSerializable2([u8; 0xc]);
impl UnitSpecificSerializable2 {
    /// Rewrites the pointer slots of `data` to 32-bit ids, based on the unit
    /// type's layout.
    unsafe fn new(
        mut data: [u8; 0xc],
        unit_id: u16,
    ) -> Result<UnitSpecificSerializable2, SaveError> {
        let ptr = data.as_mut_ptr();
        if is_resource(unit_id) {
            *(ptr.offset(4) as *mut u32) =
                unit_to_id(*(ptr.offset(4) as *const *mut bw::Unit)) as u32;
        } else if is_powerup(unit_id) {
            *(ptr.offset(4) as *mut u32) =
                unit_to_id(*(ptr.offset(4) as *const *mut bw::Unit)) as u32;
        } else if is_worker(unit_id) {
            *(ptr.offset(0) as *mut u32) =
                unit_to_id(*(ptr.offset(0) as *const *mut bw::Unit)) as u32;
            *(ptr.offset(4) as *mut u32) =
                unit_to_id(*(ptr.offset(4) as *const *mut bw::Unit)) as u32;
            *(ptr.offset(8) as *mut u32) =
                unit_to_id(*(ptr.offset(8) as *const *mut bw::Unit)) as u32;
        } else if unit_id == NUCLEAR_SILO {
            *(ptr.offset(0) as *mut u32) =
                unit_to_id(*(ptr.offset(0) as *const *mut bw::Unit)) as u32;
        } else if unit_id == GHOST {
            // Ghosts store a lone-sprite pointer (nuke dot) rather than a unit.
            *(ptr.offset(0) as *mut u32) = lone_sprite_to_id_current_mapping(
                *(ptr.offset(0) as *const *mut bw::LoneSprite)
            )? as u32;
        } else if unit_id == PYLON {
            // Pylons store a sprite pointer (power aura).
            *(ptr.offset(0) as *mut u32) =
                sprite_to_id_current_mapping(*(ptr.offset(0) as *const *mut bw::Sprite))? as u32;
        }
        Ok(UnitSpecificSerializable2(data))
    }
    /// Inverse of `new`: rewrites the id slots back to pointers.
    ///
    /// NOTE(review): unit ids are written as u32 but read back as u16 (sprite
    /// ids are read as u32) — relies on little-endian layout and ids fitting
    /// in 16 bits; confirm intentional.
    unsafe fn deserialize(
        mut self,
        unit_id: u16,
    ) -> Result<[u8; 0xc], LoadError> {
        let ptr = self.0.as_mut_ptr();
        if is_resource(unit_id) {
            *(ptr.offset(4) as *mut *mut bw::Unit) =
                unit_from_id(*(ptr.offset(4) as *const u16))?;
        } else if is_powerup(unit_id) {
            *(ptr.offset(4) as *mut *mut bw::Unit) =
                unit_from_id(*(ptr.offset(4) as *const u16))?;
        } else if is_worker(unit_id) {
            *(ptr.offset(0) as *mut *mut bw::Unit) =
                unit_from_id(*(ptr.offset(0) as *const u16))?;
            *(ptr.offset(4) as *mut *mut bw::Unit) =
                unit_from_id(*(ptr.offset(4) as *const u16))?;
            *(ptr.offset(8) as *mut *mut bw::Unit) =
                unit_from_id(*(ptr.offset(8) as *const u16))?;
        } else if unit_id == NUCLEAR_SILO {
            *(ptr.offset(0) as *mut *mut bw::Unit) =
                unit_from_id(*(ptr.offset(0) as *const u16))?;
        } else if unit_id == GHOST {
            *(ptr.offset(0) as *mut *mut bw::LoneSprite) =
                lone_sprite_from_id_current_mapping(*(ptr.offset(0) as *const u32))?;
        } else if unit_id == PYLON {
            *(ptr.offset(0) as *mut *mut bw::Sprite) =
                sprite_from_id_current_mapping(*(ptr.offset(0) as *const u32))?;
        }
        Ok(self.0)
    }
}
fn has_hangar(unit_id: u16) -> bool {
unit_id == CARRIER ||
unit_id == GANTRITHOR ||
unit_id == REAVER ||
unit_id == WARBRINGER
}
fn is_resource(unit_id: u16) -> bool {
unit_id == MINERAL_FIELD1 ||
unit_id == MINERAL_FIELD2 ||
unit_id == MINERAL_FIELD3 ||
unit_id == VESPENE_GEYSER
}
// units.dat flag 0x8 marks worker units.
unsafe fn is_worker(unit_id: u16) -> bool {
    dat::units::flags(unit_id) & 0x8 != 0
}
// units.dat flag 0x800 marks powerup items.
unsafe fn is_powerup(unit_id: u16) -> bool {
    dat::units::flags(unit_id) & 0x800 != 0
}
/// Serializable form of the 8-byte rally/pylon union. For pylons it holds two
/// unit ids (pylon list links); for everything else a rally point (x, y) and
/// a rally-target unit id.
#[derive(Serialize, Deserialize, Clone)]
struct RallyPylonSerializable {
    val1: u16,
    val2: u16,
    val3: u16,
}
impl RallyPylonSerializable {
    /// Reinterprets the raw 8-byte area according to the unit type, replacing
    /// any embedded unit pointers with ids.
    unsafe fn new(data: [u8; 0x8], unit_id: u16) -> RallyPylonSerializable {
        let data = data.as_ptr();
        if unit_id == PYLON {
            RallyPylonSerializable {
                val1: unit_to_id(*(data.offset(0) as *const *mut bw::Unit)),
                val2: unit_to_id(*(data.offset(4) as *const *mut bw::Unit)),
                val3: 0,
            }
        } else {
            // Whatever
            // Rally layout: two u16 coordinates then a unit pointer.
            RallyPylonSerializable {
                val1: *(data.offset(0) as *const u16),
                val2: *(data.offset(2) as *const u16),
                val3: unit_to_id(*(data.offset(4) as *const *mut bw::Unit)),
            }
        }
    }
    /// Inverse of `new`: rebuilds the raw 8-byte area, converting ids back to
    /// unit pointers.
    unsafe fn deserialize(self, unit_id: u16) -> Result<[u8; 8], LoadError> {
        let mut result = [0u8; 8];
        let ptr = result.as_mut_ptr();
        if unit_id == PYLON {
            *(ptr.offset(0) as *mut *mut bw::Unit) = unit_from_id(self.val1)?;
            *(ptr.offset(4) as *mut *mut bw::Unit) = unit_from_id(self.val2)?;
        } else {
            *(ptr.offset(0) as *mut u16) = self.val1;
            *(ptr.offset(2) as *mut u16) = self.val2;
            *(ptr.offset(4) as *mut *mut bw::Unit) = unit_from_id(self.val3)?;
        }
        Ok(result)
    }
}
/// Serializable mirror of `bw::Unit`: pointer fields are replaced by ids
/// (units, orders, paths, ai) and the type-specific byte areas by their
/// serializable wrappers. Field order mirrors the in-memory struct.
#[derive(Serialize, Deserialize)]
struct UnitSerializable {
    entity: EntitySerializable,
    shields: i32,
    unit_id: u16,
    unused66: u16,
    // Linked-list neighbours, stored as unit ids.
    next_player_unit: u16,
    prev_player_unit: u16,
    subunit: u16,
    // Order queue endpoints, stored as order ids.
    order_queue_begin: u16,
    order_queue_end: u16,
    previous_attacker: u16,
    related: u16,
    highlight_order_count: u8,
    order_wait: u8,
    unk86: u8,
    attack_notify_timer: u8,
    previous_unit_id: u16,
    minimap_draw_counter: u8,
    minimap_draw_color: u8,
    unused8c: u16,
    rank: u8,
    kills: u8,
    last_attacking_player: u8,
    secondary_order_wait: u8,
    ai_spell_flags: u8,
    order_flags: u8,
    buttons: u16,
    invisibility_effects: u8,
    movement_state: u8,
    build_queue: [u16; 5],
    energy: u16,
    current_build_slot: u8,
    minor_unique_index: u8,
    secondary_order: u8,
    building_overlay_state: u8,
    build_hp_gain: u16,
    build_shield_gain: u16,
    remaining_build_time: u16,
    previous_hp: u16,
    loaded_units: [u16; 8],
    // Type-specific byte areas with pointers converted to ids.
    unit_specific: UnitSpecificSerializable,
    unit_specific2: UnitSpecificSerializable2,
    flags: u32,
    carried_powerup_flags: u8,
    wireframe_seed: u8,
    secondary_order_state: u8,
    move_target_update_timer: u8,
    detection_status: u32,
    unke8: u16,
    unkea: u16,
    currently_building: u16,
    next_invisible: u16,
    prev_invisible: u16,
    rally_pylon: RallyPylonSerializable,
    // Pathing state, with the path pointer stored as an id.
    path: u16,
    path_frame: u8,
    pathing_flags: u8,
    _unk106: u8,
    _unk107: u8,
    collision_points: [u16; 0x4],
    spells: UnitSpellsSerializable,
    bullet_spread_seed: u16,
    _padding132: [u8; 2],
    ai: UnitAiSerializable,
    air_strength: u16,
    ground_strength: u16,
    pos_search_left: u32,
    pos_search_right: u32,
    pos_search_top: u32,
    pos_search_bottom: u32,
    repulse: bw::Repulse,
}
/// Serializable mirror of `bw::UnitSpells`; `irradiated_by` is stored as a
/// unit id instead of a pointer.
#[derive(Serialize, Deserialize)]
struct UnitSpellsSerializable {
    death_timer: u16,
    defensive_matrix_dmg: u16,
    matrix_timer: u8,
    stim_timer: u8,
    ensnare_timer: u8,
    lockdown_timer: u8,
    irradiate_timer: u8,
    stasis_timer: u8,
    plague_timer: u8,
    is_under_storm: u8,
    irradiated_by: u16,
    irradiate_player: u8,
    parasited_by_players: u8,
    master_spell_timer: u8,
    is_blind: u8,
    maelstrom_timer: u8,
    _unk125: u8,
    acid_spore_count: u8,
    acid_spore_timers: [u8; 0x9],
}
/// C-callable entry point for writing the unit save chunk.
/// Returns 1 on success, 0 on failure (after logging and notifying the user).
pub unsafe fn save_unit_chunk(file: *mut c_void) -> u32 {
    match save_units(file) {
        Ok(()) => 1,
        Err(e) => {
            error!("Couldn't save units: {}", e);
            print_text(&format!("Unable to save the game: {}", e));
            0
        }
    }
}
// Writes the unit chunk to `file`: magic number, format version (1),
// payload length, then the compressed payload from `serialize_units`.
unsafe fn save_units(file: *mut c_void) -> Result<(), SaveError> {
    let data = serialize_units()?;
    fwrite_num(file, UNIT_SAVE_MAGIC)?;
    fwrite_num(file, 1u32)?;
    fwrite_num(file, data.len() as u32)?;
    fwrite(file, &data)?;
    Ok(())
}
// Serializes the global unit-list heads followed by every unit slot into a
// DEFLATE-compressed buffer, enforcing `UNIT_SAVE_MAX_SIZE` on the
// *uncompressed* byte count (`total_in`).
unsafe fn serialize_units() -> Result<Vec<u8>, SaveError> {
    let buf = Vec::with_capacity(0x10000);
    let mut writer = flate2::write::DeflateEncoder::new(buf, flate2::Compression::Default);
    // Bincode's own size limit guards each individual serialize call.
    let size_limit = bincode::Bounded(UNIT_SAVE_MAX_SIZE as u64);
    let globals = SaveGlobals {
        first_active: unit_to_id(*bw::first_active_unit),
        last_active: unit_to_id(*bw::last_active_unit),
        first_hidden: unit_to_id(*bw::first_hidden_unit),
        last_hidden: unit_to_id(*bw::last_hidden_unit),
        first_dying: unit_to_id(*bw::first_dying_unit),
        last_dying: unit_to_id(*bw::last_dying_unit),
        first_revealer: unit_to_id(*bw::first_revealer),
        last_revealer: unit_to_id(*bw::last_revealer),
        first_invisible: unit_to_id(*bw::first_invisible_unit),
        first_free: unit_to_id(*bw::first_free_unit),
        last_free: unit_to_id(*bw::last_free_unit),
        player_units: {
            let mut ids = [0; 0xc];
            for (&unit, out) in bw::first_player_unit.iter().zip(ids.iter_mut()) {
                *out = unit_to_id(unit);
            }
            ids
        }
    };
    bincode::serialize_into(&mut writer, &globals, size_limit)?;
    // Every unit slot is serialized, including free ones, so that list
    // links (stored as slot ids) stay valid on load.
    for unit in bw::units.iter() {
        let serializable = unit_serializable(unit)?;
        bincode::serialize_into(&mut writer, &serializable, size_limit)?;
        if writer.total_in() > UNIT_SAVE_MAX_SIZE as u64{
            return Err(SaveError::SizeLimit(writer.total_in()));
        }
    }
    Ok(writer.finish()?)
}
/// Converts an in-game unit to its serializable form.
///
/// Pointers into BW's global arrays are replaced by 1-based indices
/// (0 = null) via `unit_to_id`/`order_to_id`/`path_to_id`, and union-like
/// fields (`unit_specific`, `unit_specific2`, `rally_pylon`, `ai`) are
/// converted to tagged variants chosen from `unit_id`.
///
/// Safety: `unit` must point to a valid `bw::Unit`.
unsafe fn unit_serializable(unit: *const bw::Unit) -> Result<UnitSerializable, SaveError> {
    // Exhaustive destructuring: adding a field to `bw::Unit` becomes a
    // compile error here, so serialization can't silently miss data.
    let bw::Unit {
        ref entity,
        shields,
        unit_id,
        unused66,
        next_player_unit,
        prev_player_unit,
        subunit,
        order_queue_begin,
        order_queue_end,
        previous_attacker,
        related,
        highlight_order_count,
        order_wait,
        unk86,
        attack_notify_timer,
        previous_unit_id,
        minimap_draw_counter,
        minimap_draw_color,
        unused8c,
        rank,
        kills,
        last_attacking_player,
        secondary_order_wait,
        ai_spell_flags,
        order_flags,
        buttons,
        invisibility_effects,
        movement_state,
        build_queue,
        energy,
        current_build_slot,
        minor_unique_index,
        secondary_order,
        building_overlay_state,
        build_hp_gain,
        build_shield_gain,
        remaining_build_time,
        previous_hp,
        loaded_units,
        unit_specific,
        unit_specific2,
        flags,
        carried_powerup_flags,
        wireframe_seed,
        secondary_order_state,
        move_target_update_timer,
        detection_status,
        unke8,
        unkea,
        currently_building,
        next_invisible,
        prev_invisible,
        rally_pylon,
        path,
        path_frame,
        pathing_flags,
        _unk106,
        _unk107,
        collision_points,
        spells: bw::UnitSpells {
            death_timer,
            defensive_matrix_dmg,
            matrix_timer,
            stim_timer,
            ensnare_timer,
            lockdown_timer,
            irradiate_timer,
            stasis_timer,
            plague_timer,
            is_under_storm,
            irradiated_by,
            irradiate_player,
            parasited_by_players,
            master_spell_timer,
            is_blind,
            maelstrom_timer,
            _unk125,
            acid_spore_count,
            acid_spore_timers,
        },
        bullet_spread_seed,
        _padding132,
        ai,
        air_strength,
        ground_strength,
        pos_search_left,
        pos_search_right,
        pos_search_top,
        pos_search_bottom,
        ref repulse
    } = *unit;
    // Flag 0x2 = building; affects how `unit_specific` is interpreted.
    let is_building = flags & 0x2 != 0;
    Ok(UnitSerializable {
        entity: entity_serializable(entity, &ConvertUnits)?,
        shields,
        unit_id,
        unused66,
        next_player_unit: unit_to_id(next_player_unit),
        prev_player_unit: unit_to_id(prev_player_unit),
        subunit: unit_to_id(subunit),
        order_queue_begin: order_to_id(order_queue_begin),
        order_queue_end: order_to_id(order_queue_end),
        previous_attacker: unit_to_id(previous_attacker),
        related: unit_to_id(related),
        highlight_order_count,
        order_wait,
        unk86,
        attack_notify_timer,
        previous_unit_id,
        minimap_draw_counter,
        minimap_draw_color,
        unused8c,
        rank,
        kills,
        last_attacking_player,
        secondary_order_wait,
        ai_spell_flags,
        order_flags,
        buttons,
        invisibility_effects,
        movement_state,
        build_queue,
        energy,
        current_build_slot,
        minor_unique_index,
        secondary_order,
        building_overlay_state,
        build_hp_gain,
        build_shield_gain,
        remaining_build_time,
        previous_hp,
        loaded_units,
        unit_specific: UnitSpecificSerializable::new(unit_specific, unit_id, is_building)?,
        unit_specific2: UnitSpecificSerializable2::new(unit_specific2, unit_id)?,
        flags,
        carried_powerup_flags,
        wireframe_seed,
        secondary_order_state,
        move_target_update_timer,
        detection_status,
        unke8,
        unkea,
        currently_building: unit_to_id(currently_building),
        next_invisible: unit_to_id(next_invisible),
        prev_invisible: unit_to_id(prev_invisible),
        rally_pylon: RallyPylonSerializable::new(rally_pylon, unit_id),
        path: path_to_id(path),
        path_frame,
        pathing_flags,
        _unk106,
        _unk107,
        collision_points,
        spells: UnitSpellsSerializable {
            death_timer,
            defensive_matrix_dmg,
            matrix_timer,
            stim_timer,
            ensnare_timer,
            lockdown_timer,
            irradiate_timer,
            stasis_timer,
            plague_timer,
            is_under_storm,
            irradiated_by: unit_to_id(irradiated_by),
            irradiate_player,
            parasited_by_players,
            master_spell_timer,
            is_blind,
            maelstrom_timer,
            _unk125,
            acid_spore_count,
            acid_spore_timers,
        },
        bullet_spread_seed,
        _padding132,
        ai: UnitAiSerializable::new(ai)?,
        air_strength,
        ground_strength,
        pos_search_left,
        pos_search_right,
        pos_search_top,
        pos_search_bottom,
        repulse: repulse.clone(),
    })
}
pub unsafe fn load_unit_chunk(file: *mut c_void, save_version: u32) -> u32 {
if save_version != 3 {
error!("Unusupported save version: {}", save_version);
return 0;
}
if let Err(e) = load_units(file) {
info!("Couldn't load a save: {}", e);
return 0;
}
1
}
/// Reads and validates the unit chunk, then rebuilds the in-game unit
/// arrays, linked-list head/tail pointers and spatial structures.
///
/// Layout on disk: u16 magic, u32 chunk version, u32 compressed size,
/// then a deflate stream holding `SaveGlobals` followed by one
/// `UnitSerializable` per slot of `bw::units`.
unsafe fn load_units(file: *mut c_void) -> Result<(), LoadError> {
    let magic = fread_num::<u16>(file)?;
    if magic != UNIT_SAVE_MAGIC {
        return Err(LoadError::WrongMagic(magic));
    }
    let version = fread_num::<u32>(file)?;
    if version != 1 {
        return Err(LoadError::Version(version));
    }
    let size = fread_num::<u32>(file)?;
    // Reject oversized chunks before allocating/reading anything.
    if size > UNIT_SAVE_MAX_SIZE {
        return Err(LoadError::Corrupted(format!("Unit chunk size {} is too large", size)));
    }
    let data = fread(file, size)?;
    let mut reader = flate2::read::DeflateDecoder::new(&data[..]);
    let size_limit = bincode::Bounded(UNIT_SAVE_MAX_SIZE as u64);
    let globals: SaveGlobals = bincode::deserialize_from(&mut reader, size_limit)?;
    for unit in bw::units.iter_mut() {
        let serialized = bincode::deserialize_from(&mut reader, size_limit)?;
        *unit = deserialize_unit(&serialized)?;
        // Guard against decompression bombs: cap total decompressed output.
        if reader.total_out() > UNIT_SAVE_MAX_SIZE as u64 {
            return Err(LoadError::SizeLimit)
        }
    }
    // Restore the global linked-list head/tail pointers from saved indices.
    *bw::first_active_unit = unit_from_id(globals.first_active)?;
    *bw::first_hidden_unit = unit_from_id(globals.first_hidden)?;
    *bw::first_dying_unit = unit_from_id(globals.first_dying)?;
    *bw::first_revealer = unit_from_id(globals.first_revealer)?;
    *bw::first_free_unit = unit_from_id(globals.first_free)?;
    *bw::first_invisible_unit = unit_from_id(globals.first_invisible)?;
    *bw::last_active_unit = unit_from_id(globals.last_active)?;
    *bw::last_hidden_unit = unit_from_id(globals.last_hidden)?;
    *bw::last_dying_unit = unit_from_id(globals.last_dying)?;
    *bw::last_revealer = unit_from_id(globals.last_revealer)?;
    *bw::last_free_unit = unit_from_id(globals.last_free)?;
    for (unit, &saved) in bw::first_player_unit.iter_mut().zip(globals.player_units.iter()) {
        *unit = unit_from_id(saved)?;
    }
    // Walk the active unit list (linked through entity.next) and re-register
    // each unit with the game's spatial/search structures.
    let mut unit = *bw::first_active_unit;
    while unit != null_mut() {
        add_unit_to_game(unit);
        unit = (*unit).entity.next as *mut bw::Unit;
    }
    Ok(())
}
/// Re-registers a freshly loaded active unit with the game's spatial
/// structures (position search, building tile flags, repulse chunks).
///
/// NOTE(review): assumes `pos_search_left == !0` is the sentinel for
/// "not in the position search structure" -- confirm against BW's layout.
unsafe fn add_unit_to_game(unit: *mut bw::Unit) {
    if (*unit).pos_search_left != !0 {
        // Reset all four coordinates to the sentinel so add_to_pos_search
        // treats the unit as not-yet-inserted.
        (*unit).pos_search_left = !0;
        (*unit).pos_search_top = !0;
        (*unit).pos_search_right = !0;
        (*unit).pos_search_bottom = !0;
        bw::add_to_pos_search(unit);
        // Flag 0x2: building -- mark the tiles it occupies.
        if (*unit).flags & 0x2 != 0 {
            let pos = (*(*unit).entity.sprite).position;
            bw::set_building_tile_flag(unit, pos.x as u32, pos.y as u32);
        }
        bw::check_unstack(unit);
        // Flag 0x4: unit participates in repulse (air stacking) logic.
        if (*unit).flags & 0x4 != 0 {
            bw::add_to_repulse_chunk(unit);
        }
    }
}
/// Inverse of `unit_serializable`: rebuilds a `bw::Unit` from its saved
/// form, converting saved ids back into pointers into BW's global arrays.
///
/// Safety: `bw::units`/`bw::orders`/path array must already be in a state
/// where ids can be resolved to pointers.
unsafe fn deserialize_unit(unit: &UnitSerializable) -> Result<bw::Unit, LoadError> {
    // Exhaustive destructuring mirrors `unit_serializable`; a new field in
    // `UnitSerializable` becomes a compile error here.
    let UnitSerializable {
        ref entity,
        shields,
        unit_id,
        unused66,
        next_player_unit,
        prev_player_unit,
        subunit,
        order_queue_begin,
        order_queue_end,
        previous_attacker,
        related,
        highlight_order_count,
        order_wait,
        unk86,
        attack_notify_timer,
        previous_unit_id,
        minimap_draw_counter,
        minimap_draw_color,
        unused8c,
        rank,
        kills,
        last_attacking_player,
        secondary_order_wait,
        ai_spell_flags,
        order_flags,
        buttons,
        invisibility_effects,
        movement_state,
        build_queue,
        energy,
        current_build_slot,
        minor_unique_index,
        secondary_order,
        building_overlay_state,
        build_hp_gain,
        build_shield_gain,
        remaining_build_time,
        previous_hp,
        loaded_units,
        ref unit_specific,
        ref unit_specific2,
        flags,
        carried_powerup_flags,
        wireframe_seed,
        secondary_order_state,
        move_target_update_timer,
        detection_status,
        unke8,
        unkea,
        currently_building,
        next_invisible,
        prev_invisible,
        ref rally_pylon,
        path,
        path_frame,
        pathing_flags,
        _unk106,
        _unk107,
        collision_points,
        spells: UnitSpellsSerializable {
            death_timer,
            defensive_matrix_dmg,
            matrix_timer,
            stim_timer,
            ensnare_timer,
            lockdown_timer,
            irradiate_timer,
            stasis_timer,
            plague_timer,
            is_under_storm,
            irradiated_by,
            irradiate_player,
            parasited_by_players,
            master_spell_timer,
            is_blind,
            maelstrom_timer,
            _unk125,
            acid_spore_count,
            acid_spore_timers,
        },
        bullet_spread_seed,
        _padding132,
        ref ai,
        air_strength,
        ground_strength,
        pos_search_left,
        pos_search_right,
        pos_search_top,
        pos_search_bottom,
        ref repulse,
    } = *unit;
    // Flag 0x2 = building; needed to pick the right unit_specific variant.
    let is_building = flags & 0x2 != 0;
    Ok(bw::Unit {
        entity: deserialize_entity(entity, &ConvertUnits)?,
        shields,
        unit_id,
        unused66,
        next_player_unit: unit_from_id(next_player_unit)?,
        prev_player_unit: unit_from_id(prev_player_unit)?,
        subunit: unit_from_id(subunit)?,
        order_queue_begin: order_from_id(order_queue_begin)?,
        order_queue_end: order_from_id(order_queue_end)?,
        previous_attacker: unit_from_id(previous_attacker)?,
        related: unit_from_id(related)?,
        highlight_order_count,
        order_wait,
        unk86,
        attack_notify_timer,
        previous_unit_id,
        minimap_draw_counter,
        minimap_draw_color,
        unused8c,
        rank,
        kills,
        last_attacking_player,
        secondary_order_wait,
        ai_spell_flags,
        order_flags,
        buttons,
        invisibility_effects,
        movement_state,
        build_queue,
        energy,
        current_build_slot,
        minor_unique_index,
        secondary_order,
        building_overlay_state,
        build_hp_gain,
        build_shield_gain,
        remaining_build_time,
        previous_hp,
        loaded_units,
        unit_specific: unit_specific.clone().deserialize(unit_id, is_building)?,
        unit_specific2: unit_specific2.clone().deserialize(unit_id)?,
        flags,
        carried_powerup_flags,
        wireframe_seed,
        secondary_order_state,
        move_target_update_timer,
        detection_status,
        unke8,
        unkea,
        currently_building: unit_from_id(currently_building)?,
        next_invisible: unit_from_id(next_invisible)?,
        prev_invisible: unit_from_id(prev_invisible)?,
        rally_pylon: rally_pylon.clone().deserialize(unit_id)?,
        path: path_from_id(path)?,
        path_frame,
        pathing_flags,
        _unk106,
        _unk107,
        collision_points,
        spells: bw::UnitSpells {
            death_timer,
            defensive_matrix_dmg,
            matrix_timer,
            stim_timer,
            ensnare_timer,
            lockdown_timer,
            irradiate_timer,
            stasis_timer,
            plague_timer,
            is_under_storm,
            irradiated_by: unit_from_id(irradiated_by)?,
            irradiate_player,
            parasited_by_players,
            master_spell_timer,
            is_blind,
            maelstrom_timer,
            _unk125,
            acid_spore_count,
            acid_spore_timers,
        },
        bullet_spread_seed,
        _padding132,
        ai: ai.to_pointer()?,
        air_strength,
        ground_strength,
        pos_search_left,
        pos_search_right,
        pos_search_top,
        pos_search_bottom,
        repulse: repulse.clone(),
    })
}
pub fn unit_to_id(val: *mut bw::Unit) -> u16 {
unsafe {
if val == null_mut() {
0
} else {
let ptr: *mut bw::Unit = &mut bw::units[0];
let val = (val as usize - ptr as usize) / mem::size_of::<bw::Unit>();
assert!(val < 1700);
val as u16 + 1
}
}
}
/// Inverse of `unit_to_id`: 0 becomes null, 1..=1700 becomes a pointer into
/// `bw::units`, anything larger is a corrupted-save error.
pub fn unit_from_id(val: u16) -> Result<*mut bw::Unit, LoadError> {
    if val == 0 {
        return Ok(null_mut());
    }
    if val > 1700 {
        return Err(LoadError::Corrupted(format!("Invalid unit id 0x{:x}", val)));
    }
    unsafe { Ok(&mut bw::units[val as usize - 1]) }
}
pub fn order_to_id(val: *mut bw::Order) -> u16 {
unsafe {
if val == null_mut() {
0
} else {
let ptr: *mut bw::Order = &mut bw::orders[0];
let val = (val as usize - ptr as usize) / mem::size_of::<bw::Order>();
assert!(val < 2000);
val as u16 + 1
}
}
}
/// Inverse of `order_to_id`: 0 becomes null, 1..=2000 becomes a pointer into
/// `bw::orders`, anything larger is a corrupted-save error.
pub fn order_from_id(val: u16) -> Result<*mut bw::Order, LoadError> {
    if val == 0 {
        return Ok(null_mut());
    }
    if val > 2000 {
        return Err(LoadError::Corrupted(format!("Invalid order id 0x{:x}", val)));
    }
    unsafe { Ok(&mut bw::orders[val as usize - 1]) }
}
pub fn path_to_id(val: *mut bw::Path) -> u16 {
unsafe {
if val == null_mut() {
0
} else {
let ptr: *mut bw::Path = *bw::path_array_start;
let val = (val as usize - ptr as usize) / mem::size_of::<bw::Path>();
val as u16 + 1
}
}
}
/// Inverse of `path_to_id`: 0 becomes null, otherwise index-1 into the
/// path array. No upper-bound validation is done here, unlike
/// `unit_from_id`/`order_from_id`.
pub fn path_from_id(val: u16) -> Result<*mut bw::Path, LoadError> {
    if val == 0 {
        Ok(null_mut())
    } else {
        // NOTE(review): `path_to_id` explicitly derefs `*bw::path_array_start`
        // before arithmetic, while this relies on method auto-deref for
        // `.offset` -- confirm both resolve to the same base pointer.
        unsafe { Ok(bw::path_array_start.offset(val as isize - 1)) }
    }
}
|
//
// https://www.snoyman.com/blog/2018/10/introducing-rust-crash-course
//
// https://doc.rust-lang.org/rust-by-example/scope/raii.html
// https://doc.rust-lang.org/book
// https://learnxinyminutes.com/docs/rust/
// https://science.raphael.poss.name/rust-for-functional-programmers.html
//
/// Prints a small sample vector using its Debug formatting.
fn main() {
    let values = vec![1, 2, 3, 4];
    println!("{:?}", values);
}
|
// 给定一个由整数组成的非空数组所表示的非负整数,在该数的基础上加一。
// 最高位数字存放在数组的首位, 数组中每个元素只存储一个数字。
// 你可以假设除了整数 0 之外,这个整数不会以零开头。
// 示例 1:
// 输入: [1,2,3]
// 输出: [1,2,4]
// 解释: 输入数组表示数字 123。
// 示例 2:
// 输入: [4,3,2,1]
// 输出: [4,3,2,2]
// 解释: 输入数组表示数字 4321。
//
struct Solution {}
impl Solution {
    /// Adds one to a non-negative integer stored as a vector of decimal
    /// digits (most significant digit first), returning the new digit vector.
    ///
    /// Walks from the least significant digit: any trailing 9s roll over to
    /// 0; the first digit below 9 absorbs the carry and we are done. If every
    /// digit was 9 (or the input was empty), a leading 1 is prepended.
    ///
    /// Runs in O(n) with at most one extra allocation-shift, unlike the
    /// previous version which did O(n) front-inserts (O(n^2) total) and
    /// panicked on an empty input (`length - 1` underflow on usize).
    pub fn plus_one(digits: Vec<i32>) -> Vec<i32> {
        let mut result = digits;
        for digit in result.iter_mut().rev() {
            if *digit < 9 {
                // Carry absorbed; higher digits are unchanged.
                *digit += 1;
                return result;
            }
            // 9 rolls over to 0 and the carry propagates left.
            *digit = 0;
        }
        // All digits rolled over (e.g. 999 -> 1000), or input was empty.
        result.insert(0, 1);
        result
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // Covers: simple increment, full carry chain, single-digit rollover.
    #[test]
    fn test_plus_one() {
        assert_eq!(Solution::plus_one(vec![1,2,3]), vec![1,2,4]);
        assert_eq!(Solution::plus_one(vec![9,9,9]), vec![1,0,0,0]);
        assert_eq!(Solution::plus_one(vec![9]), vec![1,0]);
    }
}
use std::fmt;
/// Standard datatype for saving the position of an object in space.
///
/// Plain-old-data: all fields are public `i32`s, so common traits are
/// derived. `Default` yields the origin, matching `new_default`.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Default)]
pub struct Coordinates {
    pub x: i32,
    pub y: i32,
    pub z: i32,
}
impl fmt::Display for Coordinates {
    /// Formats as `[x,y,z]` with no spaces.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "[{},{},{}]", self.x, self.y, self.z)
    }
}
impl Coordinates {
    /// Creates a new set of coordinates at the origin.
    pub fn new_default() -> Coordinates {
        // Delegates to the derived Default instead of repeating zeros.
        Coordinates::default()
    }
    /// Creates a new set of coordinates at a specific place in the universe.
    pub fn new(x: i32, y: i32, z: i32) -> Coordinates {
        Coordinates { x, y, z }
    }
}
|
use anyhow::{Context, Result};
use clap::ArgMatches;
use crate::cfg::Cfg;
use crate::cli::cfg::get_cfg;
use crate::cli::settings::{Settings};
use crate::cli::terminal::message::success;
/// CLI handler for the `use` command (and `use --unset`).
///
/// Resolves which setup/env the user addressed from the CLI arguments,
/// applies the selection to the global project config, and saves it.
pub fn r#use(app: &ArgMatches) -> Result<()> {
    let mut cfg = get_cfg()?;
    // Pull local project state into the global config before reading it.
    cfg.sync_local_to_global()?;
    // Re-bind immutably: everything below only reads `cfg`.
    let cfg = cfg;
    let mut settings: Settings = (&cfg).into();
    if app.is_present("environment") {
        // Both arguments present: the positional is the setup, the flag the env.
        if let Some(setup) = app.value_of_lossy("setup_or_environment") {
            settings.set_setup(setup.to_string());
        }
        if let Some(env) = app.value_of_lossy("environment") {
            settings.set_env(env.to_string());
        }
    } else {
        // Single positional argument: interpreted as an env when an env is
        // already resolvable, otherwise as a setup name.
        // NOTE(review): confirm this disambiguation matches the intended UX.
        if let Some(setup_or_env) = app.value_of_lossy("setup_or_environment") {
            if settings.env().is_ok() {
                settings.set_env(setup_or_env.to_string());
            } else {
                settings.set_setup(setup_or_env.to_string());
            }
        }
    }
    if app.is_present("unset") {
        unuse_workflow(&cfg)?;
        cfg.save()?;
        success("unset current setup");
    } else {
        use_workflow(&cfg, &settings)?;
        cfg.save()?;
        success(format!("your current setup is `{}`", settings).as_str());
    }
    Ok(())
}
/// Clears the currently selected setup on the global project.
pub fn unuse_workflow(cfg: &Cfg) -> Result<()> {
    let project = cfg.current_project()?;
    project.borrow_mut().unset_current_setup();
    Ok(())
}
/// Records `settings`' setup (and env, when one is set) as current on the
/// global project.
///
/// The env is only recorded after its env file has been confirmed to exist.
pub fn use_workflow(cfg: &Cfg, settings: &Settings) -> Result<()> {
    let setup_name = settings.setup()?;
    let setup = cfg.current_setup(setup_name)?;
    let global_project = cfg.current_project()?;
    let mut global_project = global_project.borrow_mut();
    // Reuse the already-resolved name instead of calling settings.setup() twice.
    global_project.set_current_setup_name(setup_name.to_owned());
    if let Ok(env_name) = settings.env() {
        // Validate the env file exists before recording it as current.
        setup
            .env_file(env_name)
            .context(format!("failed to find env {:?}", env_name))?;
        global_project.set_current_env_name(env_name.to_owned());
    }
    Ok(())
}
|
// Copyright 2019 The vault713 Developers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use super::client::BtcNodeClient;
use super::types::{BtcBuyerContext, BtcData, BtcSellerContext};
use crate::grin_core::core::Committed;
use crate::grin_keychain::{Identifier, Keychain, SwitchCommitmentType};
use crate::grin_util::secp::aggsig::export_secnonce_single as generate_nonce;
use crate::grin_util::secp::pedersen;
use crate::grin_util::secp::Message;
use crate::grin_util::Mutex;
use crate::swap::bitcoin::types::BtcTtansaction;
use crate::swap::bitcoin::Output;
use crate::swap::ethereum::*;
use crate::swap::fsm::machine::StateMachine;
use crate::swap::fsm::{buyer_swap, seller_swap};
use crate::swap::message::SecondaryUpdate;
use crate::swap::types::{
BuyerContext, Context, Currency, RoleContext, SecondaryBuyerContext, SecondarySellerContext,
SellerContext, SwapTransactionsConfirmations,
};
use crate::swap::{ErrorKind, SellApi, Swap, SwapApi};
use crate::{NodeClient, Slate};
use bitcoin::{Script, Txid};
use failure::_core::marker::PhantomData;
use std::sync::Arc;
/// SwapApi trait implementation for BTC
#[derive(Clone)]
pub struct BtcSwapApi<'a, C, B>
where
    C: NodeClient + 'a,
    B: BtcNodeClient + 'a,
{
    /// Currency. BTC - it is a BTC family. There are some tweaks for different coins.
    secondary_currency: Currency,
    /// Client for MWC node
    pub node_client: Arc<C>,
    /// Primary Client for BTC electrumx node
    pub btc_node_client1: Arc<Mutex<B>>,
    /// Secondary (failover) Client for BTC electrumx node
    pub btc_node_client2: Arc<Mutex<B>>,
    /// Ties `'a` and `C` to the struct without storing a reference.
    phantom: PhantomData<&'a C>,
}
impl<'a, C, B> BtcSwapApi<'a, C, B>
where
C: NodeClient + 'a,
B: BtcNodeClient + 'a,
{
    /// Create BTC Swap API instance.
    /// `btc_node_client2` acts as a failover for `btc_node_client1`.
    pub fn new(
        secondary_currency: Currency,
        node_client: Arc<C>,
        btc_node_client1: Arc<Mutex<B>>,
        btc_node_client2: Arc<Mutex<B>>,
    ) -> Self {
        Self {
            secondary_currency,
            node_client,
            btc_node_client1,
            btc_node_client2,
            phantom: PhantomData,
        }
    }
    /// For tests doesn't make sense to use any failover:
    /// the same client is used as both primary and secondary.
    pub fn new_test(node_client: Arc<C>, btc_node_client: Arc<Mutex<B>>) -> Self {
        Self {
            secondary_currency: Currency::Btc,
            node_client,
            btc_node_client1: btc_node_client.clone(),
            btc_node_client2: btc_node_client,
            phantom: PhantomData,
        }
    }
    /// Clone instance.
    ///
    /// NOTE(review): inherent method shadowing `Clone::clone`. Presumably kept
    /// because the derived impl requires `C: Clone, B: Clone`, which the node
    /// clients may not implement; the Arcs here clone regardless -- confirm.
    pub fn clone(&self) -> Self {
        Self {
            secondary_currency: self.secondary_currency.clone(),
            node_client: self.node_client.clone(),
            btc_node_client1: self.btc_node_client1.clone(),
            btc_node_client2: self.btc_node_client2.clone(),
            phantom: PhantomData,
        }
    }
    /// Builds the BTC lock script from the swap's redeem public key and the
    /// secondary lock time. Errors if `redeem_public` is not defined yet.
    pub(crate) fn script(&self, swap: &Swap) -> Result<Script, ErrorKind> {
        let btc_data = swap.secondary_data.unwrap_btc()?;
        Ok(btc_data.script(
            swap.redeem_public
                .as_ref()
                .ok_or(ErrorKind::UnexpectedAction(
                    "swap.redeem_public value is not defined. Method BtcSwapApi::script"
                        .to_string(),
                ))?,
            swap.get_time_secondary_lock_script() as u64,
        )?)
    }
    /// Check BTC amount at the chain.
    ///
    /// Splits the unspent outputs of the lock address into pending (mempool
    /// or below `confirmations_needed`) and confirmed amounts. Falls back to
    /// the secondary electrumx client if the primary fails.
    ///
    /// Returns (pending_amount, confirmed_amount, least_confirmations,
    /// on-chain outputs). Mempool-only outputs are NOT included in the
    /// returned output list.
    pub(crate) fn btc_balance(
        &self,
        swap: &Swap,
        input_script: &Script,
        confirmations_needed: u64,
    ) -> Result<(u64, u64, u64, Vec<Output>), ErrorKind> {
        let btc_data = swap.secondary_data.unwrap_btc()?;
        let address = btc_data.address(self.secondary_currency, input_script, swap.network)?;
        debug_assert!(address.len() > 0);
        // Primary client first, failover to the secondary on any error.
        let outputs = match self
            .btc_node_client1
            .lock()
            .unspent(self.secondary_currency, &address[0])
        {
            Ok(r) => r,
            Err(_) => self
                .btc_node_client2
                .lock()
                .unspent(self.secondary_currency, &address[0])?,
        };
        let height = match self.btc_node_client1.lock().height() {
            Ok(r) => r,
            Err(_) => self.btc_node_client2.lock().height()?,
        };
        let mut pending_amount = 0;
        let mut confirmed_amount = 0;
        let mut least_confirmations = None;
        let mut confirmed_outputs = Vec::new();
        for output in outputs {
            if output.height == 0 {
                // Output in mempool
                least_confirmations = Some(0);
                pending_amount += output.value;
            } else {
                let confirmations = height.saturating_sub(output.height) + 1;
                if confirmations >= confirmations_needed {
                    // Enough confirmations
                    confirmed_amount += output.value;
                } else {
                    // Not yet enough confirmations
                    if least_confirmations
                        .map(|least| confirmations < least)
                        .unwrap_or(true)
                    {
                        least_confirmations = Some(confirmations);
                    }
                    pending_amount += output.value;
                }
                confirmed_outputs.push(output);
            }
        }
        Ok((
            pending_amount,
            confirmed_amount,
            least_confirmations.unwrap_or(0),
            confirmed_outputs,
        ))
    }
    /// Seller builds the transaction to redeem their Bitcoins, Status::Redeem.
    /// Updating data: swap.secondary_data.redeem_tx
    ///
    /// Spends all on-chain lock outputs to the seller's redeem address,
    /// signing with both the cosign key and the computed redeem secret.
    fn seller_build_redeem_tx<K: Keychain>(
        &self,
        keychain: &K,
        swap: &Swap,
        context: &Context,
        input_script: &Script,
    ) -> Result<BtcTtansaction, ErrorKind> {
        let cosign_id = &context.unwrap_seller()?.unwrap_btc()?.cosign;
        let redeem_address_str = swap.unwrap_seller()?.0.clone();
        self.secondary_currency
            .validate_address(&redeem_address_str)?;
        let cosign_secret = keychain.derive_key(0, cosign_id, SwitchCommitmentType::None)?;
        let redeem_secret = SellApi::calculate_redeem_secret(keychain, swap)?;
        // 0 confirmations: the seller wants to sweep everything available.
        let (pending_amount, confirmed_amount, _, mut conf_outputs) =
            self.btc_balance(swap, input_script, 0)?;
        if pending_amount + confirmed_amount == 0 {
            return Err(ErrorKind::Generic(
                "Not found outputs to redeem. Probably Buyer already refund it".to_string(),
            ));
        }
        // Sort needed for transaction hash stabilization. We want all calls return the same Hash
        conf_outputs.sort_by(|a, b| a.out_point.txid.cmp(&b.out_point.txid));
        let secondary_currency = self.secondary_currency.clone();
        let secp = keychain.secp();
        let redeem_script_sig = |msg: &Message| {
            BtcData::redeem_script_sig(
                &secondary_currency,
                input_script,
                &mut secp.sign(msg, &cosign_secret)?,
                &mut secp.sign(msg, &redeem_secret)?,
            )
        };
        let (btc_transaction, _, _, _) = BtcData::spend_lock_transaction(
            &self.secondary_currency,
            &redeem_address_str,
            &input_script,
            swap.secondary_fee,
            0,
            &conf_outputs,
            redeem_script_sig,
        )?;
        Ok(btc_transaction)
    }
    /// Buyer builds (and optionally posts) the transaction refunding the lock
    /// outputs back to `refund_address` after the lock time has passed.
    /// Updating data: swap.secondary_data.refund_tx / tx_fee.
    fn buyer_refund<K: Keychain>(
        &self,
        keychain: &K,
        context: &Context,
        swap: &mut Swap,
        refund_address: &String,
        input_script: &Script,
        post_tx: bool,
    ) -> Result<(), ErrorKind> {
        // 0 confirmations: refund wants to sweep everything available.
        let (pending_amount, confirmed_amount, _, conf_outputs) =
            self.btc_balance(swap, input_script, 0)?;
        if pending_amount + confirmed_amount == 0 {
            return Err(ErrorKind::Generic(
                "Not found outputs to refund. Probably Seller already redeem it".to_string(),
            ));
        }
        let refund_key = keychain.derive_key(
            0,
            &context.unwrap_buyer()?.unwrap_btc()?.refund,
            SwitchCommitmentType::None,
        )?;
        // Lock time must be set on the refund tx for the script path to validate.
        let btc_lock_time = swap.get_time_secondary_lock_script();
        let secp = keychain.secp();
        let secondary_currency = self.secondary_currency.clone();
        let refund_script_sig = |msg: &Message| {
            BtcData::refund_script_sig(
                &secondary_currency,
                &mut secp.sign(msg, &refund_key)?,
                input_script,
            )
        };
        let (refund_tx, _, _, _) = BtcData::spend_lock_transaction(
            &self.secondary_currency,
            refund_address,
            input_script,
            swap.secondary_fee,
            btc_lock_time,
            &conf_outputs,
            refund_script_sig,
        )?;
        let tx = refund_tx.tx.clone();
        if post_tx {
            // Primary client first, failover to the secondary on error.
            if let Err(_) = self.btc_node_client1.lock().post_tx(tx.clone()) {
                self.btc_node_client2.lock().post_tx(tx)?;
            }
        }
        let btc_data = swap.secondary_data.unwrap_btc_mut()?;
        btc_data.refund_tx = Some(refund_tx.txid);
        btc_data.tx_fee = Some(swap.secondary_fee);
        Ok(())
    }
fn get_slate_confirmation_number(
&self,
mwc_tip: &u64,
slate: &Slate,
outputs_ok: bool,
) -> Result<Option<u64>, ErrorKind> {
let result: Option<u64> = if slate.tx.kernels().is_empty() {
None
} else {
debug_assert!(slate.tx.kernels().len() == 1);
let kernel = &slate.tx.kernels()[0].excess;
if kernel.0.to_vec().iter().any(|v| *v != 0) {
// kernel is non zero - we can check transaction by kernel
match self
.node_client
.get_kernel(kernel, Some(slate.height), None)?
{
Some((_tx_kernel, height, _mmr_index)) => {
Some(mwc_tip.saturating_sub(height) + 1)
}
None => None,
}
} else {
if outputs_ok {
// kernel is not valid, still can use outputs.
let wallet_outputs: Vec<pedersen::Commitment> = slate.tx.outputs_committed();
let res = self.node_client.get_outputs_from_node(&wallet_outputs)?;
let height = res.values().map(|v| v.1).max();
match height {
Some(h) => Some(mwc_tip.saturating_sub(h) + 1),
None => None,
}
} else {
None
}
}
};
Ok(result)
}
    /// Retrieve confirmation number for BTC transaction.
    ///
    /// `None` -> no hash or transaction unknown; `Some(0)` -> in mempool;
    /// otherwise `btc_tip - height + 1`. Falls back to the secondary
    /// electrumx client if the primary fails.
    fn get_btc_confirmation_number(
        &self,
        btc_tip: &u64,
        tx_hash: Option<Txid>,
    ) -> Result<Option<u64>, ErrorKind> {
        let result: Option<u64> = match tx_hash {
            None => None,
            Some(tx_hash) => {
                let height = match self.btc_node_client1.lock().transaction(&tx_hash) {
                    Ok(h) => h,
                    Err(_) => self.btc_node_client2.lock().transaction(&tx_hash)?,
                };
                match height {
                    None => None,
                    Some(0) => Some(0),
                    Some(h) => Some(btc_tip.saturating_sub(h) + 1),
                }
            }
        };
        Ok(result)
    }
}
impl<'a, K, C, B> SwapApi<K> for BtcSwapApi<'a, C, B>
where
K: Keychain + 'a,
C: NodeClient + 'a,
B: BtcNodeClient + 'a,
{
fn context_key_count(
&mut self,
_keychain: &K,
secondary_currency: Currency,
_is_seller: bool,
) -> Result<usize, ErrorKind> {
match secondary_currency.is_btc_family() {
true => Ok(4),
_ => return Err(ErrorKind::UnexpectedCoinType),
}
}
    /// Builds the swap `Context` for a seller or buyer, consuming derived
    /// keys in a fixed order.
    ///
    /// NOTE(review): `keys.next().unwrap()` assumes the caller supplied at
    /// least `context_key_count` (4) keys -- panics otherwise.
    fn create_context(
        &mut self,
        keychain: &K,
        _ethereum_wallet: Option<&EthereumWallet>,
        secondary_currency: Currency,
        is_seller: bool,
        inputs: Option<Vec<(Identifier, Option<u64>, u64)>>,
        change_amount: u64,
        keys: Vec<Identifier>,
        parent_key_id: Identifier,
    ) -> Result<Context, ErrorKind> {
        match secondary_currency.is_btc_family() {
            true => (),
            _ => return Err(ErrorKind::UnexpectedCoinType),
        }
        let secp = keychain.secp();
        let mut keys = keys.into_iter();
        let role_context = if is_seller {
            // Seller: change, refund and cosign keys, plus MWC inputs.
            RoleContext::Seller(SellerContext {
                parent_key_id: parent_key_id,
                inputs: inputs.ok_or(ErrorKind::UnexpectedRole(
                    "Fn create_context() for seller not found inputs".to_string(),
                ))?,
                change_output: keys.next().unwrap(),
                change_amount,
                refund_output: keys.next().unwrap(),
                secondary_context: SecondarySellerContext::Btc(BtcSellerContext {
                    cosign: keys.next().unwrap(),
                }),
            })
        } else {
            // Buyer: output, redeem and BTC refund keys.
            RoleContext::Buyer(BuyerContext {
                parent_key_id: parent_key_id,
                output: keys.next().unwrap(),
                redeem: keys.next().unwrap(),
                secondary_context: SecondaryBuyerContext::Btc(BtcBuyerContext {
                    refund: keys.next().unwrap(),
                }),
            })
        };
        Ok(Context {
            multisig_key: keys.next().unwrap(),
            multisig_nonce: generate_nonce(secp)?,
            lock_nonce: generate_nonce(secp)?,
            refund_nonce: generate_nonce(secp)?,
            redeem_nonce: generate_nonce(secp)?,
            role_context,
        })
    }
    /// Seller creates a swap offer.
    ///
    /// Validates the secondary redeem address and currency family, delegates
    /// to `SellApi::create_swap_offer`, then attaches a fresh `BtcData`
    /// built from the seller context as the swap's secondary data.
    fn create_swap_offer(
        &mut self,
        keychain: &K,
        context: &Context,
        primary_amount: u64,
        secondary_amount: u64,
        secondary_currency: Currency,
        secondary_redeem_address: String,
        seller_lock_first: bool,
        mwc_confirmations: u64,
        secondary_confirmations: u64,
        message_exchange_time_sec: u64,
        redeem_time_sec: u64,
        communication_method: String,
        buyer_destination_address: String,
        electrum_node_uri1: Option<String>,
        electrum_node_uri2: Option<String>,
        eth_swap_contract_address: Option<String>,
        erc20_swap_contract_address: Option<String>,
        eth_infura_project_id: Option<String>,
        _eth_redirect_out_wallet: Option<bool>,
        dry_run: bool,
        tag: Option<String>,
    ) -> Result<Swap, ErrorKind> {
        // Checking if address is valid
        secondary_currency
            .validate_address(&secondary_redeem_address)
            .map_err(|e| {
                ErrorKind::Generic(format!(
                    "Unable to parse secondary currency redeem address {}, {}",
                    secondary_redeem_address, e
                ))
            })?;
        match secondary_currency.is_btc_family() {
            true => (),
            _ => return Err(ErrorKind::UnexpectedCoinType),
        }
        let height = self.node_client.get_chain_tip()?.0;
        let mut swap = SellApi::create_swap_offer(
            keychain,
            context,
            primary_amount,
            secondary_amount,
            secondary_currency,
            secondary_redeem_address,
            height,
            seller_lock_first,
            mwc_confirmations,
            secondary_confirmations,
            message_exchange_time_sec,
            redeem_time_sec,
            communication_method,
            buyer_destination_address,
            electrum_node_uri1,
            electrum_node_uri2,
            eth_swap_contract_address,
            erc20_swap_contract_address,
            eth_infura_project_id,
            Some(false),
            dry_run,
            tag,
        )?;
        let btc_data = BtcData::new(keychain, context.unwrap_seller()?.unwrap_btc()?)?;
        swap.secondary_data = btc_data.wrap();
        Ok(swap)
    }
    /// Build secondary update part of the offer message.
    /// Panics (expect) if the swap's secondary data is not BTC.
    fn build_offer_message_secondary_update(
        &self,
        _keychain: &K, // To make compiler happy
        swap: &mut Swap,
    ) -> SecondaryUpdate {
        let btc_data = swap
            .secondary_data
            .unwrap_btc()
            .expect("Secondary data of unexpected type");
        SecondaryUpdate::BTC(btc_data.offer_update())
    }
    /// Build secondary update part of the accept offer message.
    /// Panics (expect) if the swap's secondary data is not BTC.
    fn build_accept_offer_message_secondary_update(
        &self,
        _keychain: &K, // To make compiler happy
        swap: &mut Swap,
    ) -> SecondaryUpdate {
        let btc_data = swap
            .secondary_data
            .unwrap_btc()
            .expect("Secondary data of unexpected type");
        SecondaryUpdate::BTC(btc_data.accept_offer_update())
    }
    /// Seller-side: builds the BTC redeem transaction and optionally posts
    /// it, recording its txid and fee on the swap's secondary data.
    fn publish_secondary_transaction(
        &self,
        keychain: &K,
        swap: &mut Swap,
        context: &Context,
        post_tx: bool,
    ) -> Result<(), ErrorKind> {
        assert!(swap.is_seller());
        let input_script = self.script(swap)?;
        let btc_tx = self.seller_build_redeem_tx(keychain, swap, context, &input_script)?;
        if post_tx {
            // Primary client first, failover to the secondary on error.
            if let Err(_) = self.btc_node_client1.lock().post_tx(btc_tx.tx.clone()) {
                self.btc_node_client2.lock().post_tx(btc_tx.tx)?;
            }
        }
        let btc_data = swap.secondary_data.unwrap_btc_mut()?;
        btc_data.redeem_tx = Some(btc_tx.txid);
        btc_data.tx_fee = Some(swap.secondary_fee);
        Ok(())
    }
    /// Request confirmation numbers for all transactions that are known and in the swap.
    ///
    /// Gathers MWC (lock/redeem/refund slates) and BTC (redeem/refund/lock)
    /// confirmation counts in one pass; lock-address outputs are also summed
    /// so callers can detect over/under-funding.
    fn request_tx_confirmations(
        &self,
        _keychain: &K, // keychain is kept for Type. Compiler need to understand all types
        swap: &Swap,
    ) -> Result<SwapTransactionsConfirmations, ErrorKind> {
        let mwc_tip = self.node_client.get_chain_tip()?.0;
        let is_seller = swap.is_seller();
        // outputs_ok flags which party's slates can be checked via outputs.
        let mwc_lock_conf =
            self.get_slate_confirmation_number(&mwc_tip, &swap.lock_slate, !is_seller)?;
        let mwc_redeem_conf =
            self.get_slate_confirmation_number(&mwc_tip, &swap.redeem_slate, is_seller)?;
        let mwc_refund_conf =
            self.get_slate_confirmation_number(&mwc_tip, &swap.refund_slate, !is_seller)?;
        let btc_tip = match self.btc_node_client1.lock().height() {
            Ok(r) => r,
            Err(_) => self.btc_node_client2.lock().height()?,
        };
        let btc_data = swap.secondary_data.unwrap_btc()?;
        let secondary_redeem_conf = self.get_btc_confirmation_number(
            &btc_tip,
            btc_data.redeem_tx.as_ref().map(|h| h.clone().into()),
        )?;
        let secondary_refund_conf = self.get_btc_confirmation_number(
            &btc_tip,
            btc_data.refund_tx.as_ref().map(|h| h.clone().into()),
        )?;
        // BTC lock account...
        // Checking Amount, it can be too high as well
        let mut secondary_lock_amount = 0;
        let mut least_confirmations = None;
        // Best effort: if the script/address can't be built yet, report zeros.
        if let Ok(input_script) = self.script(swap) {
            if let Ok(address) =
                btc_data.address(swap.secondary_currency, &input_script, swap.network)
            {
                debug_assert!(address.len() > 0);
                let outputs = match self
                    .btc_node_client1
                    .lock()
                    .unspent(swap.secondary_currency, &address[0])
                {
                    Ok(r) => r,
                    Err(_) => self
                        .btc_node_client2
                        .lock()
                        .unspent(swap.secondary_currency, &address[0])?,
                };
                for output in outputs {
                    secondary_lock_amount += output.value;
                    if output.height == 0 {
                        // Output in mempool
                        least_confirmations = Some(0);
                    } else {
                        let confirmations = btc_tip.saturating_sub(output.height) + 1;
                        if confirmations < least_confirmations.unwrap_or(std::i32::MAX as u64) {
                            least_confirmations = Some(confirmations);
                        }
                    }
                }
            }
        }
        Ok(SwapTransactionsConfirmations {
            mwc_tip,
            mwc_lock_conf,
            mwc_redeem_conf,
            mwc_refund_conf,
            secondary_tip: btc_tip,
            secondary_lock_conf: least_confirmations,
            secondary_lock_amount,
            secondary_redeem_conf,
            secondary_refund_conf,
        })
    }
    /// Check how much BTC is locked on the chain.
    /// Return outputs with at least 1 confirmation because they are needed for refunds or redeems. Both parties want to take everything.
    /// Return: (<pending_amount>, <confirmed_amount>, <least_confirmations>)
    fn request_secondary_lock_balance(
        &self,
        swap: &Swap,
        confirmations_needed: u64,
    ) -> Result<(u64, u64, u64), ErrorKind> {
        let input_script = self.script(swap)?;
        let (pending_amount, confirmed_amount, least_confirmations, _outputs) =
            self.btc_balance(swap, &input_script, confirmations_needed)?;
        Ok((pending_amount, confirmed_amount, least_confirmations))
    }
    // Build state machine that match the swap data.
    // Registers the full set of seller or buyer states; the swap api and
    // keychain are shared between states via Arc.
    fn get_fsm(&self, keychain: &K, swap: &Swap) -> StateMachine {
        let kc = Arc::new(keychain.clone());
        let nc = self.node_client.clone();
        let b: Box<dyn SwapApi<K> + 'a> = Box::new((*self).clone());
        let swap_api = Arc::new(b);
        if swap.is_seller() {
            StateMachine::new(vec![
                Box::new(seller_swap::SellerOfferCreated::new()),
                Box::new(seller_swap::SellerSendingOffer::new(
                    kc.clone(),
                    swap_api.clone(),
                )),
                Box::new(seller_swap::SellerWaitingForAcceptanceMessage::new(
                    kc.clone(),
                )),
                Box::new(seller_swap::SellerWaitingForBuyerLock::new(
                    swap_api.clone(),
                )),
                Box::new(seller_swap::SellerPostingLockMwcSlate::new(nc.clone())),
                Box::new(seller_swap::SellerWaitingForLockConfirmations::new(
                    kc.clone(),
                    swap_api.clone(),
                )),
                Box::new(seller_swap::SellerWaitingForInitRedeemMessage::new(
                    kc.clone(),
                )),
                Box::new(seller_swap::SellerSendingInitRedeemMessage::new(nc.clone())),
                Box::new(seller_swap::SellerWaitingForBuyerToRedeemMwc::new(
                    nc.clone(),
                )),
                Box::new(seller_swap::SellerRedeemSecondaryCurrency::new(
                    kc.clone(),
                    nc.clone(),
                    swap_api.clone(),
                )),
                Box::new(seller_swap::SellerWaitingForRedeemConfirmations::new(
                    nc.clone(),
                    swap_api.clone(),
                )),
                Box::new(seller_swap::SellerSwapComplete::new()),
                Box::new(seller_swap::SellerWaitingForRefundHeight::new(nc.clone())),
                Box::new(seller_swap::SellerPostingRefundSlate::new(nc.clone())),
                Box::new(seller_swap::SellerWaitingForRefundConfirmations::new()),
                Box::new(seller_swap::SellerCancelledRefunded::new()),
                Box::new(seller_swap::SellerCancelled::new()),
            ])
        } else {
            StateMachine::new(vec![
                Box::new(buyer_swap::BuyerOfferCreated::new()),
                Box::new(buyer_swap::BuyerSendingAcceptOfferMessage::new(
                    kc.clone(),
                    swap_api.clone(),
                )),
                Box::new(buyer_swap::BuyerWaitingForSellerToLock::new()),
                Box::new(buyer_swap::BuyerPostingSecondaryToMultisigAccount::new(
                    swap_api.clone(),
                )),
                Box::new(buyer_swap::BuyerWaitingForLockConfirmations::new(
                    kc.clone(),
                    swap_api.clone(),
                )),
                Box::new(buyer_swap::BuyerSendingInitRedeemMessage::new()),
                Box::new(buyer_swap::BuyerWaitingForRespondRedeemMessage::new(
                    kc.clone(),
                )),
                Box::new(buyer_swap::BuyerRedeemMwc::new(nc.clone())),
                Box::new(buyer_swap::BuyerWaitForRedeemMwcConfirmations::new()),
                Box::new(buyer_swap::BuyerSwapComplete::new()),
                Box::new(buyer_swap::BuyerWaitingForRefundTime::new()),
                Box::new(buyer_swap::BuyerPostingRefundForSecondary::new(
                    kc.clone(),
                    swap_api.clone(),
                )),
                Box::new(buyer_swap::BuyerWaitingForRefundConfirmations::new(
                    swap_api.clone(),
                )),
                Box::new(buyer_swap::BuyerCancelledRefunded::new()),
                Box::new(buyer_swap::BuyerCancelled::new()),
            ])
        }
    }
/// Get the secondary-currency lock addresses for this swap.
/// Several addresses can be returned because the same lock script may be
/// representable in more than one address format.
fn get_secondary_lock_address(&self, swap: &Swap) -> Result<Vec<String>, ErrorKind> {
    let input_script = self.script(swap)?;
    let btc_data = swap.secondary_data.unwrap_btc()?;
    let addresses = btc_data.address(swap.secondary_currency, &input_script, swap.network)?;
    Ok(addresses)
}
/// Check if tx fee for the secondary is different from the posted one.
/// Returns `Ok(true)` when the currently configured `swap.secondary_fee`
/// differs from the fee recorded at posting time (`tx_fee`); note this is
/// also `true` when nothing has been posted yet (`tx_fee` is `None`).
fn is_secondary_tx_fee_changed(&self, swap: &Swap) -> Result<bool, ErrorKind> {
    Ok(swap.secondary_data.unwrap_btc()?.tx_fee != Some(swap.secondary_fee))
}
/// Post BTC refund transaction.
///
/// Validates `refund_address`, builds the buyer refund transaction that
/// returns the locked secondary-currency funds, and broadcasts it when
/// `post_tx` is true. Buyer-side only; asserts if called for a seller swap.
fn post_secondary_refund_tx(
    &self,
    keychain: &K,
    context: &Context,
    swap: &mut Swap,
    refund_address: Option<String>,
    post_tx: bool,
) -> Result<(), ErrorKind> {
    assert!(!swap.is_seller());
    // `ok_or_else` so the error String is only allocated on the failure path.
    let refund_address_str = refund_address.ok_or_else(|| {
        ErrorKind::Generic("Please define refund address".to_string())
    })?;
    swap.secondary_currency
        .validate_address(&refund_address_str)?;
    let input_script = self.script(swap)?;
    self.buyer_refund(
        keychain,
        context,
        swap,
        &refund_address_str,
        &input_script,
        post_tx,
    )?;
    Ok(())
}
/// Deposit secondary currency to the lock account.
/// NOTE(review): intentionally a no-op in this implementation — presumably
/// the lock address is funded from an external wallet; confirm with the
/// trait's other implementors.
fn post_secondary_lock_tx(&self, _swap: &mut Swap) -> Result<(), ErrorKind> {
    Ok(())
}
/// Transfer amount to a dedicated address.
/// NOTE(review): intentionally a no-op here; the method name has a typo
/// (`transfer_scondary`) but it is part of the trait interface and cannot
/// be renamed in this block alone.
fn transfer_scondary(&self, _swap: &mut Swap) -> Result<(), ErrorKind> {
    Ok(())
}
/// Validate clients. We want to be sure that the clients are able to access
/// the servers: both ElectrumX node clients are probed by requesting the
/// current chain height.
fn test_client_connections(&self) -> Result<(), ErrorKind> {
    // Each client is locked in its own scope so the mutex guard is released
    // before the next client is probed.
    {
        let mut c = self.btc_node_client1.lock();
        let name = c.name();
        let _ = c.height().map_err(|e| {
            ErrorKind::ElectrumNodeClient(format!(
                "Unable to contact the primary ElectrumX client {}, {}",
                name, e
            ))
        })?;
    }
    {
        let mut c = self.btc_node_client2.lock();
        let name = c.name();
        let _ = c.height().map_err(|e| {
            ErrorKind::ElectrumNodeClient(format!(
                "Unable to contact the secondary ElectrumX client {}, {}",
                name, e
            ))
        })?;
    }
    Ok(())
}
}
|
#[cfg(test)]
mod tests {
    use super::*;

    /// Harness sanity check.
    #[test]
    fn it_works() {
        assert_eq!(4, 2 + 2);
    }

    /// Positive operands pass through unchanged.
    #[test]
    fn test_add() {
        assert_eq!(add(1, 2), 3);
    }

    /// Zero is the boundary of the clamp.
    #[test]
    fn test_add_zero() {
        assert_eq!(add(0, 0), 0);
    }

    /// Negative sums are clamped to zero.
    #[test]
    fn test_add_under_zero() {
        assert_eq!(add(-1, -1), 0);
    }

    /// Same clamping behaviour for the float variant.
    #[test]
    fn test_add_double() {
        assert_eq!(add_double(1.0, 0.2), 1.2);
        assert_eq!(add_double(0.0, 0.0), 0.0);
        assert_eq!(add_double(-1.0, -1.0), 0.0);
    }

    /// Joined strings are trimmed, so empty inputs leave no stray spaces.
    #[test]
    fn test_add_str() {
        assert_eq!(add_str("masuda", "tomoaki"), "masuda tomoaki");
        assert_eq!(add_str("masuda", ""), "masuda");
        assert_eq!(add_str("", ""), "");
    }
}
/// Adds two integers, clamping any negative result up to zero.
fn add(x: i32, y: i32) -> i32 {
    (x + y).max(0)
}
/// Adds two floats; a sum below zero is clamped to 0.0.
/// Uses an explicit `< 0.0` guard (not `f32::max`) so a NaN sum propagates
/// unchanged, exactly as the comparison-based original did.
fn add_double(x: f32, y: f32) -> f32 {
    let sum = x + y;
    if sum < 0.0 {
        return 0.0;
    }
    sum
}
/// Joins `x` and `y` with a single space, then trims surrounding whitespace
/// (so an empty operand does not leave a stray leading/trailing space).
fn add_str(x: &str, y: &str) -> String {
    [x, y].join(" ").trim().to_string()
}
|
use SmolCommon::component::*;
use super::Entity;
use bit_vec::BitVec;
use std::iter::FilterMap;
/// Stores components as a normal vector
pub struct VecStorage<T>{
    /// Dense slot array indexed by entity id; `None` marks an empty slot.
    storage: Vec<Option<T>>,
    /// Occupancy mirror of `storage`: bit `i` is set when slot `i` holds a component.
    valid: BitVec,
}
impl<T> VecStorage<T> {
    /// Creates an empty `VecStorage` with no slots allocated.
    pub fn new() -> Self {
        let storage = Vec::new();
        let valid = BitVec::new();
        Self { storage, valid }
    }
}
impl<T: Component> ComponentStorage<T> for VecStorage<T> {
    /// Gets a reference to a component at the given index (entity).
    ///
    /// Fix: the original called `.unwrap()` on the slot lookup, so asking for
    /// an entity index that was never set panicked instead of returning
    /// `None` (the contract `delete` already honours via its bounds check).
    fn get<'cs>(&'cs self, entity: &usize) -> Option<&'cs T> {
        self.storage.get(*entity)?.as_ref()
    }
    /// Gets a mutable reference to a component at the given index (entity);
    /// same out-of-range fix as `get`.
    fn get_mut<'cs>(&'cs mut self, entity: &usize) -> Option<&'cs mut T> {
        self.storage.get_mut(*entity)?.as_mut()
    }
    /// Iterates over every slot, yielding its validity flag and component.
    fn iter<'cs>(&'cs self) -> Box<(dyn Iterator<Item = (bool, Option<&'cs T>)> + 'cs)> {
        Box::new(
            self.valid
                .iter()
                .zip(self.storage.iter())
                .map(|(v, comp)| (v, comp.as_ref())),
        )
    }
    /// Mutably iterates over every slot, yielding its validity flag and component.
    fn iter_mut<'cs>(&'cs mut self) -> Box<(dyn Iterator<Item = (bool, Option<&'cs mut T>)> + 'cs)> {
        Box::new(
            self.valid
                .iter()
                .zip(self.storage.iter_mut())
                .map(|(v, comp)| (v, comp.as_mut())),
        )
    }
    /// Puts a component at the given index, growing the storage as needed.
    fn set<'cs>(&'cs mut self, entity: &usize, comp: T) {
        // Grows one slot at a time; this is O(gap) but entities are normally
        // allocated densely, so the loop rarely runs more than once.
        while *entity >= self.storage.len() {
            self.storage.push(None);
            self.valid.push(false);
        }
        *self.storage.get_mut(*entity).unwrap() = Some(comp);
        self.valid.set(*entity, true);
    }
    /// Clears the component slot for the entity, if the index is in range.
    fn delete<'cs>(&'cs mut self, entity: &usize) {
        if *entity < self.storage.len() {
            *self.storage.get_mut(*entity).unwrap() = None;
            self.valid.set(*entity, false);
        }
    }
}
#[cfg(test)]
mod test{
    use super::*;
    // Insert ten components and confirm iteration yields them in slot order.
    #[test]
    fn create_insert_iter(){
        let mut storage = VecStorage::new();
        for i in 0..10{
            let e = Entity{index: i, generation: 0};
            storage.set(&e.index, i);
        }
        for (n, i) in storage.iter().enumerate(){
            // `valid` is unused here; only the payload is checked.
            let (valid, num) = i;
            assert_eq!(n, *num.unwrap());
        }
    }
    // Delete slot 0, mutate the survivors through iter_mut, then verify the
    // doubled values and that the deleted slot stays invalid.
    #[test]
    fn create_insert_delete_iter(){
        let mut storage = VecStorage::new();
        for i in 0..10{
            let e = Entity{index: i, generation: 0};
            storage.set(&e.index, i);
        }
        storage.delete(&Entity{index: 0, generation: 0}.index);
        for (n, i) in storage.iter_mut().enumerate(){
            let (valid, num) = i;
            if n == 0{
                assert_eq!(valid, false);
                continue;
            }
            let reference = num.unwrap();
            assert_eq!(n, *reference);
            *reference *= 2;
        }
        for (n, i) in storage.iter().enumerate(){
            let (valid, num) = i;
            if n == 0{
                assert_eq!(valid, false);
                continue;
            }
            assert_eq!((n)* 2, *num.unwrap());
        }
    }
} |
use std::{fmt, cmp};
use std::cmp::{PartialOrd, Ord, Ordering};
use std::hash::{Hash, Hasher};
use std::convert::TryFrom;
use pct_str::PctStr;
use crate::parsing;
use super::Error;
#[derive(Clone, Copy)]
/// An IRI fragment component (the part after `#`), borrowed from its source string.
pub struct Fragment<'a> {
    /// The fragment slice.
    /// Invariant: valid UTF-8 and a well-formed percent-encoded fragment,
    /// enforced by the `TryFrom<&str>` constructor — TODO confirm no other
    /// crate-internal construction path bypasses that validation.
    pub(crate) data: &'a [u8]
}
impl<'a> Fragment<'a> {
    /// Get the underlying fragment slice as raw bytes.
    #[inline]
    pub fn as_ref(&self) -> &[u8] {
        self.data
    }
    /// Get the underlying fragment slice as a string slice.
    #[inline]
    pub fn as_str(&self) -> &str {
        unsafe {
            // SAFETY: `data` is only populated from a `&str` that passed
            // `parsing::parse_fragment` (see `TryFrom<&str>`), so it is valid
            // UTF-8 — TODO confirm crate-internal constructors uphold this.
            std::str::from_utf8_unchecked(&self.data)
        }
    }
    /// Get the underlying fragment slice as a percent-encoded string slice.
    #[inline]
    pub fn as_pct_str(&self) -> &PctStr {
        unsafe {
            // SAFETY: the fragment grammar accepted by `parse_fragment`
            // presumably guarantees well-formed percent-encoding — confirm
            // against the `pct-str` validity requirements.
            PctStr::new_unchecked(self.as_str())
        }
    }
    /// Checks if the fragment is empty.
    #[inline]
    pub fn is_empty(&self) -> bool {
        self.data.is_empty()
    }
}
impl<'a> TryFrom<&'a str> for Fragment<'a> {
    type Error = Error;
    /// Validates the whole of `str` as an IRI fragment; rejects input with
    /// trailing characters the fragment grammar does not consume.
    #[inline]
    fn try_from(str: &'a str) -> Result<Fragment<'a>, Error> {
        let parsed_len = parsing::parse_fragment(str.as_ref(), 0)?;
        if parsed_len >= str.len() {
            Ok(Fragment { data: str.as_ref() })
        } else {
            Err(Error::InvalidFragment)
        }
    }
}
impl<'a> fmt::Display for Fragment<'a> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Delegates to `str`'s Display, preserving caller width/precision flags.
        self.as_str().fmt(f)
    }
}
impl<'a> fmt::Debug for Fragment<'a> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // NOTE(review): inside this impl, `fmt` resolves to `Debug::fmt` on
        // `str` (quoted/escaped output) — confirm the quoted form is intended
        // rather than mirroring Display.
        self.as_str().fmt(f)
    }
}
impl<'a> cmp::PartialEq for Fragment<'a> {
    // Equality delegates to `PctStr`, which presumably compares the
    // percent-decoded content, so differently-encoded but equivalent
    // fragments compare equal — confirm against the `pct-str` docs.
    fn eq(&self, other: &Fragment) -> bool {
        self.as_pct_str() == other.as_pct_str()
    }
}
impl<'a> Eq for Fragment<'a> { }
// Raw, byte-for-byte comparison against a plain string slice (no decoding).
impl<'a> cmp::PartialEq<&'a str> for Fragment<'a> {
    fn eq(&self, other: &&'a str) -> bool {
        self.as_str() == *other
    }
}
impl<'a> PartialOrd for Fragment<'a> {
    fn partial_cmp(&self, other: &Fragment<'a>) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}
impl<'a> Ord for Fragment<'a> {
    fn cmp(&self, other: &Fragment<'a>) -> Ordering {
        self.as_pct_str().cmp(other.as_pct_str())
    }
}
impl<'a> Hash for Fragment<'a> {
    // Hashes the `PctStr` form, staying consistent with `Eq` above (both
    // delegate to the same representation).
    fn hash<H: Hasher>(&self, hasher: &mut H) {
        self.as_pct_str().hash(hasher)
    }
}
|
use crate::conversions::{GpuBuffer};
use cgmath::{Decomposed, Deg, Matrix4, Quaternion, Rotation3, Vector3, Vector4};
use std::mem::size_of;
use wgpu::BufferUsageFlags;
/// A set of models sharing one mesh (index/vertex buffers) and bind group,
/// drawn as instances via a per-model matrix buffer.
pub struct ModelGroup {
    /// Display name of the group.
    pub name: String,
    /// Shared index buffer for every model in the group.
    pub index_buf: GpuBuffer,
    /// Shared vertex buffer for every model in the group.
    pub vertex_buf: GpuBuffer,
    /// Bind group with the group's GPU resources.
    pub bind_group: wgpu::BindGroup,
    /// Instances drawn with this mesh.
    pub models: Vec<Model>,
    // `None` until `update_mvp_buffer` is first called.
    mvp_buffer: Option<GpuBuffer>,
}
impl ModelGroup {
    /// Creates a group with no models and no instance-matrix buffer yet.
    pub fn new(
        name: impl Into<String>,
        index_buf: GpuBuffer,
        vertex_buf: GpuBuffer,
        bind_group: wgpu::BindGroup,
    ) -> ModelGroup {
        ModelGroup {
            name: name.into(),
            index_buf,
            vertex_buf,
            bind_group,
            models: Vec::new(),
            mvp_buffer: None,
        }
    }
    /// Adds a model instance to the group.
    pub fn add_model(&mut self, model: Model) {
        self.models.push(model);
    }
    /// Rebuilds the per-instance matrix buffer from the current model transforms.
    pub fn update_mvp_buffer(&mut self, device: &wgpu::Device) {
        self.mvp_buffer = Some(GpuBuffer::from_transformed_slice(
            device,
            BufferUsageFlags::TRANSFER_SRC,
            &self.models,
            |model| model.model_matrix(),
        ));
    }
    /// Returns the per-instance matrix buffer.
    ///
    /// # Panics
    /// Panics if `update_mvp_buffer` has not been called yet. (The original
    /// used `unimplemented!()`, which wrongly suggested missing code rather
    /// than a violated caller precondition.)
    pub fn mvp_buffer(&self) -> &wgpu::Buffer {
        self.mvp_buffer
            .as_ref()
            .expect("update_mvp_buffer must be called before mvp_buffer")
            .buffer()
    }
    /// Vertex-buffer layout for one `Matrix4<f32>` instance attribute, split
    /// across four vec4 attribute slots (locations 3-6), advanced per instance.
    pub fn buffer_descriptor() -> wgpu::VertexBufferDescriptor<'static> {
        wgpu::VertexBufferDescriptor {
            stride: size_of::<Matrix4<f32>>() as u32,
            step_mode: wgpu::InputStepMode::Instance,
            attributes: &[
                wgpu::VertexAttributeDescriptor {
                    attribute_index: 3,
                    format: wgpu::VertexFormat::Float4,
                    offset: 0,
                },
                wgpu::VertexAttributeDescriptor {
                    attribute_index: 4,
                    format: wgpu::VertexFormat::Float4,
                    offset: (size_of::<f32>() * 4) as u32,
                },
                wgpu::VertexAttributeDescriptor {
                    attribute_index: 5,
                    format: wgpu::VertexFormat::Float4,
                    offset: (size_of::<f32>() * 4 * 2) as u32,
                },
                wgpu::VertexAttributeDescriptor {
                    attribute_index: 6,
                    format: wgpu::VertexFormat::Float4,
                    offset: (size_of::<f32>() * 4 * 3) as u32,
                },
            ],
        }
    }
}
/// A single instance in a `ModelGroup`.
pub struct Model {
    /// Scale/rotation/translation of this instance; flattened to a matrix
    /// for the GPU by `model_matrix`.
    transform: Decomposed<Vector3<f32>, Quaternion<f32>>,
}
impl Model {
pub fn new(position: Vector3<f32>) -> Model {
Model {
transform: Decomposed {
scale: 1.0,
rot: Quaternion::from_angle_y(Deg(0.0f32)),
disp: position,
},
}
}
pub fn translate(&mut self, movement: Vector3<f32>) {
self.transform.disp += movement;
}
pub fn model_matrix(&self) -> Matrix4<f32> {
self.transform.into()
}
}
|
#![feature(proc_macro_diagnostic, proc_macro_span)]
#![feature(core_intrinsics, decl_macro)]
#![recursion_limit="256"]
extern crate syn;
extern crate proc_macro;
extern crate proc_macro2;
#[macro_use] extern crate quote;
extern crate rocket;
mod parser;
mod spanned;
mod ext;
use parser::Result as PResult;
use proc_macro::{Span, TokenStream};
use spanned::Spanned;
use ext::*;
use syn::*;
/// Error emitted when a variant carries fields (only unit variants are supported).
const NO_FIELDS_ERR: &str = "variants in `FromFormValue` derives cannot have fields";
/// Error emitted when the enum has generic parameters.
const NO_GENERICS: &str = "enums with generics cannot derive `FromFormValue`";
/// Error emitted when the derive target is not an enum.
const ONLY_ENUMS: &str = "`FromFormValue` can only be derived for enums";
/// Warning emitted for an empty enum (the derive succeeds but can never parse).
const EMPTY_ENUM_WARN: &str = "deriving `FromFormValue` for empty enum";
/// Validates that the derive input is a non-generic enum whose variants are
/// all unit (field-less) variants, and returns its `DataEnum`.
/// An empty enum is accepted but triggers a compile-time warning.
fn validate_input(input: DeriveInput) -> PResult<DataEnum> {
    // This derive doesn't support generics. Error out if there are generics.
    if !input.generics.params.is_empty() {
        return Err(input.generics.span().error(NO_GENERICS));
    }
    // This derive only works for enums. Error out if the input is not an enum.
    // (Span captured first because `into_enum` consumes `input.data`.)
    let input_span = input.span();
    let data = input.data.into_enum().ok_or_else(|| input_span.error(ONLY_ENUMS))?;
    // This derive only works for variants that are nullary.
    for variant in data.variants.iter() {
        if !variant.fields.is_empty() {
            return Err(variant.span().error(NO_FIELDS_ERR));
        }
    }
    // Emit a warning if the enum is empty.
    if data.variants.is_empty() {
        Span::call_site().warning(EMPTY_ENUM_WARN).emit();
    }
    Ok(data)
}
/// Generates the `FromFormValue` impl for a unit-variant enum: each variant
/// matches its own name case-insensitively; any other value is returned
/// unchanged as the error.
fn real_derive_from_form_value(input: TokenStream) -> PResult<TokenStream> {
    // Parse the input `TokenStream` as a `syn::DeriveInput`, an AST.
    let input: DeriveInput = syn::parse(input).map_err(|e| {
        Span::call_site().error(format!("error: failed to parse input: {:?}", e))
    })?;
    // Validate the enum.
    let name = input.ident.clone();
    let enum_data = validate_input(input)?;
    // Create iterators over the identifers as idents and as strings.
    let variant_strs = enum_data.variants.iter().map(|v| v.ident.to_string());
    let variant_idents = enum_data.variants.iter().map(|v| &v.ident);
    // `quote!` zips these iterators, so the enum name must repeat once per variant.
    let names = ::std::iter::repeat(&name);
    // Generate the implementation.
    Ok(quote! {
        impl<'v> ::rocket::request::FromFormValue<'v> for #name {
            type Error = &'v ::rocket::http::RawStr;
            fn from_form_value(v: &'v ::rocket::http::RawStr) -> ::std::result::Result<Self, Self::Error> {
                #(if v.as_uncased_str() == #variant_strs {
                    return Ok(#names::#variant_idents);
                })*
                Err(v)
            }
        }
    }.into())
}
/// Entry point for `#[derive(FromFormValue)]`. On failure the diagnostic is
/// emitted and an empty token stream is returned so compilation can continue
/// and surface the error to the user.
#[proc_macro_derive(FromFormValue)]
pub fn derive_from_form_value(input: TokenStream) -> TokenStream {
    real_derive_from_form_value(input)
        .unwrap_or_else(|diag| {
            diag.emit();
            TokenStream::new()
        })
}
|
//! Fast, but limited allocator.
use std::mem;
use std::ops::{Index, IndexMut};
use std::vec::Vec;
/// A struct representing an entry to `TypedArena<T>`
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub struct Entry {
    /// Index of the chunk holding the block.
    chunk_index: usize,
    /// Index of the block within that chunk.
    block_index: usize,
}
/// One slot of the arena: either a live value, or a vacant slot that links
/// to the next vacant slot (an intrusive free list threaded through chunks).
enum Block<T> {
    Occupied(T),
    Vacant(Option<Entry>),
}
/// A fast, but limited allocator that only allocates a single type of object.
///
/// All objects inside the arena will be destroyed when the typed arena is destroyed. This typed
/// arena also supports deallocation of objects once they are allocated and yields both mutable and
/// immutable references to objects. Additionally, the underlying container is simply a `Vec` so
/// the code itself is very simple and uses no unsafe code. When the typed arena is full, it will
/// allocate another chunk of objects so no memory is reallocated.
///
/// # Examples
/// ```
/// use extended_collections::arena::TypedArena;
///
/// let mut arena = TypedArena::new(1024);
///
/// let x = arena.allocate(1);
/// assert_eq!(arena[x], 1);
///
/// arena[x] += 1;
/// assert_eq!(arena[x], 2);
///
/// assert_eq!(arena.free(&x), 2);
/// ```
pub struct TypedArena<T> {
    /// Head of the vacant-block free list; `None` when no freed slot is available.
    head: Option<Entry>,
    /// Chunks of blocks; each chunk grows lazily up to `chunk_size` slots.
    chunks: Vec<Vec<Block<T>>>,
    /// Maximum number of blocks per chunk.
    chunk_size: usize,
    /// Number of currently occupied blocks.
    size: usize,
    /// Total slots available across all chunks (`chunks.len() * chunk_size`).
    capacity: usize,
}
impl<T> TypedArena<T> {
    /// Returns true when `entry` points at a block that has actually been pushed.
    fn is_valid_entry(&self, entry: &Entry) -> bool {
        entry.chunk_index < self.chunks.len() && entry.block_index < self.chunks[entry.chunk_index].len()
    }
    /// Constructs a new, empty `TypedArena<T>` with a specific number of objects per chunk.
    ///
    /// # Examples
    /// ```
    /// use extended_collections::arena::TypedArena;
    ///
    /// // creates a new TypedArena<T> that contains a maximum of 1024 u32's per chunk
    /// let arena: TypedArena<u32> = TypedArena::new(1024);
    /// ```
    pub fn new(chunk_size: usize) -> Self {
        TypedArena {
            head: None,
            chunks: Vec::new(),
            chunk_size,
            size: 0,
            capacity: 0,
        }
    }
    /// Allocates an object in the typed arena and returns an Entry. The Entry can later be used to
    /// index retrieve mutable and immutable references to the object, and dellocate the object.
    ///
    /// # Examples
    /// ```
    /// use extended_collections::arena::TypedArena;
    ///
    /// let mut arena = TypedArena::new(1024);
    /// let x = arena.allocate(0);
    /// ```
    pub fn allocate(&mut self, value: T) -> Entry {
        // Full (and, by implication, no vacant slot): start a fresh chunk.
        if self.size == self.capacity {
            self.chunks.push(Vec::with_capacity(self.chunk_size));
            self.capacity += self.chunk_size;
        }
        self.size += 1;
        match self.head.take() {
            // No freed slot available: append to the newest chunk.
            None => {
                let chunk_count = self.chunks.len();
                // Fix: binding does not need `mut` (push works through the `&mut` reference).
                let last_chunk = &mut self.chunks[chunk_count - 1];
                last_chunk.push(Block::Occupied(value));
                Entry {
                    chunk_index: chunk_count - 1,
                    block_index: last_chunk.len() - 1,
                }
            },
            // Reuse the most recently freed slot and advance the free list.
            Some(entry) => {
                let vacant_block = mem::replace(
                    &mut self.chunks[entry.chunk_index][entry.block_index],
                    Block::Occupied(value),
                );
                match vacant_block {
                    Block::Vacant(next_entry) => {
                        let ret = entry;
                        self.head = next_entry;
                        ret
                    },
                    // The free list only ever links vacant blocks.
                    Block::Occupied(_) => unreachable!(),
                }
            },
        }
    }
    /// Deallocates an object in the typed arena and returns the object.
    ///
    /// # Panics
    /// Panics if entry corresponds to an invalid or vacant value.
    ///
    /// # Examples
    /// ```
    /// use extended_collections::arena::TypedArena;
    ///
    /// let mut arena = TypedArena::new(1024);
    /// let x = arena.allocate(0);
    /// assert_eq!(arena.free(&x), 0);
    /// ```
    pub fn free(&mut self, entry: &Entry) -> T {
        if !self.is_valid_entry(entry) {
            panic!("Attempting to free invalid block.");
        }
        // Swap the slot for a vacant block that links to the old free-list head.
        let old_block = mem::replace(
            &mut self.chunks[entry.chunk_index][entry.block_index],
            Block::Vacant(self.head.take()),
        );
        match old_block {
            Block::Vacant(_) => panic!("Attempting to free vacant block."),
            Block::Occupied(value) => {
                self.size -= 1;
                self.head = Some(Entry {
                    chunk_index: entry.chunk_index,
                    block_index: entry.block_index,
                });
                value
            },
        }
    }
    /// Returns an immutable reference to an object in the typed arena. Returns `None` if the entry
    /// does not correspond to a valid object.
    ///
    /// # Examples
    /// ```
    /// use extended_collections::arena::TypedArena;
    ///
    /// let mut arena = TypedArena::new(1024);
    /// let x = arena.allocate(0);
    /// assert_eq!(arena.get(&x), Some(&0));
    /// ```
    pub fn get(&self, entry: &Entry) -> Option<&T> {
        if !self.is_valid_entry(entry) {
            return None;
        }
        match self.chunks[entry.chunk_index][entry.block_index] {
            Block::Occupied(ref value) => Some(value),
            Block::Vacant(_) => None,
        }
    }
    /// Returns a mutable reference to an object in the typed arena. Returns `None` if the entry
    /// does not correspond to a valid object.
    ///
    /// # Examples
    /// ```
    /// use extended_collections::arena::TypedArena;
    ///
    /// let mut arena = TypedArena::new(1024);
    /// let x = arena.allocate(0);
    /// assert_eq!(arena.get_mut(&x), Some(&mut 0));
    /// ```
    pub fn get_mut(&mut self, entry: &Entry) -> Option<&mut T> {
        if !self.is_valid_entry(entry) {
            return None;
        }
        match self.chunks[entry.chunk_index][entry.block_index] {
            Block::Occupied(ref mut value) => Some(value),
            Block::Vacant(_) => None,
        }
    }
}
impl<T> Index<Entry> for TypedArena<T> {
    type Output = T;
    /// Panicking lookup; see `get` for the non-panicking variant.
    fn index(&self, entry: Entry) -> &Self::Output {
        self.get(&entry).expect("Entry out of bounds.")
    }
}
impl<T> IndexMut<Entry> for TypedArena<T> {
    /// Panicking mutable lookup; see `get_mut` for the non-panicking variant.
    fn index_mut(&mut self, entry: Entry) -> &mut Self::Output {
        self.get_mut(&entry).expect("Entry out of bounds.")
    }
}
#[cfg(test)]
mod tests {
    use super::Entry;
    use super::TypedArena;
    // Freeing an entry that was never allocated must panic ("invalid block").
    #[test]
    #[should_panic]
    fn test_free_invalid_block() {
        let mut arena: TypedArena<u32> = TypedArena::new(1024);
        arena.free(&Entry { chunk_index: 0, block_index: 0 });
    }
    // NOTE(review): block 1 was never pushed, so this actually trips the
    // validity check ("invalid block"), not the vacant-block check — confirm
    // whether the test was meant to free an already-freed slot instead.
    #[test]
    #[should_panic]
    fn test_free_vacant_block() {
        let mut arena = TypedArena::new(1024);
        arena.allocate(0);
        arena.free(&Entry { chunk_index: 0, block_index: 1 });
    }
    // Allocations fill a chunk in order.
    #[test]
    fn test_insert() {
        let mut pool = TypedArena::new(1024);
        assert_eq!(pool.allocate(0), Entry { chunk_index: 0, block_index: 0 });
        assert_eq!(pool.allocate(0), Entry { chunk_index: 0, block_index: 1 });
        assert_eq!(pool.allocate(0), Entry { chunk_index: 0, block_index: 2 });
    }
    // A third allocation with chunk_size = 2 must spill into a new chunk.
    #[test]
    fn test_insert_multiple_chunks() {
        let mut pool = TypedArena::new(2);
        assert_eq!(pool.allocate(0), Entry { chunk_index: 0, block_index: 0 });
        assert_eq!(pool.allocate(0), Entry { chunk_index: 0, block_index: 1 });
        assert_eq!(pool.allocate(0), Entry { chunk_index: 1, block_index: 0 });
    }
    // Freed entries are reused by the next allocation.
    #[test]
    fn test_free() {
        let mut pool = TypedArena::new(1024);
        let entry = pool.allocate(0);
        assert_eq!(entry, Entry { chunk_index: 0, block_index: 0 });
        assert_eq!(pool.free(&entry), 0);
        assert_eq!(pool.allocate(0), entry);
    }
    #[test]
    fn test_get() {
        let mut pool = TypedArena::new(1024);
        let entry = pool.allocate(0);
        assert_eq!(pool.get(&entry), Some(&0));
    }
    #[test]
    fn test_get_invalid_block() {
        let pool: TypedArena<u32> = TypedArena::new(1024);
        assert_eq!(pool.get(&Entry { chunk_index: 0, block_index: 0 }), None);
    }
    // Like test_free_vacant_block, this index is out of range rather than
    // vacant, but `get` returns None either way.
    #[test]
    fn test_get_vacant_block() {
        let mut pool = TypedArena::new(1024);
        pool.allocate(0);
        assert_eq!(pool.get(&Entry { chunk_index: 0, block_index: 1 }), None);
    }
    #[test]
    fn test_get_mut() {
        let mut pool = TypedArena::new(1024);
        let entry = pool.allocate(0);
        *pool.get_mut(&entry).unwrap() = 1;
        assert_eq!(pool.get(&entry), Some(&1));
    }
    #[test]
    fn test_get_mut_invalid_block() {
        let mut pool: TypedArena<u32> = TypedArena::new(1024);
        assert_eq!(pool.get_mut(&Entry { chunk_index: 0, block_index: 0 }), None);
    }
    #[test]
    fn test_get_mut_vacant_block() {
        let mut pool = TypedArena::new(1024);
        pool.allocate(0);
        assert_eq!(pool.get_mut(&Entry { chunk_index: 0, block_index: 1 }), None);
    }
}
|
//! # GSync
//! GSync is a tool to help you stay backed up. It does this by synchronizing the folders you want to Google Drive, while respecting .gitignore files
//!
//! ## Installation
//! You've got two options to install GSync
//!
//! 1. Preferred method: Via crates.io: `cargo install gsync`
//! 2. Via GitHub: [Releases](https://github.com/TheDutchMC/GSync/releases)
//!
//! ## Usage
//! 1. Create a project on [Google Deveopers](https://console.developers.google.com)
//! 2. Configure the OAuth2 consent screen and create OAuth2 credentials
//! 3. Enable the Google Drive API
//! 4. If you are planning to use a Team Drive/Shared Drive, run `gsync drives` to get the ID of the drive you want to sync to
//! 5. Configure GSync: `gsync config -i <GOOGLE APP ID> -s <GOOGLE APP SECRET> -f <INPUT FILES> -d <ID OF SHARED DRIVE>`. The `-d` parameter is optional
//! 6. Login: `gsync login`
//! 7. Sync away! `gsync sync`
//!
//! To update your configuration later, run `gsync config` again, you don't have to re-provide all options if you don't want to change them
//!
//! ## Licence
//! GSync is dual licenced under the MIT and Apache-2.0 licence, at your discretion
#![deny(deprecated)]
#![deny(clippy::panic)]
#![warn(rust_2018_idioms)]
#![warn(clippy::cargo)]
#![warn(clippy::decimal_literal_representation)]
#![warn(clippy::if_not_else)]
#![warn(clippy::large_digit_groups)]
#![warn(clippy::missing_docs_in_private_items)]
#![warn(clippy::missing_errors_doc)]
#![warn(clippy::needless_continue)]
#![allow(clippy::multiple_crate_versions)]
mod api;
mod env;
mod config;
mod login;
mod macros;
mod sync;
use clap::Arg;
use crate::env::Env;
use crate::config::Configuration;
use crate::api::GoogleError;
/// Type alias for Result
///
/// NOTE(review): the error payload is a tuple `(Error, u32, &'static str)` —
/// presumably `(error, line!(), file!())` injected by the `handle_err!` /
/// `unwrap_db_err!` macros; confirm in `macros.rs`.
pub type Result<T> = std::result::Result<T, (Error, u32, &'static str)>;
/// Enum describing Errors which can often occur in Gsync
#[derive(Debug)]
pub enum Error {
    /// Error returned by the Google API
    GoogleError(GoogleError),
    /// Error resulting from a database operation
    DatabaseError(rusqlite::Error),
    /// Error resulting from a reqwest operation
    RequestError(reqwest::Error),
    /// An error which does not fit in any other category
    Other(String)
}
/// Version of the binary. Set in Cargo.toml
const VERSION: &str = env!("CARGO_PKG_VERSION");
/// CLI entry point: builds the clap command tree, ensures the SQLite schema
/// exists, then dispatches on the chosen subcommand
/// (`config` / `show` / `login` / `sync` / `drives`).
fn main() {
    let matches = clap::App::new("gsync")
        .version(VERSION)
        .author("Tobias de Bruijn <t.debruijn@array21.dev>")
        .about("Sync folders and files to Google Drive while respecting gitignore files")
        .subcommand(clap::SubCommand::with_name("config")
            .about("Configure GSync. Not all options have to be supplied, if you don't want to overwrite them. If this is the first time you're running the config command, you must provide all options.")
            .arg(Arg::with_name("client-id")
                .short("i")
                .long("id")
                .value_name("CLIENT_ID")
                .help("The Client ID provided by Google")
                .takes_value(true)
                .required(false))
            .arg(Arg::with_name("client-secret")
                .short("s")
                .long("secret")
                .value_name("CLIENT_SECRET")
                .help("The Client Secret provided by Google")
                .takes_value(true)
                .required(false))
            .arg(Arg::with_name("files")
                .short("f")
                .long("files")
                .value_name("FILES")
                .help("The files you want to sync, comma seperated String")
                .takes_value(true)
                .required(false))
            .arg(Arg::with_name("drive_id")
                .short("d")
                .long("drive")
                .value_name("ID")
                .help("The ID of the Team Drive to use, if you are not using a Team Drive leave this empty.")
                .takes_value(true)
                .required(false)))
        .subcommand(clap::SubCommand::with_name("show")
            .about("Show the current GSync configuration"))
        .subcommand(clap::SubCommand::with_name("login")
            .about("Login to Google"))
        .subcommand(clap::SubCommand::with_name("sync")
            .about("Start syncing the configured folders to Google Drive"))
        .subcommand(clap::SubCommand::with_name("drives")
            .about("Get a list of all shared drives and their IDs."))
        .get_matches();
    let empty_env = Env::empty();
    // Scoping this separately because we want to drop conn when we're done, since we can only ever have 1 conn.
    {
        //Check if there are tables
        let conn = empty_env.get_conn().expect("Failed to create database connection. ");
        conn.execute("CREATE TABLE IF NOT EXISTS user (id TEXT PRIMARY KEY, refresh_token TEXT, access_token TEXT, expiry INTEGER)", rusqlite::named_params! {}).expect("Failed to create table 'users'");
        conn.execute("CREATE TABLE IF NOT EXISTS config (client_id TEXT, client_secret TEXT, input_files TEXT, drive_id TEXT)", rusqlite::named_params! {}).expect("Failed to create table 'config'");
    }
    // 'config' subcommand
    if let Some(matches) = matches.subcommand_matches("config") {
        // CLI-supplied values are merged over whatever is already stored, so
        // the user only has to re-provide the options they want to change.
        let new_config = Configuration {
            client_id: option_str_string(matches.value_of("client-id")),
            client_secret: option_str_string(matches.value_of("client-secret")),
            input_files: option_str_string(matches.value_of("files")),
            drive_id: option_str_string(matches.value_of("drive_id"))
        };
        let current_config = handle_err!(Configuration::get_config(&empty_env));
        let config = Configuration::merge(new_config, current_config);
        match config.is_complete() {
            (true, _) => {},
            (false, str) => {
                eprintln!("Error: Configuration is incomplete; {}", str);
                std::process::exit(1);
            }
        }
        handle_err!(config.write(&empty_env));
        println!("Configuration updated!");
        std::process::exit(0);
    }
    // 'show' subcommand
    if matches.subcommand_matches("show").is_some() {
        let config = handle_err!(Configuration::get_config(&empty_env));
        if config.is_empty() {
            println!("GSync is unconfigured. Run 'gsync config -h` for more information on how to configure GSync'");
            std::process::exit(0);
        }
        println!("Current GSync configuration:");
        println!("Client ID: {}", option_unwrap_text(config.client_id));
        println!("Client Secret: {}", option_unwrap_text(config.client_secret));
        println!("Input Files: {}", option_unwrap_text(config.input_files));
        println!("Drive ID: {}", option_unwrap_text(config.drive_id));
        std::process::exit(0);
    }
    // 'login' subcommand
    if matches.subcommand_matches("login").is_some() {
        let config = handle_err!(Configuration::get_config(&empty_env));
        if config.is_empty() {
            println!("GSync is unconfigured. Run 'gsync config -h` for more information on how to configure GSync'");
            std::process::exit(0);
        }
        match config.is_complete() {
            (true, _) => {},
            (false, str) => {
                eprintln!("Error: Configuration is incomplete; {}", str);
                std::process::exit(1);
            }
        }
        // Safe to call unwrap because we've verified that the config is complete
        let env = Env::new(config.client_id.as_ref().unwrap(), config.client_secret.as_ref().unwrap(), config.drive_id.as_ref(), String::new());
        let login_data = handle_err!(crate::login::perform_oauth2_login(&env));
        println!("Info: Inserting tokens into database.");
        handle_err!(crate::login::db::save_to_database(&login_data, &env));
        println!("Info: Login successful!");
        std::process::exit(0);
    }
    // 'sync' subcommand
    if matches.subcommand_matches("sync").is_some() {
        let config = handle_err!(Configuration::get_config(&empty_env));
        if config.is_empty() {
            println!("GSync is unconfigured. Run 'gsync config -h` for more information on how to configure GSync'");
            std::process::exit(0);
        }
        match config.is_complete() {
            (true, _) => {},
            (false, str) => {
                eprintln!("Error: Configuration is incomplete; {}", str);
                std::process::exit(1);
            }
        }
        if !handle_err!(is_logged_in(&empty_env)) {
            eprintln!("Error: GSync isn't logged in with Google. Have you run `gsync login` yet?");
            std::process::exit(1);
        }
        // Safe to call unwrap because we verified the config is complete above
        let mut env = Env::new(config.client_id.as_ref().unwrap(), config.client_secret.as_ref().unwrap(), config.drive_id.as_ref(), String::new());
        println!("Info: Querying Drive for root folder");
        let list = handle_err!(crate::api::drive::list_files(&env, Some("name = 'GSync' and mimeType = 'application/vnd.google-apps.folder' and trashed = false"), config.drive_id.as_deref()));
        // The 'GSync' root folder is created lazily on the first sync.
        let root_folder_id = if list.is_empty() {
            println!("Info: Root folder doesn't exist. Creating one now.");
            match &env.drive_id {
                Some(drive_id) => handle_err!(crate::api::drive::create_folder(&env, "GSync", drive_id)),
                None => handle_err!(crate::api::drive::create_folder(&env, "GSync", "root"))
            }
        } else {
            println!("Info: Root folder exists.");
            list.get(0).unwrap().id.clone()
        };
        env.root_folder = root_folder_id;
        handle_err!(crate::sync::sync(&config, &env));
        std::process::exit(0);
    }
    // 'drives' subcommand
    if matches.subcommand_matches("drives").is_some() {
        let config = handle_err!(Configuration::get_config(&empty_env));
        if config.is_empty() {
            println!("GSync is unconfigured. Run 'gsync config -h` for more information on how to configure GSync'");
            std::process::exit(0);
        }
        match config.is_complete() {
            (true, _) => {},
            (false, str) => {
                eprintln!("Error: Configuration is incomplete; {}", str);
                std::process::exit(1);
            }
        }
        if !handle_err!(is_logged_in(&empty_env)) {
            eprintln!("Error: GSync isn't logged in with Google. Have you run `gsync login` yet?");
            std::process::exit(1);
        }
        // Safe to call unwrap: completeness was verified just above.
        let env = Env::new(config.client_id.as_ref().unwrap(), config.client_secret.as_ref().unwrap(), config.drive_id.as_ref(), String::new());
        let shared_drives = handle_err!(crate::api::drive::get_shared_drives(&env));
        for drive in shared_drives {
            println!("Shared drive '{}' with identifier '{}'", &drive.name, &drive.id);
        }
        std::process::exit(0);
    }
    println!("No command specified. Run 'gsync -h' for available commands.");
}
/// Convert an `Option<&str>` into an `Option<String>` by cloning the content.
fn option_str_string(i: Option<&str>) -> Option<String> {
    i.map(String::from)
}
/// Unwrap an `Option<String>`; a `None` becomes the literal string "None".
fn option_unwrap_text(i: Option<String>) -> String {
    i.unwrap_or_else(|| String::from("None"))
}
/// Check if a user is logged in
///
/// # Errors
/// - When a database operation fails
fn is_logged_in(env: &Env) -> Result<bool> {
    let conn = unwrap_db_err!(env.get_conn());
    let mut stmt = unwrap_db_err!(conn.prepare("SELECT * FROM user"));
    let mut result = unwrap_db_err!(stmt.query(rusqlite::named_params! {}));
    // Any stored row counts as a login. The loop drains the cursor and stops
    // on the first error, which is treated the same as "no more rows".
    let mut is_logged_in = false;
    while let Ok(Some(_)) = result.next() {
        is_logged_in = true;
    }
    Ok(is_logged_in)
} |
// q0125_valid_palindrome
struct Solution;

impl Solution {
    /// Case-insensitive palindrome check over the ASCII alphanumeric
    /// characters only; everything else (spaces, punctuation) is ignored.
    /// An input with no such characters counts as a palindrome.
    pub fn is_palindrome(s: String) -> bool {
        let normalized: Vec<char> = s
            .chars()
            .filter(char::is_ascii_alphanumeric)
            .map(|c| c.to_ascii_lowercase())
            .collect();
        // A sequence is a palindrome exactly when it equals its own reverse.
        normalized.iter().eq(normalized.iter().rev())
    }
}
#[cfg(test)]
mod tests {
    use super::Solution;
    // LeetCode 125 examples: punctuation and case are ignored; a genuine
    // non-palindrome must be rejected.
    #[test]
    fn it_works() {
        assert_eq!(
            true,
            Solution::is_palindrome(String::from("A man, a plan, a canal: Panama"))
        );
        assert_eq!(false, Solution::is_palindrome(String::from("race a car")));
    }
}
|
use nom::{be_u8,be_u32,IResult,Needed,Err,ErrorKind};
use std::str::from_utf8;
/// Recognizes a big endian unsigned 3 bytes (24-bit) integer.
/// (The previous doc said "4 bytes"; the implementation reads exactly 3.)
#[inline]
pub fn be_u24(i: &[u8]) -> IResult<&[u8], u32> {
    if i.len() < 3 {
        IResult::Incomplete(Needed::Size(3))
    } else {
        // Assemble the three bytes most-significant first.
        let res = ((i[0] as u32) << 16) + ((i[1] as u32) << 8) + i[2] as u32;
        IResult::Done(&i[3..], res)
    }
}
/// Parsed FLV file header.
#[derive(Debug,PartialEq,Eq)]
pub struct Header {
    /// FLV format version byte.
    version: u8,
    /// True when the flags byte advertises audio tags (bit 2 set).
    audio: bool,
    /// True when the flags byte advertises video tags (bit 0 set).
    video: bool,
    /// Offset from the start of the file to the FLV body.
    offset: u32,
}
// Parses the FLV file header: "FLV" signature, one version byte, one flags
// byte (bit 2 = audio present, bit 0 = video present) and a big-endian
// 4-byte offset to the start of the body.
named!(pub header<Header>,
  chain!(
    tag!("FLV") ~
    version: be_u8 ~
    flags: be_u8 ~
    offset: be_u32 ,
    || {
      Header {
        version: version,
        audio: flags & 4 == 4,
        video: flags & 1 == 1,
        offset: offset
      }
    }
  )
);
/// Kind of an FLV tag, decoded from the tag-type byte
/// (8 = audio, 9 = video, 18 = script data).
#[derive(Debug,PartialEq,Eq)]
pub enum TagType {
    Audio,
    Video,
    Script,
}
/// Fixed-size header preceding every FLV tag payload.
#[derive(Debug,PartialEq,Eq)]
pub struct TagHeader {
    tag_type: TagType,
    /// Payload size in bytes (24-bit on the wire).
    data_size: u32,
    /// Timestamp in ms; 24-bit value plus an extension byte forming the top 8 bits.
    timestamp: u32,
    stream_id: u32,
}
/// Tag payload; only the discriminant is modeled so far.
#[derive(Debug,PartialEq,Eq)]
pub enum TagData {
    //Audio(AudioData),
    Audio,
    Video,
    Script,
}
/// A complete FLV tag: header plus payload.
#[derive(Debug,PartialEq,Eq)]
pub struct Tag {
    header: TagHeader,
    data: TagData,
}
// Parses an FLV tag header: tag type byte, 24-bit payload size, 24-bit
// timestamp plus an extension byte (the timestamp's top 8 bits), and a
// 24-bit stream id.
named!(pub tag_header<TagHeader>,
  chain!(
    tag_type: switch!(be_u8,
      8 => value!(TagType::Audio) |
      9 => value!(TagType::Video) |
      18 => value!(TagType::Script)
    ) ~
    data_size: be_u24 ~
    timestamp: be_u24 ~
    timestamp_extended: be_u8 ~
    stream_id: be_u24 ,
    || {
      TagHeader {
        tag_type: tag_type,
        data_size: data_size,
        // BUG FIX: in Rust `+` binds tighter than `<<`, so the original
        // `a << 24 + b` parsed as `a << (24 + b)`. Parenthesize so the
        // extension byte really becomes the top 8 bits of the timestamp.
        timestamp: ((timestamp_extended as u32) << 24) + timestamp,
        stream_id: stream_id,
      }
    }
  )
);
/// Audio codec, from the top 4 bits of the audio-tag header byte.
#[derive(Debug,PartialEq,Eq)]
pub enum SoundFormat {
    PCM_BE,
    ADPCM,
    MP3,
    PCM_LE,
    NELLYMOSER_16KHZ_MONO,
    NELLYMOSER_8KHZ_MONO,
    NELLYMOSER,
    PCM_ALAW,
    PCM_ULAW,
    AAC,
    SPEEX,
    MP3_8KHZ,
    DEVICE_SPECIFIC,
}
/// Sample rate, from 2 bits of the audio-tag header byte.
#[derive(Debug,PartialEq,Eq)]
pub enum SoundRate {
    _5_5KHZ,
    _11KHZ,
    _22KHZ,
    _44KHZ,
}
/// Sample width, from 1 bit of the audio-tag header byte.
#[derive(Debug,PartialEq,Eq)]
pub enum SoundSize {
    Snd8bit,
    Snd16bit,
}
/// Channel layout, from 1 bit of the audio-tag header byte.
#[derive(Debug,PartialEq,Eq)]
pub enum SoundType {
    SndMono,
    SndStereo,
}
/// Decoded audio tag: the unpacked header byte plus the raw sample data.
#[derive(Debug,PartialEq,Eq)]
pub struct AudioData<'a> {
    sound_format: SoundFormat,
    sound_rate: SoundRate,
    sound_size: SoundSize,
    sound_type: SoundType,
    /// Raw audio payload (everything after the header byte).
    sound_data: &'a [u8]
}
/// Parses an audio tag body of `size` bytes: one header byte (format, rate,
/// sample size and channel type packed as 4+2+1+1 bits) followed by the raw
/// sound data.
pub fn audio_data(input: &[u8], size: usize) -> IResult<&[u8], AudioData> {
    if input.len() < size {
        return IResult::Incomplete(Needed::Size(size));
    }
    // Fix: the post-header remainder is deliberately unused (the whole tag
    // body is `size` bytes, so we slice from `input` directly below);
    // underscore the binding to silence the unused-variable warning.
    let (_remaining, (sformat, srate, ssize, stype)) = try_parse!(input, bits!(
        tuple!(
            switch!(take_bits!(u8, 4),
                0 => value!(SoundFormat::PCM_BE)
                | 1 => value!(SoundFormat::ADPCM)
                | 2 => value!(SoundFormat::MP3)
                | 3 => value!(SoundFormat::PCM_LE)
                | 4 => value!(SoundFormat::NELLYMOSER_16KHZ_MONO)
                | 5 => value!(SoundFormat::NELLYMOSER_8KHZ_MONO)
                | 6 => value!(SoundFormat::NELLYMOSER)
                | 7 => value!(SoundFormat::PCM_ALAW)
                | 8 => value!(SoundFormat::PCM_ULAW)
                | 10 => value!(SoundFormat::AAC)
                | 11 => value!(SoundFormat::SPEEX)
                | 14 => value!(SoundFormat::MP3_8KHZ)
                | 15 => value!(SoundFormat::DEVICE_SPECIFIC)
            ),
            switch!(take_bits!(u8, 2),
                0 => value!(SoundRate::_5_5KHZ)
                | 1 => value!(SoundRate::_11KHZ)
                | 2 => value!(SoundRate::_22KHZ)
                | 3 => value!(SoundRate::_44KHZ)
            ),
            switch!(take_bits!(u8, 1),
                0 => value!(SoundSize::Snd8bit)
                | 1 => value!(SoundSize::Snd16bit)
            ),
            switch!(take_bits!(u8, 1),
                0 => value!(SoundType::SndMono)
                | 1 => value!(SoundType::SndStereo)
            )
        )
    ));
    IResult::Done(&input[size..], AudioData {
        sound_format: sformat,
        sound_rate: srate,
        sound_size: ssize,
        sound_type: stype,
        // Payload starts right after the single header byte.
        sound_data: &input[1..size]
    })
}
/// VIDEODATA frame type (4-bit field; wire values 1..=5 in `video_data`).
#[derive(Debug,PartialEq,Eq)]
pub enum FrameType {
    Key,
    Inter,
    DisposableInter,
    Generated,
    Command,
}
/// Video codec id (4-bit field; wire values 1..=7 in `video_data`).
#[derive(Debug,PartialEq,Eq)]
pub enum CodecId {
    JPEG,
    H263,
    SCREEN,
    VP6,
    VP6A,
    SCREEN2,
    H264,
}
/// Decoded VIDEODATA: the parsed header byte plus a borrowed slice of
/// the raw codec payload.
#[derive(Debug,PartialEq,Eq)]
pub struct VideoData<'a> {
    frame_type: FrameType,
    codec_id: CodecId,
    video_data: &'a [u8]
}
/// Parses a VIDEODATA payload of `size` bytes: one header byte
/// (frame_type(4) | codec_id(4)) followed by the raw codec payload.
///
/// Returns `Incomplete` when fewer than `size` bytes are available.
pub fn video_data(input: &[u8], size: usize) -> IResult<&[u8], VideoData> {
    if input.len() < size {
        return IResult::Incomplete(Needed::Size(size));
    }
    // NOTE(review): `remaining` is intentionally unused — the function
    // re-slices `input` below. Frame type 0 and codec ids 0 and 8..=15
    // are unmapped and make the switch fail; presumably reserved values.
    let (remaining, (frame_type, codec_id)) = try_parse!(input, bits!(
        tuple!(
            switch!(take_bits!(u8, 4),
                1 => value!(FrameType::Key)
                | 2 => value!(FrameType::Inter)
                | 3 => value!(FrameType::DisposableInter)
                | 4 => value!(FrameType::Generated)
                | 5 => value!(FrameType::Command)
            ),
            switch!(take_bits!(u8, 4),
                1 => value!(CodecId::JPEG)
                | 2 => value!(CodecId::H263)
                | 3 => value!(CodecId::SCREEN)
                | 4 => value!(CodecId::VP6)
                | 5 => value!(CodecId::VP6A)
                | 6 => value!(CodecId::SCREEN2)
                | 7 => value!(CodecId::H264)
            )
        )
    ));
    // Skip the header byte: the raw payload starts at input[1].
    IResult::Done(&input[size..], VideoData {
        frame_type: frame_type,
        codec_id: codec_id,
        video_data: &input[1..size]
    })
}
/// One name/value pair from an FLV script-data tag.
#[derive(Debug,PartialEq,Eq)]
pub struct ScriptDataObject<'a> {
    name: &'a str,
    data: ScriptDataValue<'a>
}
/// Script-data value tags (presumably mirroring the AMF0 type markers —
/// confirm against the FLV/AMF specification); most variants are
/// payload-less placeholders for now.
/// NOTE(review): `UNdefined` is a typo for `Undefined`, but renaming the
/// public variant would break downstream `match`es, so it is only flagged.
#[derive(Debug,PartialEq,Eq)]
pub enum ScriptDataValue<'a> {
    Number,
    Boolean,
    String(&'a str),
    Object,
    MovieClip,
    Null,
    UNdefined,
    Reference,
    ECMAArray,
    StrictArray,
    Date,
    LongString(&'a str),
}
/*
named!(pub script_data_object<ScriptDataObject>,
chain!(
name: script_data_string ~
data: script_data_value ,
|| {
ScriptDataObject {
name: name,
data: data
}
}
)
);
pub fn script_data_object_end(input:&[u8]) -> IResult<&[u8],()> {
match be_u24(input) {
IResult::Done(i,o) => if o == 9 {
IResult::Done(i,())
} else {
IResult::Error(Err::Code(ErrorKind::Tag))
},
e => e
}
}
named!(pub script_data_string<&str>, map_res!(length_bytes!(be_u16), from_utf8));
named!(pub script_data_long_string<&str>, map_res!(length_bytes!(be_u32), from_utf8));
named!(pub script_data_value<ScriptDataValue>, );
#[derive(Debug,PartialEq,Eq)]
pub struct ScriptData {
objects: Vec<ScriptDataObject>
}
*/
// Fix: the lint is spelled `non_upper_case_globals`; the previous
// `non_uppercase_globals` is not a real lint, so the attribute produced
// an `unknown_lints` warning and failed to silence the warnings on the
// lowercase consts below.
#[allow(non_upper_case_globals)]
#[cfg(test)]
mod tests {
    use super::*;
    use nom::{IResult,be_u32,HexDisplay};
    // Fixture files: complete FLV samples bundled with the repository.
    const zelda : &'static [u8] = include_bytes!("../assets/zelda.flv");
    const zeldaHQ : &'static [u8] = include_bytes!("../assets/zeldaHQ.flv");
    const commercials : &'static [u8] = include_bytes!("../assets/asian-commercials-are-weird.flv");
    #[test]
    fn headers() {
        // All three fixtures advertise audio + video and a 9-byte header.
        assert_eq!(
            header(&zelda[..9]),
            IResult::Done(
                &b""[..],
                Header { version: 1, audio: true, video: true, offset: 9 }
            ));
        assert_eq!(
            header(&zeldaHQ[..9]),
            IResult::Done(
                &b""[..],
                Header { version: 1, audio: true, video: true, offset: 9 }
            ));
        assert_eq!(
            header(&commercials[..9]),
            IResult::Done(
                &b""[..],
                Header { version: 1, audio: true, video: true, offset: 9 }
            ));
    }
    #[test]
    fn first_tag_headers() {
        // starts at 9 bytes (header) + 4 (size of previous tag)
        // header is 11 bytes long
        assert_eq!(
            tag_header(&zelda[13..24]),
            IResult::Done(
                &b""[..],
                TagHeader { tag_type: TagType::Video, data_size: 537, timestamp: 0, stream_id: 0 }
            ));
        assert_eq!(
            tag_header(&zeldaHQ[13..24]),
            IResult::Done(
                &b""[..],
                TagHeader { tag_type: TagType::Video, data_size: 2984, timestamp: 0, stream_id: 0 }
            ));
        assert_eq!(
            tag_header(&commercials[13..24]),
            IResult::Done(
                &b""[..],
                TagHeader { tag_type: TagType::Script, data_size: 273, timestamp: 0, stream_id: 0 }
            ));
    }
    #[test]
    fn audio_tags() {
        let tag_start = 24+537+4;
        println!("size of previous tag: {:?}", be_u32(&zelda[24+537..tag_start]));
        assert_eq!(
            tag_header(&zelda[tag_start..tag_start+11]),
            IResult::Done(
                &b""[..],
                TagHeader { tag_type: TagType::Audio, data_size: 642, timestamp: 0, stream_id: 0 }
            ));
        let tag_start2 = 24+2984+4;
        println!("size of previous tag: {:?}", be_u32(&zeldaHQ[24+2984..tag_start2]));
        println!("data:\n{}", (&zeldaHQ[tag_start2..tag_start2+11]).to_hex(8));
        assert_eq!(
            tag_header(&zeldaHQ[tag_start2..tag_start2+11]),
            IResult::Done(
                &b""[..],
                TagHeader { tag_type: TagType::Audio, data_size: 642, timestamp: 0, stream_id: 0 }
            ));
        println!("data: {:?}", audio_data(&zelda[tag_start+11..tag_start+11+642], 642));
        println!("data: {:?}", audio_data(&zeldaHQ[tag_start2+11..tag_start2+11+642], 642));
        assert_eq!(
            audio_data(&zelda[tag_start+11..tag_start+11+642], 642),
            IResult::Done(
                &b""[..],
                AudioData {
                    sound_format: SoundFormat::ADPCM,
                    sound_rate: SoundRate::_22KHZ,
                    sound_size: SoundSize::Snd16bit,
                    sound_type: SoundType::SndMono,
                    sound_data: &zelda[tag_start+12..tag_start+11+642]
                }
            ));
        assert_eq!(
            audio_data(&zeldaHQ[tag_start2+11..tag_start2+11+642], 642),
            IResult::Done(
                &b""[..],
                AudioData {
                    sound_format: SoundFormat::ADPCM,
                    sound_rate: SoundRate::_22KHZ,
                    sound_size: SoundSize::Snd16bit,
                    sound_type: SoundType::SndMono,
                    sound_data: &zeldaHQ[tag_start2+12..tag_start2+11+642]
                }
            ));
    }
    #[test]
    fn video_tags() {
        // NOTE(review): both assertions slice `zelda`; the second one
        // (data size 2984, matching zeldaHQ's first tag) looks like it
        // was meant to use `zeldaHQ` — confirm before relying on it.
        let tag_start = 24;
        assert_eq!(
            video_data(&zelda[tag_start..tag_start+537], 537),
            IResult::Done(
                &b""[..],
                VideoData {
                    frame_type: FrameType::Key,
                    codec_id: CodecId::H263,
                    video_data: &zelda[tag_start+1..tag_start+537]
                }
            ));
        assert_eq!(
            video_data(&zelda[tag_start..tag_start+2984], 2984),
            IResult::Done(
                &b""[..],
                VideoData {
                    frame_type: FrameType::Key,
                    codec_id: CodecId::H263,
                    video_data: &zelda[tag_start+1..tag_start+2984]
                }
            ));
    }
}
|
extern crate yada;
use yada::builder::DoubleArrayBuilder;
use yada::DoubleArray;
fn main() {
    // Key/value pairs used to build the trie (keys in sorted order).
    let keyset = &[
        ("a".as_bytes(), 0),
        ("ab".as_bytes(), 1),
        ("abc".as_bytes(), 2),
        ("b".as_bytes(), 3),
        ("bc".as_bytes(), 4),
        ("c".as_bytes(), 5),
    ];
    // Build the double-array trie binary and wrap it in a search handle.
    let da_bytes = DoubleArrayBuilder::build(keyset);
    assert!(da_bytes.is_some());
    let da = DoubleArray::new(da_bytes.unwrap());
    // Every key in the set is found with its associated value...
    for (key, value) in keyset {
        assert_eq!(da.exact_match_search(key), Some(*value as u32));
    }
    // ...while near-misses must not match exactly.
    for miss in &["aa", "aba", "abb", "abcd", "ba", "bb", "bcd", "ca"] {
        assert_eq!(da.exact_match_search(miss.as_bytes()), None);
    }
    // Common-prefix search yields (value, matched_length) for every key
    // that is a prefix of the query, in increasing length order.
    assert_eq!(
        da.common_prefix_search("a".as_bytes()).collect::<Vec<_>>(),
        vec![(0, 1)] // match "a"
    );
    assert_eq!(
        da.common_prefix_search("abc".as_bytes())
            .collect::<Vec<_>>(),
        vec![(0, 1), (1, 2), (2, 3)] // match "a", "ab", "abc"
    );
    assert_eq!(
        da.common_prefix_search("abcd".as_bytes())
            .collect::<Vec<_>>(),
        vec![(0, 1), (1, 2), (2, 3)] // match "a", "ab", "abc"
    );
    assert_eq!(
        da.common_prefix_search("bcd".as_bytes())
            .collect::<Vec<_>>(),
        vec![(3, 1), (4, 2)] // match "b", "bc"
    );
    assert_eq!(
        da.common_prefix_search("d".as_bytes()).collect::<Vec<_>>(),
        vec![] // don't match
    );
}
|
use ansi_term::Colour::Red;
use std::io;
use std::io::Write;
/// Prints `text` (when present) without a trailing newline and flushes
/// stdout so the prompt is visible before we block on stdin.
fn print_before_read(text: &Option<String>) {
    // `if let` replaces the previous `match` with an empty `None` arm.
    if let Some(text) = text {
        print!("{}", text);
        // `print!` does not flush; without this the prompt may not appear.
        io::stdout().flush().unwrap();
    }
}
/// Repeatedly shows the optional prompt and reads a line from stdin
/// until the input parses as an `i32`; parse errors are printed in red
/// and the prompt is shown again.
///
/// # Panics
/// Panics if reading from stdin fails (e.g. stdin is closed).
pub fn read_number(text: Option<String>) -> i32 {
    loop {
        print_before_read(&text);
        // read_line appends to the buffer, so a fresh String is needed
        // on every iteration.
        let mut input = String::new();
        io::stdin().read_line(&mut input).unwrap();
        // trim() strips the trailing newline before parsing.
        let result = input.trim().parse();
        match result {
            Ok(value) => return value,
            Err(err) => {
                // Report the parse error in red and retry.
                println!("{}", Red.paint(err.to_string()));
            }
        }
    }
}
|
//! An implementation of the [BLAKE2][1] hash functions.
//!
//! Based on the [work][2] of Cesar Barros.
//!
//! # Usage
//!
//! An example of using `Blake2b` is:
//!
//! ```rust
//! use blake2::{Blake2b, Digest};
//!
//! // create a Blake2b object
//! let mut hasher = Blake2b::default();
//!
//! // write input message
//! hasher.input(b"hello world");
//!
//! // read hash digest and consume hasher
//! let output = hasher.result();
//! println!("{:x}", output);
//! ```
//!
//! Same for `Blake2s`:
//!
//! ```rust
//! use blake2::{Blake2s, Digest};
//!
//! let mut hasher = Blake2s::default();
//! hasher.input(b"hello world");
//! let output = hasher.result();
//! println!("{:x}", output);
//! ```
//!
//! [1]: https://en.wikipedia.org/wiki/BLAKE_(hash_function)#BLAKE2
//! [2]: https://github.com/cesarb/blake2-rfc
#![no_std]
#![warn(missing_docs)]
#![cfg_attr(feature = "simd", feature(platform_intrinsics, repr_simd))]
#![cfg_attr(feature = "simd_opt", feature(cfg_target_feature))]
#![cfg_attr(feature = "simd_asm", feature(asm))]
extern crate byte_tools;
extern crate digest;
// extern crate crypto_mac;
//extern crate crypto_ops;
extern crate generic_array;
mod consts;
mod as_bytes;
mod bytes;
mod simdty;
mod simdint;
mod simdop;
mod simd_opt;
mod simd;
#[macro_use]
mod blake2;
mod blake2b;
mod blake2s;
pub use digest::Digest;
pub use blake2b::Blake2b;
pub use blake2s::Blake2s;
|
use project::Project;
/// Entry describing one project on the index (overview) page.
#[derive(Serialize, Deserialize, Debug, Default, Clone, Eq, PartialEq)]
pub struct IndexProject {
    /// DOM id: "project-" + lowercased folder name.
    pub id: String,
    /// Folder slug: the project title with spaces replaced by dashes.
    pub project_folder: String,
    /// File name of the project's first image, or "" when it has none.
    pub image_name: String,
    pub title: String,
    /// Copied from `Project::bw_title_image` — presumably "render the
    /// title image in black & white"; confirm against the templates.
    pub b_w: bool,
}
impl IndexProject {
    /// Derives the index entry for `proj`: folder slug, DOM id, first
    /// image (if any) and title.
    pub fn from(proj: &Project) -> IndexProject {
        let project_folder = proj.meta.title.replace(" ", "-");
        println!("creating index project with images {:?}", proj.images);
        // The first project image becomes the preview; fall back to "".
        let image_name = proj
            .images
            .iter()
            .next()
            .map(|img| super::file_name(&img.path))
            .unwrap_or_else(String::new);
        IndexProject {
            id: String::from("project-") + &project_folder.to_lowercase(),
            project_folder,
            image_name,
            title: proj.meta.title.clone(),
            b_w: proj.bw_title_image,
        }
    }
}
/// Template model for a single project page.
#[derive(Serialize, Deserialize, Debug, Default, Clone, Eq, PartialEq)]
pub struct Page {
    pub title: String,
    /// Taken from the project's `meta.context`.
    pub sub_title: String,
    pub teammates: Vec<String>,
    /// File names of the project's images (paths stripped).
    pub images: Vec<String>,
    /// HTML rendered from the project description.
    pub content: String,
    /// Folder slug: the project title with spaces replaced by dashes.
    pub project_folder: String
}
impl Page {
    /// Assembles the page model for `project`: title block, teammates,
    /// image file names, rendered HTML description and the folder slug.
    pub fn from(project: &Project) -> Page {
        println!("Page::from: {:?}", project.images.len());
        let meta = &project.meta;
        // Strip each image down to its bare file name.
        let images: Vec<String> = project
            .images
            .iter()
            .map(|img| {
                println!("mapping image: {:?}", &img);
                super::file_name(&img.path)
            })
            .collect();
        Page {
            title: meta.title.clone(),
            sub_title: meta.context.clone(),
            teammates: meta.teammates.clone(),
            images,
            content: super::generate_html(&project.description),
            project_folder: meta.title.replace(" ", "-"),
        }
    }
} |
use assembly_xml::universe_config::Environment;
/// Reads the universe-config XML file named on the command line,
/// deserializes it into an `Environment` and pretty-prints the result.
fn main() {
    let mut args = std::env::args();
    // argv[0] is the program name; the first real argument is the file.
    let _self = args.next().expect("argv[0] (program name) missing");
    let file = args
        .next()
        .expect("usage: <program> <universe-config.xml>");
    println!("{}", file);
    // Contextual panics replace the bare unwrap()s so failures name the file.
    let xml = std::fs::read_to_string(&file)
        .unwrap_or_else(|err| panic!("could not read {}: {}", file, err));
    let data: Environment = quick_xml::de::from_str(&xml)
        .unwrap_or_else(|err| panic!("could not parse {}: {}", file, err));
    println!("{:#?}", data);
}
|
use comet::immix::Immix;
use comet::immix::ImmixOptions;
use comet::mutator::MutatorRef;
use kompact::prelude::*;
/// Owns the Kompact actor system used by the application.
pub struct Runtime {
    pub system: KompactSystem,
}
impl Runtime {
    /// Builds a runtime around a Kompact system with the default config.
    ///
    /// # Panics
    /// Panics if the Kompact system fails to start.
    pub fn new() -> Self {
        // expect() replaces unwrap() so a startup failure says what broke.
        let system = KompactConfig::default()
            .build()
            .expect("failed to build the default Kompact system");
        Self { system }
    }
}
impl Default for Runtime {
    /// Equivalent to [`Runtime::new`].
    fn default() -> Self {
        Self::new()
    }
}
|
use env_logger::Builder;
use log::LevelFilter;
use stainless_ffmpeg::probe::*;
use std::env;
/// Probes the media file named on the command line and prints the result
/// as JSON (plus a human-readable format block when available).
fn main() {
    let mut builder = Builder::from_default_env();
    builder.init();
    // Bug fix: skip argv[0] first. The previous `env::args().last()` fell
    // back to the program name when no argument was given and probed the
    // binary itself. We still take the *last* real argument, as before.
    if let Some(path) = env::args().skip(1).last() {
        let mut probe = Probe::new(&path);
        probe.process(LevelFilter::Off).unwrap();
        let result = serde_json::to_string(&probe).unwrap();
        println!("{}", result);
        if let Some(format) = probe.format {
            println!("Format : \n{}", format);
        }
    }
}
|
// ------ RCC Clock Frequency related -------------------------
//
// For the `HSI` (High-Speed Internal clock signal), it's an
// internal 16 MHz RC oscillator.
//
// For the `HSE` (High-Speed External clock signal), usually,
// it's external onboard oscillator hardware with the fixed
// working frequency. Make sure you check the hardware datasheet
// before setting `HSE` value (unit in Hertz).
//
// We can pick one of the paths below as the clock source and
// calculate the `SYSCLK` working frequency:
//
// HSI ----------------------------------------------> SYSCLK
// HSE ----------------------------------------------> SYSCLK
// PLL ----> (HSI or HSE) / PLL_M * PLL_N / PLL_P ---> SYSCLK
//
// When the expected working frequency is higher than `HSI` or
// `HSE` can offer, then you have to use `PLL` as clock source!!!
//
// When using `PLL` as clock source, we need to set the correct
// PLL factors (M/N/P) to calculate the `SYSCLK` (system clock)
// and `HCLK` (hardware CPU clock) working frequency.
//
// When using `PLL` as clock source, we should calculate `SYSCLK`
// working frequency with the following formula:
//
// PLL_VCO = ((HSE_FREQUENCY or HSI_FREQUENCY) / PLL_M) * PLL_N
// SYSCLK  = PLL_VCO / PLL_P
//
// `VCO` stands for `Voltage-Controlled Oscillator`
//
//
// Tips for picking the right factor value:
//
// 1. `PLL_M`: We can always make it equal to the `HSI_FREQUENCY`
// or `HSE_FREQUENCY`, then:
//
// `(HSE_FREQUENCY or HSI_FREQUENCY / PLL_M)` always return `1`
//
// which makes the rest of the calculation easier. But keep in mind
// that `PLL_M` has to be within the allowed range provided below.
//
// 2. `PLL_P`: We can try start from `2`, then `PLL_M` and `PLL_P`
// already fixed, only left the `PLL_N` to choose.
//
// 3. If fixed `PLL_M` and `PLL_P` not works, then go to `STM32CubeMX`
// UI to try the combination.
//
#[cfg(feature = "use-stm32f407g-disc1")]
/// Clock-tree constants for the STM32F407G-DISC1 board: bus speed
/// ceilings, flash wait states, and PLL factors that reach the 168 MHz
/// maximum from either the 16 MHz HSI (16/16*336/2) or the 8 MHz HSE
/// (8/8*336/2).
pub mod clock_source_selecting {
    pub const SYS_CLOCK_MAX_SPEED: u32 = 168_000_000;
    pub const APB1_PERIPHERAL_MAX_SPEED: u32 = 42_000_000;
    pub const APB2_PERIPHERAL_MAX_SPEED: u32 = 84_000_000;
    // Multipliers used when deriving the timer clocks from the APB clocks.
    pub const APB1_TIMER_FACTOR: u32 = 2;
    pub const APB2_TIMER_FACTOR: u32 = 2;
    pub const FLASH_LATENCY: u32 = 5;
    // Use HSI --> PLL as clock source and to max frequency
    pub const HSI_FREQUENCY: u32 = 16_000_000;
    pub const AHB_PRESCALER_FOR_HSI: u32 = 1;
    pub const APB1_PRESCALER_FOR_HSI: u32 = 4;
    pub const APB2_PRESCALER_FOR_HSI: u32 = 2;
    pub const PLL_M_PRESCALER_FOR_HSI: u32 = 16; // 2 ≤PLLM ≤63
    pub const PLL_N_PRESCALER_FOR_HSI: u32 = 336; // 50 ≤PLLN ≤432
    pub const PLL_P_PRESCALER_FOR_HSI: u32 = 2; // PLLP = 2, 4, 6, or 8
    pub const PLL_Q_PRESCALER_FOR_HSI: u32 = 4; // PLLQ with 2 ≤PLLQ ≤15
    // Use HSE --> PLL as clock source and to max frequency
    pub const HSE_FREQUENCY: u32 = 8_000_000;
    pub const AHB_PRESCALER_FOR_HSE: u32 = 1;
    pub const APB1_PRESCALER_FOR_HSE: u32 = 4;
    pub const APB2_PRESCALER_FOR_HSE: u32 = 2;
    pub const PLL_M_PRESCALER_FOR_HSE: u32 = 8; // 2 ≤PLLM ≤63
    pub const PLL_N_PRESCALER_FOR_HSE: u32 = 336; // 50 ≤PLLN ≤432
    pub const PLL_P_PRESCALER_FOR_HSE: u32 = 2; // PLLP = 2, 4, 6, or 8
    pub const PLL_Q_PRESCALER_FOR_HSE: u32 = 4; // PLLQ with 2 ≤PLLQ ≤15
}
#[cfg(feature = "use-weact-black-pill")]
/// Clock-tree constants for the WeAct "black pill" board: bus speed
/// ceilings, flash wait states, and PLL factors that reach the 100 MHz
/// maximum from either the 16 MHz HSI (16/16*200/2) or the 25 MHz HSE
/// (25/25*200/2).
pub mod clock_source_selecting {
    pub const SYS_CLOCK_MAX_SPEED: u32 = 100_000_000;
    pub const APB1_PERIPHERAL_MAX_SPEED: u32 = 50_000_000;
    pub const APB2_PERIPHERAL_MAX_SPEED: u32 = 100_000_000;
    // Multipliers used when deriving the timer clocks from the APB clocks.
    pub const APB1_TIMER_FACTOR: u32 = 2;
    pub const APB2_TIMER_FACTOR: u32 = 1;
    pub const FLASH_LATENCY: u32 = 3;
    // Use HSI --> PLL as clock source and to max frequency
    pub const HSI_FREQUENCY: u32 = 16_000_000;
    pub const AHB_PRESCALER_FOR_HSI: u32 = 1;
    pub const APB1_PRESCALER_FOR_HSI: u32 = 2;
    pub const APB2_PRESCALER_FOR_HSI: u32 = 1;
    pub const PLL_M_PRESCALER_FOR_HSI: u32 = 16; // 2 ≤PLLM ≤63
    pub const PLL_N_PRESCALER_FOR_HSI: u32 = 200; // 50 ≤PLLN ≤432
    pub const PLL_P_PRESCALER_FOR_HSI: u32 = 2; // PLLP = 2, 4, 6, or 8
    pub const PLL_Q_PRESCALER_FOR_HSI: u32 = 4; // PLLQ with 2 ≤PLLQ ≤15
    // Use HSE --> PLL as clock source and to max frequency
    pub const HSE_FREQUENCY: u32 = 25_000_000;
    pub const AHB_PRESCALER_FOR_HSE: u32 = 1;
    pub const APB1_PRESCALER_FOR_HSE: u32 = 2;
    pub const APB2_PRESCALER_FOR_HSE: u32 = 1;
    pub const PLL_M_PRESCALER_FOR_HSE: u32 = 25; // 2 ≤PLLM ≤63
    pub const PLL_N_PRESCALER_FOR_HSE: u32 = 200; // 50 ≤PLLN ≤432
    pub const PLL_P_PRESCALER_FOR_HSE: u32 = 2; // PLLP = 2, 4, 6, or 8
    pub const PLL_Q_PRESCALER_FOR_HSE: u32 = 4; // PLLQ with 2 ≤PLLQ ≤15
}
// ------ RCC registers address -------------------------------
/// RCC base address; the `page NN` notes reference the MCU reference
/// manual the author used — TODO confirm which manual/revision.
pub const RCC_CR: u32 = 0x4002_3800; // page 65
// pub const RCC_AHB1RSTR: u32 = RCC_CR + 0x10; // page 233
// pub const RCC_AHB2RSTR: u32 = RCC_CR + 0x14; // page 236
// pub const RCC_AHB3RSTR: u32 = RCC_CR + 0x18; // page 237
// pub const RCC_AHB1ENR: u32 = RCC_CR + 0x30; // page 242, 243
// pub const RCC_AHB1LPENR: u32 = RCC_CR + 0x50; // Low power (sleep) mode, page 250, 252,
// pub const RCC_AHB2ENR: u32 = RCC_CR + 0x34; // page 244
// pub const RCC_AHB2LPENR: u32 = RCC_CR + 0x54; // page 252
|
use crate::ffmpeg::{ TransitionFunc, Size };
/// Transition that cross-fades the whole frame between two images.
pub struct AlphaBlend;
/// Transition with a vertical sweep line: pixels left of the line come
/// from the second image, pixels right of it from the first.
pub struct Vertical;
impl TransitionFunc for AlphaBlend {
    /// Cross-fade: each output byte is the alpha-weighted mix of the
    /// corresponding bytes of `img1` (weight 1 - alpha) and `img2`
    /// (weight alpha), rounded to the nearest integer.
    fn calc(&self, alpha: f32, img1: &Vec<u8>, img2: &Vec<u8>, _size: &Size) -> Vec<u8> {
        let inverse = 1.0 - alpha;
        // Output matches img1's length; positions past the shorter input
        // are left as 0, exactly like the zero-initialized original.
        let mut blended = vec![0u8; img1.len()];
        for ((out, first), second) in blended.iter_mut().zip(img1).zip(img2) {
            *out = (*second as f32 * alpha + *first as f32 * inverse).round() as u8;
        }
        blended
    }
}
impl TransitionFunc for Vertical {
    /// Vertical-edge wipe: `percent` in [0, 1] moves a vertical line
    /// across the frame; pixels left of it come from `img2`, pixels right
    /// of it from `img1`.
    fn calc(&self, percent: f32, img1: &Vec<u8>, img2: &Vec<u8>, size: &Size) -> Vec<u8> {
        let mut r = vec![0; img1.len()];
        // current x position in the picture, 3 -- color channels
        // (assumes tightly packed 3-bytes-per-pixel rows -- TODO confirm
        // the exact pixel format with the ffmpeg wrapper)
        let position = (3.0 * percent * size.width as f32).abs().round() as usize;
        // ... you know ... iterator
        for (i, (d, (a, b))) in r.iter_mut().zip(img1.iter().zip(img2.iter())).enumerate() {
            // x value of current pixel
            let current_x = i % (size.width * 3);
            *d = match current_x > position {
                // right side -- img1
                true => *a,
                // left side -- img2
                false => *b,
            };
        }
        r
    }
} |
/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree.
*/
//! Serialization support for timespec structs.
use serde::Deserialize;
use serde::Serialize;
/// A serializable version of `libc::timespec`.
#[derive(Serialize, Deserialize, Copy, Clone, Eq, PartialEq, Debug, Hash)]
#[repr(C)]
pub struct Timespec {
    /// Seconds
    pub tv_sec: libc::time_t,
    /// Nanoseconds
    pub tv_nsec: libc::c_long,
}
// Field-for-field conversions between Timespec and the libc structs.
impl From<Timespec> for libc::timespec {
    fn from(ts: Timespec) -> libc::timespec {
        libc::timespec {
            tv_sec: ts.tv_sec,
            tv_nsec: ts.tv_nsec,
        }
    }
}
impl From<libc::timespec> for Timespec {
    fn from(ts: libc::timespec) -> Self {
        Self {
            tv_sec: ts.tv_sec,
            tv_nsec: ts.tv_nsec,
        }
    }
}
impl From<libc::statx_timestamp> for Timespec {
    fn from(tp: libc::statx_timestamp) -> Self {
        // `as _` casts bridge the differing libc field types.
        Timespec {
            tv_sec: tp.tv_sec as _,
            tv_nsec: tp.tv_nsec as _,
        }
    }
}
impl From<Timespec> for libc::statx_timestamp {
    fn from(tp: Timespec) -> Self {
        libc::statx_timestamp {
            tv_sec: tp.tv_sec as _,
            tv_nsec: tp.tv_nsec as _,
            // Padding field; zero-filled on construction.
            __statx_timestamp_pad1: [0],
        }
    }
}
impl From<libc::timeval> for Timespec {
    fn from(tv: libc::timeval) -> Self {
        Timespec {
            tv_sec: tv.tv_sec as _,
            // 1 microsecond = 1000 nanoseconds.
            tv_nsec: (1000 * tv.tv_usec) as _,
        }
    }
}
impl From<Timespec> for libc::timeval {
    fn from(ts: Timespec) -> Self {
        libc::timeval {
            tv_sec: ts.tv_sec as _,
            // Integer division: sub-microsecond precision is truncated.
            tv_usec: (ts.tv_nsec / 1000) as _,
        }
    }
}
impl std::fmt::Display for Timespec {
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        write!(
            f,
            "{{ tv_sec: {}, tv_nsec: {} }}",
            self.tv_sec, self.tv_nsec
        )
    }
}
/// A serializable version of `libc::timeval`.
#[derive(Serialize, Deserialize)]
#[derive(Default, Copy, Clone, Eq, PartialEq, Debug, Hash)]
#[repr(C)]
#[allow(missing_docs)]
pub struct Timeval {
    pub tv_sec: libc::time_t,
    pub tv_usec: libc::suseconds_t,
}
impl std::fmt::Display for Timeval {
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        write!(
            f,
            "{{ tv_sec: {}, tv_usec: {} }}",
            self.tv_sec, self.tv_usec
        )
    }
}
/// A serializable version of `libc::timezone`.
#[derive(Serialize, Deserialize)]
#[derive(Default, Copy, Clone, Eq, PartialEq, Debug, Hash)]
#[repr(C)]
#[allow(missing_docs)]
pub struct Timezone {
    tz_minuteswest: libc::c_int,
    tz_dsttime: libc::c_int,
}
// Helper macros defined elsewhere in the crate.
// NOTE(review): `impl_displayable!(Display ...)` coexists with the
// manual `Display` impls above — confirm the macro does not emit a
// second, conflicting `Display` impl for these types.
crate::impl_displayable!(Display Timeval);
crate::impl_displayable!(Display Timespec);
crate::displayable_ptr!(TimevalMutPtr, AddrMut<Timeval>);
crate::displayable_ptr!(TimespecMutPtr, AddrMut<Timespec>);
|
#[doc = r"Register block"]
#[repr(C)]
// repr(C): field order defines the MMIO layout, so the registers below
// must stay in address order (offsets 0x00..0x14, no padding).
pub struct RegisterBlock {
    #[doc = "0x00 - CEC control register"]
    pub cec_cr: CEC_CR,
    #[doc = "0x04 - This register is used to configure the HDMI-CEC controller. It is mandatory to write CEC_CFGR only when CECEN=0."]
    pub cec_cfgr: CEC_CFGR,
    #[doc = "0x08 - CEC Tx data register"]
    pub cec_txdr: CEC_TXDR,
    #[doc = "0x0c - CEC Rx data register"]
    pub cec_rxdr: CEC_RXDR,
    #[doc = "0x10 - CEC Interrupt and Status Register"]
    pub cec_isr: CEC_ISR,
    #[doc = "0x14 - CEC interrupt enable register"]
    pub cec_ier: CEC_IER,
}
#[doc = "CEC_CR (rw) register accessor: CEC control register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`cec_cr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`cec_cr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`cec_cr`]
module"]
pub type CEC_CR = crate::Reg<cec_cr::CEC_CR_SPEC>;
#[doc = "CEC control register"]
pub mod cec_cr;
#[doc = "CEC_CFGR (rw) register accessor: This register is used to configure the HDMI-CEC controller. It is mandatory to write CEC_CFGR only when CECEN=0.\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`cec_cfgr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`cec_cfgr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`cec_cfgr`]
module"]
pub type CEC_CFGR = crate::Reg<cec_cfgr::CEC_CFGR_SPEC>;
#[doc = "This register is used to configure the HDMI-CEC controller. It is mandatory to write CEC_CFGR only when CECEN=0."]
pub mod cec_cfgr;
#[doc = "CEC_TXDR (rw) register accessor: CEC Tx data register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`cec_txdr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`cec_txdr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`cec_txdr`]
module"]
pub type CEC_TXDR = crate::Reg<cec_txdr::CEC_TXDR_SPEC>;
#[doc = "CEC Tx data register"]
pub mod cec_txdr;
#[doc = "CEC_RXDR (r) register accessor: CEC Rx data register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`cec_rxdr::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`cec_rxdr`]
module"]
pub type CEC_RXDR = crate::Reg<cec_rxdr::CEC_RXDR_SPEC>;
#[doc = "CEC Rx data register"]
pub mod cec_rxdr;
#[doc = "CEC_ISR (rw) register accessor: CEC Interrupt and Status Register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`cec_isr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`cec_isr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`cec_isr`]
module"]
pub type CEC_ISR = crate::Reg<cec_isr::CEC_ISR_SPEC>;
#[doc = "CEC Interrupt and Status Register"]
pub mod cec_isr;
#[doc = "CEC_IER (rw) register accessor: CEC interrupt enable register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`cec_ier::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`cec_ier::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`cec_ier`]
module"]
pub type CEC_IER = crate::Reg<cec_ier::CEC_IER_SPEC>;
#[doc = "CEC interrupt enable register"]
pub mod cec_ier;
|
use syn::Attribute;
use syn::Lit;
use syn::Meta;
/// Collects the values of all `#[doc = "..."]` attributes in `attrs` and
/// renders them as a single C-style `/** ... **/` block comment.
///
/// Returns an empty string when no doc attributes are present.
/// (Parameter widened from `&Vec<Attribute>` to `&[Attribute]`; existing
/// callers passing `&vec` still work via deref coercion.)
pub fn get_docs(attrs: &[Attribute]) -> String {
    let mut doc: Vec<String> = vec![];
    for attr in attrs {
        // Only `#[doc = "..."]` name/value attributes contribute; other
        // attributes (and doc attrs that fail to parse) are skipped.
        if let Ok(Meta::NameValue(meta)) = attr.parse_meta() {
            if !meta.path.is_ident("doc") {
                continue;
            }
            if let Lit::Str(lit) = meta.lit {
                doc.push(lit.value());
            }
        }
    }
    if doc.is_empty() {
        String::new()
    } else {
        // Each rustdoc line keeps its leading space, hence " *{}".
        format!("/**\n *{}\n **/\n", doc.join("\n *"))
    }
}
|
use serde::Serialize;
use super::schema::{events, hands, cards};
/// Queryable row of the `events` table.
#[derive(Debug, Serialize, Queryable)]
pub struct Event {
    pub id: i32,
    pub year: i32,
    // presumably flags `year` as approximate ("circa") -- confirm usage
    pub circa: bool,
    pub description: String,
}
/// Queryable row of the `hands` table.
#[derive(Debug, Clone, Copy, Queryable)]
pub struct Hand {
    pub id: i32,
    pub session_hash: i32,
    pub extra_lives: i32
}
/// Queryable row of the `cards` table; references a hand and an event.
#[derive(Debug, Clone, Copy, Queryable)]
pub struct Card {
    pub id: i32,
    pub hand_id: i32,
    pub event_id: i32,
}
/// Insertable form of `Hand` (`id` assigned by the database).
#[derive(Insertable)]
#[table_name="hands"]
pub struct NewHand {
    pub session_hash: i32,
    pub extra_lives: i32,
}
/// Insertable form of `Card` (`id` assigned by the database).
#[derive(Insertable)]
#[table_name="cards"]
pub struct NewCard {
    pub hand_id: i32,
    pub event_id: i32,
}
impl Event {
    /// Builds an `Event`, storing an owned copy of `description`.
    pub fn new(id: i32, year: i32, circa: bool, description: &str) -> Self {
        let description = description.to_string();
        Event {
            id,
            year,
            circa,
            description,
        }
    }
}
impl Clone for Event {
fn clone(&self) -> Self {
Self::new(
self.id,
self.year,
self.circa,
&self.description,
)
}
}
impl Hand {
    /// Convenience constructor mirroring the column order of a `hands` row.
    pub fn new(id: i32, session_hash: i32, extra_lives: i32) -> Self {
        Self { id, session_hash, extra_lives }
    }
} |
#[doc = "Register `ECCR` reader"]
pub type R = crate::R<ECCR_SPEC>;
#[doc = "Register `ECCR` writer"]
pub type W = crate::W<ECCR_SPEC>;
#[doc = "Field `ADDR_ECC` reader - ECC fail address"]
pub type ADDR_ECC_R = crate::FieldReader<u32>;
#[doc = "Field `BK_ECC` reader - BK_ECC"]
pub type BK_ECC_R = crate::BitReader;
#[doc = "Field `SYSF_ECC` reader - SYSF_ECC"]
pub type SYSF_ECC_R = crate::BitReader;
#[doc = "Field `ECCIE` reader - ECC correction interrupt enable"]
pub type ECCIE_R = crate::BitReader;
#[doc = "Field `ECCIE` writer - ECC correction interrupt enable"]
pub type ECCIE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `ECCC2` reader - ECCC2"]
pub type ECCC2_R = crate::BitReader;
#[doc = "Field `ECCC2` writer - ECCC2"]
pub type ECCC2_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `ECCD2` reader - ECCD2"]
pub type ECCD2_R = crate::BitReader;
#[doc = "Field `ECCD2` writer - ECCD2"]
pub type ECCD2_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `ECCC` reader - ECC correction"]
pub type ECCC_R = crate::BitReader;
#[doc = "Field `ECCC` writer - ECC correction"]
pub type ECCC_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `ECCD` reader - ECC detection"]
pub type ECCD_R = crate::BitReader;
#[doc = "Field `ECCD` writer - ECC detection"]
pub type ECCD_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
// Read accessors: each extracts its field from the cached 32-bit
// register value.
impl R {
    #[doc = "Bits 0:18 - ECC fail address"]
    #[inline(always)]
    pub fn addr_ecc(&self) -> ADDR_ECC_R {
        // 0x0007_ffff masks the low 19 bits (bits 0..=18).
        ADDR_ECC_R::new(self.bits & 0x0007_ffff)
    }
    #[doc = "Bit 21 - BK_ECC"]
    #[inline(always)]
    pub fn bk_ecc(&self) -> BK_ECC_R {
        BK_ECC_R::new(((self.bits >> 21) & 1) != 0)
    }
    #[doc = "Bit 22 - SYSF_ECC"]
    #[inline(always)]
    pub fn sysf_ecc(&self) -> SYSF_ECC_R {
        SYSF_ECC_R::new(((self.bits >> 22) & 1) != 0)
    }
    #[doc = "Bit 24 - ECC correction interrupt enable"]
    #[inline(always)]
    pub fn eccie(&self) -> ECCIE_R {
        ECCIE_R::new(((self.bits >> 24) & 1) != 0)
    }
    #[doc = "Bit 28 - ECCC2"]
    #[inline(always)]
    pub fn eccc2(&self) -> ECCC2_R {
        ECCC2_R::new(((self.bits >> 28) & 1) != 0)
    }
    #[doc = "Bit 29 - ECCD2"]
    #[inline(always)]
    pub fn eccd2(&self) -> ECCD2_R {
        ECCD2_R::new(((self.bits >> 29) & 1) != 0)
    }
    #[doc = "Bit 30 - ECC correction"]
    #[inline(always)]
    pub fn eccc(&self) -> ECCC_R {
        ECCC_R::new(((self.bits >> 30) & 1) != 0)
    }
    #[doc = "Bit 31 - ECC detection"]
    #[inline(always)]
    pub fn eccd(&self) -> ECCD_R {
        ECCD_R::new(((self.bits >> 31) & 1) != 0)
    }
}
// Write accessors. Fields without writer types above (ADDR_ECC, BK_ECC,
// SYSF_ECC) have no setters here.
impl W {
    #[doc = "Bit 24 - ECC correction interrupt enable"]
    #[inline(always)]
    #[must_use]
    pub fn eccie(&mut self) -> ECCIE_W<ECCR_SPEC, 24> {
        ECCIE_W::new(self)
    }
    #[doc = "Bit 28 - ECCC2"]
    #[inline(always)]
    #[must_use]
    pub fn eccc2(&mut self) -> ECCC2_W<ECCR_SPEC, 28> {
        ECCC2_W::new(self)
    }
    #[doc = "Bit 29 - ECCD2"]
    #[inline(always)]
    #[must_use]
    pub fn eccd2(&mut self) -> ECCD2_W<ECCR_SPEC, 29> {
        ECCD2_W::new(self)
    }
    #[doc = "Bit 30 - ECC correction"]
    #[inline(always)]
    #[must_use]
    pub fn eccc(&mut self) -> ECCC_W<ECCR_SPEC, 30> {
        ECCC_W::new(self)
    }
    #[doc = "Bit 31 - ECC detection"]
    #[inline(always)]
    #[must_use]
    pub fn eccd(&mut self) -> ECCD_W<ECCR_SPEC, 31> {
        ECCD_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    // Unsafe escape hatch: bypasses the typed field writers, so the
    // caller is responsible for writing a valid register value.
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "Flash ECC register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`eccr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`eccr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct ECCR_SPEC;
// Marker impls wiring ECCR into the generic register API: a 32-bit,
// readable, writable register (no set/clear-to-modify bits) that
// resets to 0.
impl crate::RegisterSpec for ECCR_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`eccr::R`](R) reader structure"]
impl crate::Readable for ECCR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`eccr::W`](W) writer structure"]
impl crate::Writable for ECCR_SPEC {
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets ECCR to value 0"]
impl crate::Resettable for ECCR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
use super::{StreamReader, StreamBuilder};
/// Provides types with symmetric (de)serialization over this module's
/// stream primitives.
/// NOTE(review): the conventional spelling is `Serializable`; renaming
/// the trait would break all implementors, so it is only flagged here.
pub trait Serializeable {
    /// Deserializes from `reader` into `self`, overwriting current state.
    fn deserialize(&mut self, reader: &StreamReader);
    /// Serializes `self` by appending to `builder`.
    fn serialize(&self, builder: &mut StreamBuilder);
}
|
pub mod acceptxmr;
pub mod amplifier_optimizer;
pub mod archviz;
pub mod mnist_tutorial;
pub mod quadcopter;
pub mod this_website;
// Remember, all bindgen functions must be unique and in base scope.
pub use acceptxmr::*;
pub use amplifier_optimizer::*;
pub use archviz::*;
pub use mnist_tutorial::*;
pub use quadcopter::*;
pub use this_website::*;
|
mod game;
mod input;
fn main() {
let mut board = input::init();
let _board = &mut board;
_board.print();
input::get_command(_board);
} |
/// Demonstrates `let` shadowing and reassignment with string slices.
fn main() {
    println!("Hello, world!");
    let a = 1;
    println!("原变量值:{}", a);
    // Shadow `a` with a fresh binding holding the previous value + 1.
    let a = a + 1;
    println!("声明同名变量隐藏a原来的值,去原来的值加1,最后值:{}", a);
    // `&str` is `Copy`, so plain assignment suffices; the original called
    // `.clone()` (a no-op for `Copy` types) and marked `m` as `mut` even
    // though it is never reassigned.
    let m = "hello";
    let mut n = m;
    println!("m original value:{}", m);
    println!("n original value:{}", n);
    // Reassigning `n` leaves `m` untouched: the two bindings are independent.
    n = "ok";
    println!("m original value:{}", m);
    println!("n original value:{}", n);
}
|
#![allow(incomplete_features)]
#![feature(generic_associated_types)]
#![feature(const_generics)]
pub mod opencv; |
use {
std::{env, path::PathBuf, process},
structopt::StructOpt,
util::Result,
};
// Loader architectures this tool can target (used to validate `--arch`).
const SUPPORTED_ARCHES: [&str; 2] = ["x86_64", "aarch64"];
/// Top-level CLI options: a single required subcommand.
#[derive(StructOpt)]
struct Opt {
    #[structopt(subcommand)]
    subcommand: Subcommand,
}
/// Every operation the `onex` CLI supports; each variant is dispatched to a
/// matching handler in `main`.
#[derive(StructOpt)]
enum Subcommand {
    /// create a single-file executable
    Pack {
        /// directory of the app you want to package
        #[structopt(parse(from_os_str))]
        app_dir: PathBuf,
        /// the final packaged exe to be generated
        #[structopt(parse(from_os_str))]
        output_path: PathBuf,
        /// onex_loader.exe file (default use loader bundled with onex.exe)
        #[structopt(long = "loader", conflicts_with("architecture"), parse(from_os_str))]
        loader_path: Option<PathBuf>,
        /// The architecture of the loader you want to use (default the host architecture)
        #[structopt(long = "arch", possible_values(&SUPPORTED_ARCHES), default_value = env::consts::ARCH)]
        architecture: String,
    },
    /// swap out a loader in one packed app for another
    Swap {
        /// the packaged app you want to modify
        #[structopt(parse(from_os_str))]
        app_path: PathBuf,
        /// the new onex_loader.exe file (default use loader bundled with onex.exe)
        #[structopt(long = "loader", parse(from_os_str))]
        loader_path: Option<PathBuf>,
        /// the final packaged exe to be generated (default modify in place)
        #[structopt(long = "output", conflicts_with("architecture"), parse(from_os_str))]
        output_path: Option<PathBuf>,
        /// The architecture of the loader you want to use (default the host architecture)
        #[structopt(long = "arch", possible_values(&SUPPORTED_ARCHES), default_value = env::consts::ARCH)]
        architecture: String,
    },
    /// List the contents of an onex app
    List {
        /// the packaged app you want to list the contents of
        #[structopt(parse(from_os_str))]
        app_path: PathBuf,
    },
    /// Extract the contents of an onex app
    Extract {
        /// the packaged app you want to extract the contents of
        #[structopt(parse(from_os_str))]
        app_path: PathBuf,
        /// the directory to extract to
        #[structopt(parse(from_os_str))]
        output_path: PathBuf,
    },
    /// Succeeds if the given file is an onex app, fails otherwise
    Check {
        /// the packaged app you want to check
        #[structopt(parse(from_os_str))]
        app_path: PathBuf,
    },
    /// Mounts the onex app to the given directory
    Mount {
        /// the packaged app you want to mount
        #[structopt(parse(from_os_str))]
        app_path: PathBuf,
        /// the directory to mount to
        #[structopt(parse(from_os_str))]
        mount_path: PathBuf,
    },
}
/// Parses the command line, dispatches to the matching `onex` operation, and
/// exits with the code produced by that operation (errors propagate via `?`
/// and become a non-zero exit through `Result`'s `Termination`).
fn main() -> Result<()> {
    let options = Opt::from_args();
    let code: i32 = match options.subcommand {
        Subcommand::Pack {
            app_dir,
            output_path,
            loader_path,
            architecture,
        } => onex::package_app(app_dir, output_path, loader_path, architecture).map(|_| 0),
        Subcommand::Swap {
            app_path,
            loader_path,
            output_path,
            architecture,
        } => onex::swap_app_loader(app_path, loader_path, output_path, architecture).map(|_| 0),
        Subcommand::List { app_path } => onex::list_app_contents(app_path).map(|_| 0),
        Subcommand::Extract {
            app_path,
            output_path,
        } => onex::extract_app_contents(app_path, output_path).map(|_| 0),
        Subcommand::Check { app_path } => {
            let is_onex_app = onex::check_app(app_path)?;
            if is_onex_app {
                Ok(0)
            } else {
                eprintln!("This is not an onex app.");
                Ok(1)
            }
        }
        Subcommand::Mount {
            app_path,
            mount_path,
        } => onex::mount_app(app_path, mount_path).map(|_| 0),
    }?;
    process::exit(code);
}
|
pub mod push_down_automaton;
pub mod tree_stack_automaton;
|
use std::convert::TryInto;
use std::io::{self, BufRead};
/// Modular multiplicative inverse of `a` modulo `module`, via the extended
/// Euclidean algorithm, normalized into `0..module`.
///
/// Assumes `gcd(a, module) == 1` (true for this puzzle's pairwise-coprime
/// bus ids); otherwise the returned value is not a true inverse.
fn modinv(a: isize, module: isize) -> isize {
    // `mn` tracks the remainder pair, `xy` the Bézout coefficient pair.
    let mut mn = (module, a);
    let mut xy = (0, 1);
    while mn.1 != 0 {
        let q = mn.0 / mn.1;
        xy = (xy.1, xy.0 - q * xy.1);
        mn = (mn.1, mn.0 % mn.1);
    }
    // `rem_euclid` maps a possibly-negative coefficient into 0..module in a
    // single step (the original looped, repeatedly adding `module`).
    xy.0.rem_euclid(module)
}
/// Part 1: pick the bus with the shortest wait at or after timestamp `n` and
/// return `wait * bus_id`.
///
/// The wait is `(x - n % x) % x`; the trailing `% x` fixes the edge case
/// where a bus departs exactly at `n` (the original computed a full period
/// `x` instead of a zero wait).
///
/// # Panics
/// Panics if `xs` is empty.
fn part1(n: usize, xs: &[usize]) -> usize {
    // Minutes until bus `x` next departs, counting from `n`.
    let wait = |x: usize| (x - n % x) % x;
    let best = *xs.iter().min_by_key(|&&x| wait(x)).unwrap();
    wait(best) * best
}
/// Part 2 (Chinese Remainder Theorem): the earliest timestamp `t` such that
/// for every `(offset, modulus)` pair the bus departs at `t + offset`.
fn part2(xs: &[(usize, usize)]) -> usize {
    // Invariant: `t` satisfies all constraints processed so far, and `step`
    // is the product of their moduli, so `t + k * step` preserves them all.
    let mut t = 0;
    let mut step = 1;
    for &(offset, modulus) in xs {
        let inv: usize = modinv(step as isize, modulus as isize)
            .try_into()
            .unwrap();
        let residue = (t + offset) * inv % modulus;
        t += (modulus - residue) % modulus * step;
        step *= modulus;
    }
    t
}
/// Reads the departure timestamp and the bus schedule from stdin, then prints
/// the answers to both puzzle parts.
fn main() {
    let stdin = io::stdin();
    let mut lines = stdin.lock().lines().filter_map(Result::ok);
    // First line: earliest departure time.
    let depart: usize = lines.next().unwrap().parse().unwrap();
    // Second line: comma-separated bus ids; non-numeric entries ("x") are
    // dropped but their positional offset is kept for part 2.
    let schedule: Vec<(usize, usize)> = lines
        .next()
        .unwrap()
        .split(',')
        .enumerate()
        .filter_map(|(offset, token)| token.parse().ok().map(|id| (offset, id)))
        .collect();
    let ids: Vec<usize> = schedule.iter().map(|&(_, id)| id).collect();
    println!("Part 1: {}", part1(depart, &ids));
    println!("Part 2: {}", part2(&schedule));
}
|
/// Tour of Rust's arithmetic, bitwise, logical, and unary operators,
/// printing each result.
fn main() {
    // Common arithmetic operators: +, -, *, /, %.
    for value in [20 + 10, 20 - 10, 20 * 10, 20 / 10, 20 % 3].iter() {
        println!("a is {}", value);
    }
    // Division: integer division truncates, floating-point does not.
    let numerator = 10;
    let denominator = 3;
    println!("a / b is {}", numerator / denominator);
    let numerator = 10.0;
    let denominator = 3.0;
    println!("a / b is {}", numerator / denominator);
    // Compound assignment: `x += 20` is shorthand for `x = x + 20`.
    let mut accumulator = 10;
    accumulator += 20;
    println!("a is {}", accumulator);
    // Sum 0..10 (= 45) with an iterator instead of a manual loop.
    let total: i32 = (0..10).sum();
    println!("sum is {}", total);
    // Bit-shift operators.
    let byte: u8 = 0x02;
    println!("a << 1 is {}", byte << 1);
    println!("a >> 1 is {}", byte >> 1);
    // Bitwise AND / OR.
    let lhs: u8 = 0b1111;
    let rhs: u8 = 0b0011;
    println!("a & b is {:04b}", lhs & rhs);
    println!("a | b is {:04b}", lhs | rhs);
    // Short-circuiting logical operators.
    let p = true;
    let q = false;
    println!("a && b is {}", p && q);
    println!("a || b is {}", p || q);
    // Unary logical negation.
    let p = true;
    println!("a is {}, b is {}", p, !p);
}
|
use crate::constants::{CURVE_ORDER, GROUP_G1_SIZE, FIELD_ORDER_ELEMENT_SIZE};
use crate::errors::{SerzDeserzError, ValueError};
use crate::curve_order_elem::{CurveOrderElement, CurveOrderElementVector};
use crate::group_elem::{GroupElement, GroupElementVector};
use crate::types::{GroupG1, FP, BigNum};
use crate::utils::hash_msg;
use std::ops::{Add, AddAssign, Index, IndexMut, Mul, Neg, Sub, SubAssign};
use std::fmt;
use std::hash::{Hash, Hasher};
use std::slice::Iter;
use rayon::prelude::*;
use serde::de::{Error as DError, Visitor};
use serde::{Deserialize, Deserializer, Serialize, Serializer};
use std::str::{FromStr, SplitWhitespace};
use zeroize::Zeroize;
use hash2curve::HashToCurveXmd;
/// A point in the curve's G1 group; a thin wrapper around the AMCL
/// `GroupG1` point type.
#[derive(Clone, Debug)]
pub struct G1 {
    // Raw AMCL point; kept private so all mutation flows through the
    // `GroupElement` API.
    value: GroupG1,
}
impl GroupElement for G1 {
    /// A fresh point as produced by AMCL's `GroupG1::new`.
    fn new() -> Self {
        Self {
            value: GroupG1::new(),
        }
    }
    /// The group identity (point at infinity).
    fn identity() -> Self {
        let mut v = GroupG1::new();
        v.inf();
        Self { value: v }
    }
    /// This is an arbitrary choice. Any group element can be a generator
    fn generator() -> Self {
        GroupG1::generator().into()
    }
    /// True iff this is the point at infinity.
    fn is_identity(&self) -> bool {
        self.value.is_infinity()
    }
    /// Overwrites this point with the identity (point at infinity).
    fn set_to_identity(&mut self) {
        self.value.inf()
    }
    /// Hash-and-map: hashes `msg` and maps the digest onto the curve via
    /// AMCL's `mapit`. NOTE(review): this is the legacy mapping, distinct
    /// from the IETF hash-to-curve construction in `hash_to_curve` below.
    fn from_msg_hash(msg: &[u8]) -> Self {
        GroupG1::mapit(&hash_msg(msg)).into()
    }
    /// IETF-style hash-to-curve (SSWU, expand_message_xmd with SHA-256)
    /// under the `bls381` feature; returns the identity if hashing fails.
    #[cfg(feature = "bls381")]
    fn hash_to_curve(msg: &[u8], dst: &hash2curve::DomainSeparationTag) -> Self {
        let hasher = hash2curve::bls381g1::Bls12381G1Sswu::new(dst.clone());
        match hasher.hash_to_curve_xmd::<sha2::Sha256>(msg) {
            Ok(p) => {
                p.0.into()
            },
            Err(_) => Self::identity()
        }
    }
    /// Without the `bls381` feature there is no hash-to-curve backend; this
    /// panics unconditionally.
    #[cfg(not(feature = "bls381"))]
    fn hash_to_curve(msg: &[u8], dst: &hash2curve::DomainSeparationTag) -> Self {
        unimplemented!();
    }
    /// Serializes the point into a freshly allocated `GROUP_G1_SIZE` buffer.
    fn to_vec(&self) -> Vec<u8> {
        let mut bytes: [u8; GROUP_G1_SIZE] = [0; GROUP_G1_SIZE];
        self.write_to_slice_unchecked(&mut bytes);
        bytes.to_vec()
    }
    /// Deserializes a point from exactly `GROUP_G1_SIZE` bytes.
    fn from_slice(bytes: &[u8]) -> Result<Self, SerzDeserzError> {
        if bytes.len() != GROUP_G1_SIZE {
            return Err(SerzDeserzError::G1BytesIncorrectSize(
                bytes.len(),
                GROUP_G1_SIZE,
            ));
        }
        Ok(GroupG1::frombytes(bytes).into())
    }
    /// Length-checked serialization into a caller-provided buffer.
    fn write_to_slice(&self, target: &mut [u8]) -> Result<(), SerzDeserzError> {
        if target.len() != GROUP_G1_SIZE {
            return Err(SerzDeserzError::G1BytesIncorrectSize(
                target.len(),
                GROUP_G1_SIZE,
            ));
        }
        self.write_to_slice_unchecked(target);
        Ok(())
    }
    /// Serializes without a length check; `target` must hold at least
    /// `GROUP_G1_SIZE` bytes. The `false` flag requests AMCL's uncompressed
    /// encoding. A scratch copy is used because `tobytes` needs `&mut self`.
    fn write_to_slice_unchecked(&self, target: &mut [u8]) {
        let mut temp = GroupG1::new();
        temp.copy(&self.value);
        temp.tobytes(target, false);
    }
    /// In-place point addition: `self += b`.
    fn add_assign_(&mut self, b: &Self) {
        self.value.add(&b.value);
    }
    /// In-place point subtraction: `self -= b`.
    fn sub_assign_(&mut self, b: &Self) {
        self.value.sub(&b.value);
    }
    /// Returns `self + b` without mutating either operand.
    fn plus(&self, b: &Self) -> Self {
        let mut sum = self.value.clone();
        sum.add(&b.value);
        sum.into()
    }
    /// Returns `self - b` without mutating either operand.
    fn minus(&self, b: &Self) -> Self {
        let mut diff = self.value.clone();
        diff.sub(&b.value);
        diff.into()
    }
    /// Scalar multiplication `self * a` via AMCL's `mul` (named const-time;
    /// the timing guarantee is AMCL's, not enforced here).
    fn scalar_mul_const_time(&self, a: &CurveOrderElement) -> Self {
        self.value.mul(&a.to_bignum()).into()
    }
    /// Returns `2 * self`.
    fn double(&self) -> Self {
        let mut d = self.value.clone();
        d.dbl();
        d.into()
    }
    /// Doubles the point in place.
    fn double_mut(&mut self) {
        self.value.dbl();
    }
    /// Hex encoding as produced by AMCL (whitespace-separated FP values).
    fn to_hex(&self) -> String {
        self.value.to_hex()
    }
    /// Parses three whitespace-separated FP values as projective x, y, z
    /// coordinates. NOTE(review): the coordinates are set without checking
    /// that the resulting point lies on the curve — confirm callers only
    /// pass trusted input.
    fn from_hex(s: String) -> Result<Self, SerzDeserzError> {
        let mut iter = s.split_whitespace();
        let x = parse_hex_as_fp(&mut iter)?;
        let y = parse_hex_as_fp(&mut iter)?;
        let z = parse_hex_as_fp(&mut iter)?;
        let mut value = GroupG1::new();
        value.setpx(x);
        value.setpy(y);
        value.setpz(z);
        Ok(Self { value })
    }
    /// Returns `-self` (negation on a copy; `to_ecp` presumably yields the
    /// raw AMCL point — provided by a conversion macro elsewhere).
    fn negation(&self) -> Self {
        let mut n = self.to_ecp();
        n.neg();
        n.into()
    }
    /// G1 lives over the base prime field, not an extension field.
    fn is_extension() -> bool {
        return false;
    }
    /// Subgroup check: `order * P` must be the point at infinity.
    fn has_correct_order(&self) -> bool {
        return self.value.mul(&CURVE_ORDER).is_infinity();
    }
}
impl G1 {
    /// Computes sum of 2 scalar multiplications.
    /// Faster than doing the scalar multiplications individually and then adding them. Uses lookup table
    /// returns self*a + h*b
    pub fn binary_scalar_mul(&self, h: &Self, a: &CurveOrderElement, b: &CurveOrderElement) -> Self {
        self.value
            .mul2(&a.to_bignum(), &h.to_ecp(), &b.to_bignum())
            .into()
    }
    /// Fixed-size uncompressed encoding: x || y. AMCL's `tobytes` writes a
    /// one-byte prefix first, which is stripped by taking the slice at
    /// offset 1.
    pub fn to_bytes(&self) -> [u8; 2 * FIELD_ORDER_ELEMENT_SIZE] {
        let mut bytes = [0u8; 2 * FIELD_ORDER_ELEMENT_SIZE + 1];
        let mut temp = GroupG1::new();
        temp.copy(&self.value);
        // Scratch copy because `tobytes` requires `&mut self`.
        temp.tobytes(bytes.as_mut(), false);
        *array_ref![bytes, 1, 2 * FIELD_ORDER_ELEMENT_SIZE]
    }
    /// Fixed-size compressed encoding. The low bit of AMCL's prefix byte
    /// (the y-parity) is folded into the top bit of the first returned byte;
    /// `From<&[u8; FIELD_ORDER_ELEMENT_SIZE]>` below reverses this packing.
    pub fn to_compressed_bytes(&self) -> [u8; FIELD_ORDER_ELEMENT_SIZE] {
        let mut bytes = [0u8; FIELD_ORDER_ELEMENT_SIZE + 1];
        let mut temp = GroupG1::new();
        temp.copy(&self.value);
        temp.tobytes(bytes.as_mut(), true);
        bytes[1] |= (bytes[0] & 1) << 7;
        *array_ref![bytes, 1, FIELD_ORDER_ELEMENT_SIZE]
    }
}
// Owned-array variant simply delegates to the borrowed-array conversion.
impl From<[u8; 2 * FIELD_ORDER_ELEMENT_SIZE]> for G1 {
    fn from(data: [u8; 2 * FIELD_ORDER_ELEMENT_SIZE]) -> Self {
        Self::from(&data)
    }
}
// Builds a point from the uncompressed encoding produced by `to_bytes`:
// the first half is x, the second half is y.
impl From<&[u8; 2 * FIELD_ORDER_ELEMENT_SIZE]> for G1 {
    fn from(data: &[u8; 2 * FIELD_ORDER_ELEMENT_SIZE]) -> Self {
        let x = BigNum::frombytes(&data[..FIELD_ORDER_ELEMENT_SIZE]);
        let y = BigNum::frombytes(&data[FIELD_ORDER_ELEMENT_SIZE..]);
        Self { value: GroupG1::new_bigs(&x, &y) }
    }
}
// Owned-array variant simply delegates to the borrowed-array conversion.
impl From<[u8; FIELD_ORDER_ELEMENT_SIZE]> for G1 {
    fn from(data: [u8; FIELD_ORDER_ELEMENT_SIZE]) -> Self {
        Self::from(&data)
    }
}
// Builds a point from the compressed encoding produced by
// `to_compressed_bytes`: the top bit of the first byte carries the y-parity,
// the remaining bits are the x coordinate.
impl From<&[u8; FIELD_ORDER_ELEMENT_SIZE]> for G1 {
    fn from(data: &[u8; FIELD_ORDER_ELEMENT_SIZE]) -> Self {
        let parity = ((data[0] >> 7) & 1u8) as isize;
        let mut temp = data.clone();
        // Clear the parity bit before interpreting the bytes as x.
        temp[0] &= 0x7F;
        Self { value: GroupG1::new_bigint(&BigNum::frombytes(&temp[..]), parity) }
    }
}
// Derive the shared trait plumbing for `G1` via crate-local macros
// (NOTE(review): macro bodies live elsewhere in the crate — these names
// describe intent, not verified contents).
impl_group_elem_traits!(G1, GroupG1);
impl_group_elem_conversions!(G1, GroupG1, GROUP_G1_SIZE);
impl_group_elem_ops!(G1);
impl_scalar_mul_ops!(G1);
impl_group_element_lookup_table!(G1, G1LookupTable);
// Optimized (precomputed lookup-table) scalar-multiplication operations.
impl_optmz_scalar_mul_ops!(G1, GroupG1, G1LookupTable);
/// A vector of `G1` points; vectorized operations (inner products,
/// multi-scalar multiplication, conversions) come from the macros below.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct G1Vector {
    elems: Vec<G1>,
}
impl_group_elem_vec_ops!(G1, G1Vector);
impl_group_elem_vec_product_ops!(G1, G1Vector, G1LookupTable);
impl_group_elem_vec_conversions!(G1, G1Vector);
/// Parses the next two whitespace-separated tokens from `iter` as an AMCL
/// `FP`: first the `xes` excess counter, then the hex-encoded big integer.
///
/// Logic adapted from AMCL, with explicit error handling added; used during
/// serialization and deserialization of points.
pub fn parse_hex_as_fp(iter: &mut SplitWhitespace) -> Result<FP, SerzDeserzError> {
    // `xes` can never be negative, so parse as u32 before widening to i32.
    let xes = iter
        .next()
        .ok_or(SerzDeserzError::CannotParseFP)?
        .parse::<u32>()
        .map_err(|_| SerzDeserzError::CannotParseFP)? as i32;
    let x = match iter.next() {
        Some(token) => CurveOrderElement::parse_hex_as_bignum(token.to_string())?,
        None => return Err(SerzDeserzError::CannotParseFP),
    };
    Ok(FP { x, xes })
}
#[cfg(test)]
mod test {
    use super::*;
    use std::time::Instant;
    use hash2curve::DomainSeparationTag;
    // Round-trips the generator and a fixed point through the compressed
    // byte encoding.
    #[test]
    fn compressed_tests() {
        let g1 = G1::generator();
        let compressed_bytes = g1.to_compressed_bytes();
        let g1_1 = G1::from(compressed_bytes);
        assert_eq!(g1, g1_1);
        let e = G1::from_hex("1 0546197CDBA187E858730894C66FAEB35E7DBE4C61646786FB85B3EBB78377B1711797A884CBE8302A23463FFFD00190 1 11CF30309EA1AF4BB47FC4D5219529347F9576201EE34DE933C96F83FBB8B2AC22387B593C5F148924B571FE605B337F 2 13317C30F3A0D636D56A23C34FDD80B891ECBDE7C2B7D6E16B0F4B0B7E6D26CB6147ACDE629C4A23C57400D203A9FB84".to_string()).unwrap();
        let compressed_bytes = e.to_compressed_bytes();
        let e1 = G1::from(compressed_bytes);
        assert_eq!(e, e1);
    }
    // Checks hash_to_curve against a known-good point. NOTE(review): only
    // meaningful with the `bls381` feature; without it hash_to_curve panics
    // via unimplemented!().
    #[test]
    fn test_hash_to_curve() {
        let e = G1::from_hex("1 060595F69756F0528EAACAC84E6844CEBCB844042DC456EC738F7332221BF27CA38E6C3DC2FD8710FFE37ECB92779E46 1 153980BAF12F8EB31BBF9D8CACE7FB750AB76CEE4DD118CCB1A27637BE878544128ABDDA8F7E22A393B7920547BF6DEC 2 13317C30F3A0D636D56A23C34FDD80B891ECBDE7C2B7D6E16B0F4B0B7E6D26CB6147ACDE629C4A23C57400D203A9FB84".to_string()).unwrap();
        let dst = DomainSeparationTag::new(b"hash_to_curve_", Some(b"test"), None, None).unwrap();
        let g = G1::hash_to_curve(b"message to be hashed", &dst);
        assert!(!g.is_identity());
        assert_eq!(e, g);
    }
    #[test]
    fn test_parse_hex_for_fp() {
        // TODO:
    }
    #[test]
    fn test_parse_bad_hex_for_fp() {
        // TODO:
    }
    // binary_scalar_mul(h, a, b) must equal g*a + h*b computed separately.
    #[test]
    fn test_binary_scalar_mul() {
        for _ in 0..10 {
            let a = CurveOrderElement::random();
            let b = CurveOrderElement::random();
            let g = G1::random();
            let h = G1::random();
            assert_eq!(&g * &a + &h * &b, g.binary_scalar_mul(&h, &a, &b))
        }
    }
    // get_multiples(n) must return [1*a, 2*a, ..., n*a] in order.
    #[test]
    fn test_multiples() {
        for _ in 0..10 {
            let a = G1::random();
            let mults = a.get_multiples(17);
            for i in 1..=17 {
                assert_eq!(mults[i - 1], (&a * CurveOrderElement::from(i as u8)));
            }
        }
    }
    // Benchmarks the multi-scalar-multiplication variants against each other
    // and asserts they all agree with the naive implementation.
    #[test]
    fn timing_multi_scalar_multiplication() {
        let mut fs = vec![];
        let mut gs = vec![];
        let n = 64;
        for _ in 0..n {
            fs.push(CurveOrderElement::random());
            gs.push(G1::random());
        }
        let gv = G1Vector::from(gs.as_slice());
        let fv = CurveOrderElementVector::from(fs.as_slice());
        let mut start = Instant::now();
        let res = gv.multi_scalar_mul_const_time_naive(&fv).unwrap();
        let const_time_naive = start.elapsed();
        start = Instant::now();
        let res_1 = gv.multi_scalar_mul_var_time(fv.iter()).unwrap();
        let var_time = start.elapsed();
        assert_eq!(res_1, res);
        // Variable-time with precomputed wNAF lookup tables (window = 5).
        let lookup_tables: Vec<_> = gv
            .as_slice()
            .into_iter()
            .map(|e| e.to_wnaf_lookup_table(5))
            .collect();
        let f_refs: Vec<&CurveOrderElement> = fs.iter().map(|f| f).collect();
        start = Instant::now();
        let res_2 =
            G1Vector::multi_scalar_mul_var_time_with_precomputation_done(&lookup_tables, f_refs)
                .unwrap();
        let var_precomp_time = start.elapsed();
        assert_eq!(res_2, res);
        start = Instant::now();
        let res_3 = gv.multi_scalar_mul_const_time(fv.as_ref()).unwrap();
        let const_time = start.elapsed();
        assert_eq!(res_3, res);
        // Constant-time with precomputed small multiples (up to 7 each).
        let group_elem_multiples: Vec<_> = gv
            .as_slice()
            .into_iter()
            .map(|e| e.get_multiples(7))
            .collect();
        start = Instant::now();
        let res_4 = G1Vector::multi_scalar_mul_const_time_with_precomputation_done(
            &group_elem_multiples,
            fv.as_slice(),
        )
        .unwrap();
        let const_precomp_time = start.elapsed();
        assert_eq!(res_4, res);
        println!(
            "Constant time for {} size multi-scalar multiplications using naive method: {:?}",
            n, const_time_naive
        );
        println!(
            "Constant time for {} size multi-scalar multiplications: {:?}",
            n, const_time
        );
        println!(
            "Constant time with pre-computation for {} size multi-scalar multiplications: {:?}",
            n, const_precomp_time
        );
        println!(
            "Variable time for {} size multi-scalar multiplications: {:?}",
            n, var_time
        );
        println!(
            "Variable time with pre-computation for {} size multi-scalar multiplications: {:?}",
            n, var_precomp_time
        );
    }
    // Compares plain scalar multiplication against wNAF multiplication with
    // a width-5 window; timing only, no correctness assertion.
    #[test]
    fn timing_wnaf_mul() {
        let mut fs = vec![];
        let mut gs = vec![];
        let n = 64;
        let w = 5;
        for _ in 0..n {
            fs.push(CurveOrderElement::random());
            gs.push(G1::random());
        }
        let gv = G1Vector::from(gs.as_slice());
        let fv = CurveOrderElementVector::from(fs.as_slice());
        let mut start = Instant::now();
        for i in 0..n {
            // The compiler might not execute the statement below
            let _ = &gv[i] * &fv[i];
        }
        println!(
            "Time for {} scalar multiplications: {:?}",
            n,
            start.elapsed()
        );
        start = Instant::now();
        for i in 0..n {
            let naf = fv[i].to_wnaf(w);
            let table = gv[i].to_wnaf_lookup_table(w);
            // The compiler might not execute the statement below
            G1::wnaf_mul(&table, &naf);
        }
        println!(
            "Time for {} scalar multiplications using wnaf: {:?}",
            n,
            start.elapsed()
        );
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.