text stringlengths 8 4.13M |
|---|
fn main() {
    // Classic FizzBuzz over 1..=100: multiples of 15 print "fizzbuzz",
    // of 5 "buzz", of 3 "fizz", anything else the number itself.
    // (Was `1..101`; rewritten as an inclusive range for consistency with
    // the second pass below — same iteration, clearer intent.)
    for n in 1..=100 {
        if n % 15 == 0 {
            println!("fizzbuzz");
        } else if n % 5 == 0 {
            println!("buzz");
        } else if n % 3 == 0 {
            println!("fizz");
        } else {
            println!("{}", n);
        }
    }
    // Second pass with "2"-suffixed labels, using the inclusive-range form.
    for n in 1..=100 {
        if n % 15 == 0 {
            println!("fizzbuzz2");
        } else if n % 5 == 0 {
            println!("buzz2");
        } else if n % 3 == 0 {
            println!("fizz2");
        } else {
            println!("{}", n);
        }
    }
    // Borrowing iteration: `iter()` yields `&&str`, so the vector remains
    // usable after the loop.
    let names = vec!["Bob", "Frank", "Ferris"];
    for name in names.iter() {
        match name {
            &"Ferris" => println!("There is a rustacean among us!"),
            _ => println!("Hello {}", name),
        }
    }
    println!("{:?}", names);
    // Consuming iteration: `into_iter()` moves the vector's elements out.
    let names2 = vec!["Bob", "Frank", "Ferris"];
    for name in names2.into_iter() {
        match name {
            "Ferris" => println!("There is a rustacean among us!"),
            _ => println!("Hello {}", name),
        }
    }
    //println!("{:?}", names2);// error: use of moved value
    // once the collection has been consumed it is no longer available for reuse
    // Mutable iteration: `iter_mut()` yields `&mut &str`; the vector stays owned.
    let mut names3 = vec!["Bob", "Frank", "Ferris"];
    for name in names3.iter_mut() {
        match name {
            &mut "Ferris" => println!("There is a rustacean among us!"),
            _ => println!("Hello {}", name),
        }
    }
    println!("{:?}", names3);
}
// svd2rust-generated reader/writer aliases for the SQR1 (regular sequence)
// register: each SQx field is a 5-bit conversion-slot selector, L is the
// 4-bit sequence length.
#[doc = "Register `SQR1` reader"]
pub type R = crate::R<SQR1_SPEC>;
#[doc = "Register `SQR1` writer"]
pub type W = crate::W<SQR1_SPEC>;
#[doc = "Field `SQ25` reader - 25th conversion in regular sequence"]
pub type SQ25_R = crate::FieldReader;
#[doc = "Field `SQ25` writer - 25th conversion in regular sequence"]
pub type SQ25_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 5, O>;
#[doc = "Field `SQ26` reader - 26th conversion in regular sequence"]
pub type SQ26_R = crate::FieldReader;
#[doc = "Field `SQ26` writer - 26th conversion in regular sequence"]
pub type SQ26_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 5, O>;
#[doc = "Field `SQ27` reader - 27th conversion in regular sequence"]
pub type SQ27_R = crate::FieldReader;
#[doc = "Field `SQ27` writer - 27th conversion in regular sequence"]
pub type SQ27_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 5, O>;
#[doc = "Field `SQ28` reader - 28th conversion in regular sequence"]
pub type SQ28_R = crate::FieldReader;
#[doc = "Field `SQ28` writer - 28th conversion in regular sequence"]
pub type SQ28_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 5, O>;
#[doc = "Field `L` reader - Regular channel sequence length"]
pub type L_R = crate::FieldReader;
#[doc = "Field `L` writer - Regular channel sequence length"]
pub type L_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 4, O>;
impl R {
    // Generated field accessors: each shifts the raw 32-bit register value to
    // the field's LSB and masks to the field width (0x1f = 5 bits for SQx,
    // 0x0f = 4 bits for L).
    #[doc = "Bits 0:4 - 25th conversion in regular sequence"]
    #[inline(always)]
    pub fn sq25(&self) -> SQ25_R {
        SQ25_R::new((self.bits & 0x1f) as u8)
    }
    #[doc = "Bits 5:9 - 26th conversion in regular sequence"]
    #[inline(always)]
    pub fn sq26(&self) -> SQ26_R {
        SQ26_R::new(((self.bits >> 5) & 0x1f) as u8)
    }
    #[doc = "Bits 10:14 - 27th conversion in regular sequence"]
    #[inline(always)]
    pub fn sq27(&self) -> SQ27_R {
        SQ27_R::new(((self.bits >> 10) & 0x1f) as u8)
    }
    #[doc = "Bits 15:19 - 28th conversion in regular sequence"]
    #[inline(always)]
    pub fn sq28(&self) -> SQ28_R {
        SQ28_R::new(((self.bits >> 15) & 0x1f) as u8)
    }
    #[doc = "Bits 20:23 - Regular channel sequence length"]
    #[inline(always)]
    pub fn l(&self) -> L_R {
        L_R::new(((self.bits >> 20) & 0x0f) as u8)
    }
}
impl W {
    // Generated field writers: each returns a proxy positioned at the field's
    // bit offset (the second const generic); the proxy performs the masked
    // read-modify-write when its value is set.
    #[doc = "Bits 0:4 - 25th conversion in regular sequence"]
    #[inline(always)]
    #[must_use]
    pub fn sq25(&mut self) -> SQ25_W<SQR1_SPEC, 0> {
        SQ25_W::new(self)
    }
    #[doc = "Bits 5:9 - 26th conversion in regular sequence"]
    #[inline(always)]
    #[must_use]
    pub fn sq26(&mut self) -> SQ26_W<SQR1_SPEC, 5> {
        SQ26_W::new(self)
    }
    #[doc = "Bits 10:14 - 27th conversion in regular sequence"]
    #[inline(always)]
    #[must_use]
    pub fn sq27(&mut self) -> SQ27_W<SQR1_SPEC, 10> {
        SQ27_W::new(self)
    }
    #[doc = "Bits 15:19 - 28th conversion in regular sequence"]
    #[inline(always)]
    #[must_use]
    pub fn sq28(&mut self) -> SQ28_W<SQR1_SPEC, 15> {
        SQ28_W::new(self)
    }
    #[doc = "Bits 20:23 - Regular channel sequence length"]
    #[inline(always)]
    #[must_use]
    pub fn l(&mut self) -> L_W<SQR1_SPEC, 20> {
        L_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    // `unsafe`: callers are responsible for writing a bit pattern that is
    // valid for this register (no per-field masking is applied here).
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "regular sequence register 1\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`sqr1::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`sqr1::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct SQR1_SPEC;
impl crate::RegisterSpec for SQR1_SPEC {
    // Underlying register width: 32 bits.
    type Ux = u32;
}
#[doc = "`read()` method returns [`sqr1::R`](R) reader structure"]
impl crate::Readable for SQR1_SPEC {}
#[doc = "`write(|w| ..)` method takes [`sqr1::W`](W) writer structure"]
impl crate::Writable for SQR1_SPEC {
    // No bits require writing 0/1 to leave other fields unmodified.
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets SQR1 to value 0"]
impl crate::Resettable for SQR1_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
use io::hid;
use realtime;
/// Polls the button state every 10 ms until, on the `active` bits, the state
/// equals `pressed` AND differs from the previous sample (i.e. an edge onto
/// the target combination). `prev_ref` carries the last sample between calls.
/// Returns the full raw mask at the moment the condition became true.
fn wait_for_button_mask(active: u16, pressed: u16, prev_ref: &mut u16) -> u16 {
    loop {
        let now = hid::pressed_mask();
        let before = *prev_ref;
        *prev_ref = now;
        let at_target = (now ^ pressed) & active == 0;
        let edged = (now ^ before) & active != 0;
        if at_target && edged {
            return now;
        }
        // Sleep failures are deliberately ignored; we just poll again.
        let _ = realtime::try_msleep(10);
    }
}
/// Polls every 10 ms until any `active` button transitions from released to
/// pressed (a rising edge relative to the previous sample in `prev_ref`).
/// Returns the full raw mask at that moment.
fn wait_for_any_mask(active: u16, prev_ref: &mut u16) -> u16 {
    loop {
        let now = hid::pressed_mask();
        let before = *prev_ref;
        *prev_ref = now;
        // Bits newly set since the last sample, restricted to `active`.
        if (now & !before & active) != 0 {
            return now;
        }
        let _ = realtime::try_msleep(10);
    }
}
#[inline(always)]
/// Blocks until any button is newly pressed and returns the resulting state.
pub fn wait_for_any() -> hid::ButtonsPressed {
    // Seed the edge detector with the current state so a button that is
    // already held does not fire immediately.
    let mut last_sample = hid::pressed_mask();
    wait_for_any_mask(!0, &mut last_sample).into()
}
#[inline(always)]
/// Blocks until any of the listed buttons is newly pressed.
pub fn wait_for_any_of(which: &[hid::Button]) -> hid::ButtonsPressed {
    let mut last_sample = hid::pressed_mask();
    // Each button contributes one bit to the polling mask.
    let mask = which
        .iter()
        .fold(0u16, |acc, button| acc | (1 << (*button as u16)));
    wait_for_any_mask(mask, &mut last_sample).into()
}
#[inline(always)]
/// Blocks until all of the listed buttons are held down simultaneously.
pub fn wait_for_all_of(which: &[hid::Button]) -> hid::ButtonsPressed {
    let mut last_sample = hid::pressed_mask();
    let mask = which
        .iter()
        .fold(0u16, |acc, button| acc | (1 << (*button as u16)));
    // Target pattern == active mask: every listed button must be down.
    wait_for_button_mask(mask, mask, &mut last_sample).into()
}
|
use crate::ray::*;
use crate::vec3::*;
use rand::rngs::ThreadRng;
use rand::Rng;
#[derive(Debug)]
pub struct Camera {
    /// Eye position.
    pub origin: Vec3,
    /// Full-width view-plane vector (spans the image horizontally).
    pub horizontal: Vec3,
    /// Full-height view-plane vector (spans the image vertically).
    pub vertical: Vec3,
    /// World-space position of the view plane's lower-left corner.
    pub lower_left: Vec3,
    /// Radius of the defocus (aperture) disk; half the aperture diameter.
    pub lens_radius: f64,
    /// Camera-basis "right" vector.
    pub u: Vec3,
    /// Camera-basis "up" vector.
    pub v: Vec3,
}
impl Camera {
    /// Rejection-samples a point uniformly inside the unit disk (z = 0).
    fn random_in_unit_disk(rng: &mut ThreadRng) -> Vec3 {
        loop {
            let candidate = 2.0 * Vec3(rng.gen::<f64>() - 0.5, rng.gen::<f64>() - 0.5, 0.0);
            if candidate.squared_length() < 1.0 {
                return candidate;
            }
        }
    }
    /// Builds a camera from a position, a target, an up vector, a field of
    /// view in degrees (horizontal), an aspect ratio, an aperture diameter
    /// and a focus-distance scale factor.
    pub fn new(
        origin: Vec3,
        look_at: Vec3,
        up: Vec3,
        fov: f64,
        aspect: f64,
        aperture: f64,
        focus: f64,
    ) -> Camera {
        // FOV in radians; the view-plane half extents follow from it.
        let fov_rad = fov * std::f64::consts::PI / 180.0;
        let half_w = (0.5 * fov_rad).tan();
        let half_h = half_w / aspect;
        // Orthonormal camera basis: w points from target to eye, u right, v up.
        let w = (origin - look_at).normalized();
        let u = up.cross(w).normalized();
        let v = w.cross(u).normalized();
        // `focus` scales the eye-to-target distance into the focal distance.
        let focus_dist = focus * (look_at - origin).length();
        Camera {
            origin,
            horizontal: 2.0 * half_w * focus_dist * u,
            vertical: 2.0 * half_h * focus_dist * v,
            lower_left: origin
                - half_w * focus_dist * u
                - half_h * focus_dist * v
                - focus_dist * w,
            lens_radius: 0.5 * aperture,
            u,
            v,
        }
    }
    /// Returns the ray through normalized screen coordinates (s, t),
    /// jittered across the lens disk for depth of field.
    pub fn get_ray(&self, s: f64, t: f64, rng: &mut ThreadRng) -> Ray {
        let lens_sample = self.lens_radius * Self::random_in_unit_disk(rng);
        let offset = self.u * lens_sample.x() + self.v * lens_sample.y();
        Ray {
            pos: self.origin + offset,
            dir: self.lower_left + s * self.horizontal + t * self.vertical - self.origin - offset,
        }
    }
}
|
fn main() {
    proconio::input! {
        n: usize,
    }
    // Number of unordered pairs among n items: n choose 2.
    let pairs = n * (n - 1) / 2;
    println!("{}", pairs);
}
|
// This file was generated
pub mod fs;
pub mod net;
|
use core::ops::Mul;
/// This code is inspired from Dalek's field multiplication for 64-bits backends contained in the
/// file [`src/backend/u64/field.rs`](https://github.com/dalek-cryptography/curve25519-dalek/blob/master/src/backend/u64/field.rs)
use secret_integers::*;
/// A `FieldElement64` represents an element of the field
/// \\( \mathbb Z / (2\^{255} - 19)\\).
///
/// In the 64-bit implementation, a `FieldElement` is represented in
/// radix \\(2\^{51}\\) as five `u64`s; the coefficients are allowed to
/// grow up to \\(2\^{54}\\) between reductions modulo \\(p\\).
///
/// # Note
///
/// The `curve25519_dalek::field` module provides a type alias
/// `curve25519_dalek::field::FieldElement` to either `FieldElement64`
/// or `FieldElement32`.
///
/// The backend-specific type `FieldElement64` should not be used
/// outside of the `curve25519_dalek::field` module.
/// A single limb, nominally 51 bits; values may grow to 2^54 between
/// reductions (see the debug asserts in `mul`).
type Limb = U64;
/// An element of GF(2^255 - 19) stored as five radix-2^51 limbs, following
/// the Dalek u64-backend layout described in the doc comment above.
#[derive(Copy, Clone)]
pub struct FieldElement64(pub(crate) [Limb; 5]);
impl<'a, 'b> Mul<&'b FieldElement64> for &'a FieldElement64 {
    type Output = FieldElement64;
    /// Schoolbook 5x5 limb multiplication in radix 2^51, following the Dalek
    /// u64 backend: cross products that overflow past limb 4 are folded back
    /// in via the identity 2^255 = 19 (mod p), hence the `* 19`
    /// precomputations, then a single carry chain brings every output limb
    /// back under 2^(51 + epsilon). Statement order matters for the bound
    /// analysis in the comments below — do not reorder.
    fn mul(self, _rhs: &'b FieldElement64) -> FieldElement64 {
        /// Helper function to multiply two 64-bit integers with 128
        /// bits of output.
        #[inline(always)]
        fn m(x: U64, y: U64) -> U128 {
            U128::from(x) * y.into()
        }
        // Alias self, _rhs for more readable formulas
        let a: &[Limb; 5] = &self.0;
        let b: &[Limb; 5] = &_rhs.0;
        // Precondition: assume input limbs a[i], b[i] are bounded as
        //
        // a[i], b[i] < 2^(51 + b)
        //
        // where b is a real parameter measuring the "bit excess" of the limbs.
        // 64-bit precomputations to avoid 128-bit multiplications.
        //
        // This fits into a u64 whenever 51 + b + lg(19) < 64.
        //
        // Since 51 + b + lg(19) < 51 + 4.25 + b
        // = 55.25 + b,
        // this fits if b < 8.75.
        let nineteen = 19u64.into();
        let b1_19 = b[1] * nineteen;
        let b2_19 = b[2] * nineteen;
        let b3_19 = b[3] * nineteen;
        let b4_19 = b[4] * nineteen;
        // Multiply to get 128-bit coefficients of output
        let c0: U128 =
            m(a[0], b[0]) + m(a[4], b1_19) + m(a[3], b2_19) + m(a[2], b3_19) + m(a[1], b4_19);
        let mut c1: U128 =
            m(a[1], b[0]) + m(a[0], b[1]) + m(a[4], b2_19) + m(a[3], b3_19) + m(a[2], b4_19);
        let mut c2: U128 =
            m(a[2], b[0]) + m(a[1], b[1]) + m(a[0], b[2]) + m(a[4], b3_19) + m(a[3], b4_19);
        let mut c3: U128 =
            m(a[3], b[0]) + m(a[2], b[1]) + m(a[1], b[2]) + m(a[0], b[3]) + m(a[4], b4_19);
        let mut c4: U128 =
            m(a[4], b[0]) + m(a[3], b[1]) + m(a[2], b[2]) + m(a[1], b[3]) + m(a[0], b[4]);
        // How big are the c[i]? We have
        //
        // c[i] < 2^(102 + 2*b) * (1+i + (4-i)*19)
        // < 2^(102 + lg(1 + 4*19) + 2*b)
        // < 2^(108.27 + 2*b)
        //
        // The carry (c[i] >> 51) fits into a u64 when
        // 108.27 + 2*b - 51 < 64
        // 2*b < 6.73
        // b < 3.365.
        //
        // So we require b < 3 to ensure this fits.
        // Debug-only check of the precondition (declassify is not
        // constant-time, so this must never run in release builds).
        debug_assert!(U64::declassify(a[0]) < (1 << 54));
        debug_assert!(U64::declassify(b[0]) < (1 << 54));
        debug_assert!(U64::declassify(a[1]) < (1 << 54));
        debug_assert!(U64::declassify(b[1]) < (1 << 54));
        debug_assert!(U64::declassify(a[2]) < (1 << 54));
        debug_assert!(U64::declassify(b[2]) < (1 << 54));
        debug_assert!(U64::declassify(a[3]) < (1 << 54));
        debug_assert!(U64::declassify(b[3]) < (1 << 54));
        debug_assert!(U64::declassify(a[4]) < (1 << 54));
        debug_assert!(U64::declassify(b[4]) < (1 << 54));
        // Casting to u64 and back tells the compiler that the carry is
        // bounded by 2^64, so that the addition is a u128 + u64 rather
        // than u128 + u128.
        const LOW_51_BIT_MASK: u64 = (1u64 << 51) - 1;
        let mut out = [U64::classify(0u64); 5];
        c1 += U64::from(c0 >> 51).into();
        out[0] = U64::from(c0) & LOW_51_BIT_MASK.into();
        c2 += U64::from(c1 >> 51).into();
        out[1] = U64::from(c1) & LOW_51_BIT_MASK.into();
        c3 += U64::from(c2 >> 51).into();
        out[2] = U64::from(c2) & LOW_51_BIT_MASK.into();
        c4 += U64::from(c3 >> 51).into();
        out[3] = U64::from(c3) & LOW_51_BIT_MASK.into();
        let carry: U64 = (c4 >> 51).into();
        out[4] = U64::from(c4) & LOW_51_BIT_MASK.into();
        // To see that this does not overflow, we need out[0] + carry * 19 < 2^64.
        //
        // c4 < a0*b4 + a1*b3 + a2*b2 + a3*b1 + a4*b0 + (carry from c3)
        // < 5*(2^(51 + b) * 2^(51 + b)) + (carry from c3)
        // < 2^(102 + 2*b + lg(5)) + 2^64.
        //
        // When b < 3 we get
        //
        // c4 < 2^110.33 so that carry < 2^59.33
        //
        // so that
        //
        // out[0] + carry * 19 < 2^51 + 19 * 2^59.33 < 2^63.58
        //
        // and there is no overflow.
        out[0] = out[0] + carry * nineteen;
        // Now out[1] < 2^51 + 2^(64 -51) = 2^51 + 2^13 < 2^(51 + epsilon).
        out[1] += out[0] >> 51;
        out[0] &= LOW_51_BIT_MASK.into();
        // Now out[i] < 2^(51 + epsilon) for all i.
        FieldElement64(out)
    }
}
|
// Copyright (c) The Starcoin Core Contributors
// SPDX-License-Identifier: Apache-2.0
use crate::move_resource::MoveResource;
use anyhow::{format_err, Result};
use num_enum::{IntoPrimitive, TryFromPrimitive};
use serde::{Deserialize, Serialize};
use std::fmt::Debug;
use std::fmt::{self, Formatter};
use std::str::FromStr;
/// A standard-library version selector: either the floating "latest" or a
/// pinned numeric version.
#[derive(Clone, Copy, Debug, Deserialize, Eq, Hash, PartialEq, PartialOrd, Ord, Serialize)]
pub enum StdlibVersion {
    Latest,
    Version(VersionNumber),
}
// Version numbers are plain u64s.
type VersionNumber = u64;
impl StdlibVersion {
    /// Creates a pinned version from an explicit version number.
    pub fn new(version: u64) -> Self {
        StdlibVersion::Version(version)
    }
    /// Renders the version in the same form `FromStr` accepts:
    /// `"latest"` or the decimal version number.
    pub fn as_string(&self) -> String {
        match self {
            StdlibVersion::Latest => "latest".to_string(),
            // Idiomatic equivalent of `format!("{}", version)`.
            StdlibVersion::Version(version) => version.to_string(),
        }
    }
    /// Numeric form of the version; `Latest` is reported as 0.
    pub fn version(&self) -> u64 {
        match self {
            StdlibVersion::Latest => 0,
            StdlibVersion::Version(version) => *version,
        }
    }
}
impl Default for StdlibVersion {
    /// Unpinned by default.
    fn default() -> Self {
        StdlibVersion::Latest
    }
}
impl FromStr for StdlibVersion {
    type Err = anyhow::Error;
    /// Parses `"latest"` or a decimal version number (inverse of `as_string`).
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        match s {
            "latest" => Ok(StdlibVersion::Latest),
            s => Ok(Self::new(s.parse()?)),
        }
    }
}
/// Available consensus/PoW strategies. `repr(u8)` plus the num_enum derives
/// make the variants round-trip with their `u8` discriminants; serde tags the
/// JSON form with a `"type"` field.
#[derive(
    Clone,
    Copy,
    Debug,
    Deserialize,
    Eq,
    Hash,
    PartialEq,
    PartialOrd,
    Ord,
    Serialize,
    IntoPrimitive,
    TryFromPrimitive,
)]
#[repr(u8)]
#[serde(tag = "type")]
pub enum ConsensusStrategy {
    Dummy = 0,
    Argon = 1,
    Keccak = 2,
    CryptoNight = 3,
}
impl ConsensusStrategy {
    /// Numeric discriminant (via the `IntoPrimitive` derive).
    pub fn value(self) -> u8 {
        self.into()
    }
}
impl Default for ConsensusStrategy {
    fn default() -> Self {
        ConsensusStrategy::Dummy
    }
}
impl fmt::Display for ConsensusStrategy {
    /// Lowercase name, matching what `FromStr` parses.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let name = match self {
            ConsensusStrategy::Dummy => "dummy",
            ConsensusStrategy::Argon => "argon",
            ConsensusStrategy::Keccak => "keccak",
            ConsensusStrategy::CryptoNight => "cryptonight",
        };
        f.write_str(name)
    }
}
impl FromStr for ConsensusStrategy {
    type Err = anyhow::Error;
    /// Parses the lowercase names emitted by `Display`; anything else is an
    /// error carrying the offending string.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        match s {
            "dummy" => Ok(ConsensusStrategy::Dummy),
            "argon" => Ok(ConsensusStrategy::Argon),
            "keccak" => Ok(ConsensusStrategy::Keccak),
            "cryptonight" => Ok(ConsensusStrategy::CryptoNight),
            s => Err(format_err!("Unknown ConsensusStrategy: {}", s)),
        }
    }
}
/// A single-byte chain identifier (newtype over `u8`).
#[derive(Clone, Copy, Debug, Deserialize, Serialize, Hash, Eq, PartialEq, PartialOrd, Ord)]
pub struct ChainId {
    id: u8,
}
impl ChainId {
    pub fn new(id: u8) -> Self {
        Self { id }
    }
    /// The raw byte value.
    pub fn id(self) -> u8 {
        self.id
    }
    /// The chain id reserved for tests (255).
    pub fn test() -> Self {
        ChainId::new(255)
    }
}
impl fmt::Display for ChainId {
    /// Decimal form of the raw id.
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        fmt::Display::fmt(&self.id, f)
    }
}
impl FromStr for ChainId {
    type Err = anyhow::Error;
    /// Parses the decimal form produced by `Display`.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        Ok(ChainId::new(s.parse::<u8>()?))
    }
}
impl From<u8> for ChainId {
    fn from(id: u8) -> Self {
        Self::new(id)
    }
}
// Implementing `From<ChainId> for u8` (instead of the previous hand-written
// `Into<u8> for ChainId`) provides `ChainId: Into<u8>` for free via the
// standard blanket impl, so existing `.into()` call sites keep working and
// the `clippy::from_over_into` allow is no longer needed.
impl From<ChainId> for u8 {
    fn from(id: ChainId) -> u8 {
        id.id
    }
}
impl MoveResource for ChainId {
    // On-chain Move resource location: module and struct are both "ChainId".
    const MODULE_NAME: &'static str = "ChainId";
    const STRUCT_NAME: &'static str = "ChainId";
}
|
use async_trait::async_trait;
use std::collections::HashMap;
use crate::errors::Result;
use crate::request::{get, put, Body};
use crate::{Client, QueryMeta, QueryOptions, WriteMeta, WriteOptions};
// Field names are capitalized to mirror the Consul HTTP API's JSON keys
// (serde serializes field names verbatim).
#[serde(default)]
#[derive(Clone, Default, Eq, PartialEq, Serialize, Deserialize, Debug)]
pub struct SessionID {
    /// The session's identifier string.
    pub ID: String,
}
/// A Consul session record; every field is optional so partial API responses
/// deserialize cleanly (`#[serde(default)]` fills the gaps with `None`).
#[serde(default)]
#[derive(Clone, Default, Eq, PartialEq, Serialize, Deserialize, Debug)]
pub struct SessionEntry {
    pub CreateIndex: Option<u64>,
    pub ID: Option<String>,
    pub Name: Option<String>,
    pub Node: Option<String>,
    pub LockDelay: Option<u64>, //TODO: Change this to a Durations
    pub Behavior: Option<String>,
    pub Checks: Option<Vec<String>>,
    pub TTL: Option<String>,
}
#[async_trait]
/// Client-side interface to Consul's `/v1/session` HTTP endpoints.
pub trait Session {
    /// Creates a new session and returns the created entry.
    async fn create(
        &self,
        session: &SessionEntry,
        options: Option<&WriteOptions>,
    ) -> Result<(SessionEntry, WriteMeta)>;
    /// Destroys the session with the given id; the bool reports success.
    async fn destroy(&self, id: &str, options: Option<&WriteOptions>) -> Result<(bool, WriteMeta)>;
    /// Looks up a single session by id (returned as a list, per the API).
    async fn info(
        &self,
        id: &str,
        options: Option<&QueryOptions>,
    ) -> Result<(Vec<SessionEntry>, QueryMeta)>;
    /// Lists all active sessions.
    async fn list(&self, options: Option<&QueryOptions>) -> Result<(Vec<SessionEntry>, QueryMeta)>;
    /// Lists the sessions belonging to the given node.
    async fn node(
        &self,
        node: &str,
        options: Option<&QueryOptions>,
    ) -> Result<(Vec<SessionEntry>, QueryMeta)>;
    /// Renews the session's TTL.
    async fn renew(
        &self,
        id: &str,
        options: Option<&WriteOptions>,
    ) -> Result<(Vec<SessionEntry>, WriteMeta)>;
}
#[async_trait]
impl Session for Client {
    // Each method maps 1:1 onto a Consul HTTP endpoint; writes go through
    // `put`, reads through `get`, with no extra query parameters (the empty
    // HashMap) beyond what `options` contributes.
    async fn create(
        &self,
        session: &SessionEntry,
        options: Option<&WriteOptions>,
    ) -> Result<(SessionEntry, WriteMeta)> {
        put(
            "/v1/session/create",
            Some(Body::AsJson(session)),
            &self.config,
            HashMap::new(),
            options,
        )
        .await
    }
    async fn destroy(&self, id: &str, options: Option<&WriteOptions>) -> Result<(bool, WriteMeta)> {
        let path = format!("/v1/session/destroy/{}", id);
        // PUT with no body; the type annotation fixes the generic Body type.
        put(
            &path,
            None as Option<Body<()>>,
            &self.config,
            HashMap::new(),
            options,
        )
        .await
    }
    async fn info(
        &self,
        id: &str,
        options: Option<&QueryOptions>,
    ) -> Result<(Vec<SessionEntry>, QueryMeta)> {
        let path = format!("/v1/session/info/{}", id);
        get(&path, &self.config, HashMap::new(), options).await
    }
    async fn list(&self, options: Option<&QueryOptions>) -> Result<(Vec<SessionEntry>, QueryMeta)> {
        get("/v1/session/list", &self.config, HashMap::new(), options).await
    }
    async fn node(
        &self,
        node: &str,
        options: Option<&QueryOptions>,
    ) -> Result<(Vec<SessionEntry>, QueryMeta)> {
        let path = format!("/v1/session/node/{}", node);
        get(&path, &self.config, HashMap::new(), options).await
    }
    async fn renew(
        &self,
        id: &str,
        options: Option<&WriteOptions>,
    ) -> Result<(Vec<SessionEntry>, WriteMeta)> {
        let path = format!("/v1/session/renew/{}", id);
        put(
            &path,
            None as Option<Body<()>>,
            &self.config,
            HashMap::new(),
            options,
        )
        .await
    }
}
|
//! Module containing basic types representing coordinate systems.
use super::tensors::{ContravariantIndex, CovariantIndex, Matrix, Tensor};
use crate::typenum::consts::U2;
use crate::typenum::uint::Unsigned;
use crate::typenum::Pow;
use generic_array::{ArrayLength, GenericArray};
use std::fmt;
use std::ops::{Index, IndexMut};
/// `CoordinateSystem` marks a struct (usually a unit struct) as representing a coordinate system.
/// `CoordinateSystem` marks a struct (usually a unit struct) as representing a coordinate system.
pub trait CoordinateSystem: Sized {
    /// An associated type representing the dimension of the coordinate system
    type Dimension: Unsigned + ArrayLength<f64> + ArrayLength<usize>;
    /// Function returning a small value for purposes of numerical differentiation.
    /// What is considered a small value may depend on the point, hence the parameter.
    /// Returns just 0.01 by default.
    fn small(_: &Point<Self>) -> f64 {
        0.01
    }
    /// Function returning the dimension
    fn dimension() -> usize {
        Self::Dimension::to_usize()
    }
}
/// Struct representing a point on the manifold. The information about the coordinate system
/// is saved in the type parameter, so that only operations on objects belonging to the same
/// coordinate system will be allowed.
/// Struct representing a point on the manifold. The information about the coordinate system
/// is saved in the type parameter, so that only operations on objects belonging to the same
/// coordinate system will be allowed.
pub struct Point<T: CoordinateSystem> {
    /// The coordinates of the point (length fixed by `T::Dimension`).
    x: GenericArray<f64, T::Dimension>,
}
impl<T> Point<T>
where
    T: CoordinateSystem,
{
    /// Creates a new point with coordinates described by the array
    pub fn new(coords: GenericArray<f64, T::Dimension>) -> Point<T> {
        Point { x: coords }
    }
    /// Creates a new point with coordinates passed in the slice.
    ///
    /// Panics if `coords.len()` does not equal the system's dimension
    /// (`clone_from_slice` requires matching lengths).
    pub fn from_slice(coords: &[f64]) -> Point<T> {
        Point {
            x: GenericArray::clone_from_slice(coords),
        }
    }
    /// Returns the point's coordinates as an array
    pub fn coords_array(&self) -> &GenericArray<f64, T::Dimension> {
        &self.x
    }
}
// Manual Clone/Copy impls: derives would wrongly require `T: Clone`/`T: Copy`
// even though only the coordinate array is stored.
impl<T> Clone for Point<T>
where
    T: CoordinateSystem,
{
    fn clone(&self) -> Point<T> {
        Point::new(self.x.clone())
    }
}
impl<T> Copy for Point<T>
where
    T: CoordinateSystem,
    <T::Dimension as ArrayLength<f64>>::ArrayType: Copy,
{
}
impl<T> Index<usize> for Point<T>
where
    T: CoordinateSystem,
{
    type Output = f64;
    /// Read access to the `idx`-th coordinate; out-of-range indices panic
    /// with the underlying array's indexing semantics.
    fn index(&self, idx: usize) -> &f64 {
        &self.x[idx]
    }
}
impl<T> IndexMut<usize> for Point<T>
where
    T: CoordinateSystem,
{
    /// Mutable access to the `idx`-th coordinate.
    fn index_mut(&mut self, idx: usize) -> &mut f64 {
        &mut self.x[idx]
    }
}
impl<T> PartialEq<Point<T>> for Point<T>
where
    T: CoordinateSystem,
{
    /// Component-wise `f64` equality over all coordinates.
    fn eq(&self, rhs: &Point<T>) -> bool {
        (0..T::dimension()).all(|i| self[i] == rhs[i])
    }
}
// NOTE(review): `Eq` on f64-backed coordinates is not reflexive for NaN
// components — presumably points never hold NaN here; confirm.
impl<T> Eq for Point<T> where T: CoordinateSystem {}
impl<T> fmt::Debug for Point<T>
where
    T: CoordinateSystem,
{
    /// Formats as `Point[…coords…]` via the array's Debug impl.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "Point{:?}", &self.x)
    }
}
/// Trait used for conversions between different coordinate systems. Implementing `ConversionTo<T>`
/// for a `CoordinateSystem` will allow objects in that system to be converted to the system `T`
/// (note that `T` also has to be a `CoordinateSystem`).
/// Trait used for conversions between different coordinate systems. Implementing `ConversionTo<T>`
/// for a `CoordinateSystem` will allow objects in that system to be converted to the system `T`
/// (note that `T` also has to be a `CoordinateSystem`).
pub trait ConversionTo<T: CoordinateSystem + 'static>: CoordinateSystem
where
    T::Dimension: Pow<U2>,
    <T::Dimension as Pow<U2>>::Output: ArrayLength<f64>,
{
    /// Function converting the coordinates of a point.
    fn convert_point(p: &Point<Self>) -> Point<T>;
    /// Function calculating a Jacobian at a point - that is, the matrix of derivatives
    /// of the coordinate conversions.
    ///
    /// This will be contracted with contravariant indices in the tensor.
    ///
    /// Default implementation: numerical central difference with step `h`
    /// taken from `Self::small(p)`, one coordinate direction at a time.
    fn jacobian(p: &Point<Self>) -> Matrix<T> {
        let d = Self::dimension();
        let mut result = Matrix::zero(Self::convert_point(p));
        let h = Self::small(p);
        for j in 0..d {
            let mut x = p.clone();
            // Evaluate the conversion at x_j - h and x_j + h
            // (the second assignment adds 2h to the perturbed value).
            x[j] = x[j] - h;
            let y1 = Self::convert_point(&x);
            x[j] = x[j] + h * 2.0;
            let y2 = Self::convert_point(&x);
            for i in 0..d {
                // calculate dyi/dxj
                let index = [i, j];
                result[&index[..]] = (y2[i] - y1[i]) / (2.0 * h);
            }
        }
        result
    }
    /// The inverse matrix of the Jacobian at a point.
    ///
    /// In conversions, it will be contracted with covariant indices.
    fn inv_jacobian(p: &Point<Self>) -> Tensor<T, (CovariantIndex, ContravariantIndex)> {
        ConversionTo::<T>::jacobian(p).inverse().unwrap()
    }
}
|
use std::env::set_current_dir;
/// Reports whether `command` is handled by the shell itself: the `history`
/// and `cd` builtins, or a `!…` history-expansion command.
pub fn is_builtin(command: &str) -> bool {
    matches!(command, "history" | "cd") || command.starts_with('!')
}
/// Dispatches a builtin by name to its handler.
/// NOTE(review): `is_builtin` also accepts "!…" commands, which have no arm
/// here and fall through to "Unknown command!" — presumably history expansion
/// is resolved by the caller before dispatch; confirm.
pub fn execute_builtin(command: &str, args: &[&str], history: &Vec<String>) {
    match command {
        "history" => list_history(history),
        "cd" => change_working_dir(args),
        _ => println!("Unknown command!"),
    }
}
/// Prints every history entry on its own line, numbered from 1.
/// Takes a slice instead of `&Vec<String>` so any contiguous storage works;
/// existing `&Vec` call sites still compile via deref coercion.
fn list_history(history: &[String]) {
    for (i, line) in history.iter().enumerate() {
        println!("{}. {}", i + 1, line);
    }
}
/// Implements the `cd` builtin: expects exactly one argument (the target
/// directory) and reports failures on stderr instead of propagating them.
fn change_working_dir(args: &[&str]) {
    match args {
        [dir] => {
            if set_current_dir(dir).is_err() {
                eprintln!("Couldn't change directory!");
            }
        }
        _ => eprintln!("cd requires exactly one argument: the directory required"),
    }
}
|
pub mod helper;
|
/// Multiplies all values together in `i64` (so `i32` products cannot
/// overflow pairwise); an empty slice yields the multiplicative identity, 1.
pub fn multiply(numbers: &[i32]) -> i64 {
    numbers.iter().map(|&n| i64::from(n)).product()
}
//! An expression that evaluates a sub-expression, without consuming input.
//!
//! See [`crate::Parser::check`].
use crate::parser::Parser;
use crate::span::Span;
/// The struct returned from [`crate::Parser::check`].
///
/// Wraps an inner parser `P` and evaluates it without consuming input (the
/// `Parser` impl below maps success to a zero-width span).
pub struct Check<P>(pub(crate) P);
impl<P> Parser for Check<P>
where
    P: Parser,
{
    // A successful check carries no value; errors are the inner parser's.
    type Value = ();
    type Error = P::Error;
    /// Runs the inner parser; on success, reports a zero-width span at the
    /// start of the input so the caller consumes nothing. Errors pass
    /// through unchanged, keeping the inner parser's span.
    fn parse(&self, input: &'_ str) -> Result<Span<Self::Value>, Span<Self::Error>> {
        self.0.parse(input).map(|_| Span::new(0..0, ()))
    }
}
#[cfg(test)]
mod tests {
    use quickcheck_macros::quickcheck;
    use crate::expression::test_expr::*;
    use crate::parser::Parser;
    use crate::span::Span;
    use super::Check;
    // A matching inner parser yields a zero-width span regardless of the
    // inner parser's own span.
    #[test]
    fn p_match() {
        assert_eq!(
            Check(TestExpr::ok(12..37)).parse("hello"),
            Ok(Span::new(0..0, ()))
        );
    }
    // Inner-parser errors propagate unchanged, keeping their span.
    #[test]
    fn p_error() {
        assert_eq!(
            Check(TestExpr::err(12..37)).parse("hello"),
            Err(Span::new(12..37, TestError))
        );
    }
    // Property: Check mirrors the inner parser's outcome for arbitrary
    // expressions and inputs.
    #[quickcheck]
    fn parse(p: TestExpr, input: String) {
        assert_eq!(
            Check(&p).parse(&input),
            match p {
                ParseMatch(_, _) => Ok(Span::new(0..0, ())),
                ParseError(config) => Err(Span::new(config.range(), TestError)),
            }
        );
    }
}
|
// MIT License
//
// Copyright (c) 2018-2021 Hans-Martin Will
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
use std::cmp::{Eq, Ord, Ordering, PartialEq, PartialOrd};
use std::fmt;
/// A string wrapper that preserves its original casing but compares,
/// orders and equality-checks case-insensitively (see the impls below).
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct Name {
    string: String,
}
impl Name {
    /// Wraps an owned string as a `Name`, keeping its casing as-is.
    pub fn new(string: String) -> Name {
        Name { string }
    }
    /// Borrows the underlying string with its original casing.
    pub fn as_str(&self) -> &str {
        &self.string
    }
}
impl fmt::Display for Name {
    /// Displays the name with its original (stored) casing.
    fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
        self.string.fmt(f)
    }
}
impl<'a> From<&'a str> for Name {
    fn from(value: &'a str) -> Name {
        Name::new(String::from(value))
    }
}
impl From<String> for Name {
    fn from(value: String) -> Name {
        Name::new(value)
    }
}
impl PartialEq for Name {
    /// Case-insensitive comparison: both sides are uppercased character by
    /// character (`char::to_uppercase` may expand one char to several, hence
    /// the `flat_map`) and compared as iterators without allocating.
    fn eq(&self, other: &Name) -> bool {
        let self_iter = self.string.chars().flat_map(|c| c.to_uppercase());
        let other_iter = other.string.chars().flat_map(|c| c.to_uppercase());
        self_iter.eq(other_iter)
    }
}
impl Eq for Name {}
impl PartialEq<str> for Name {
    /// Same case-insensitive comparison, directly against a `&str`.
    fn eq(&self, other: &str) -> bool {
        let self_iter = self.string.chars().flat_map(|c| c.to_uppercase());
        let other_iter = other.chars().flat_map(|c| c.to_uppercase());
        self_iter.eq(other_iter)
    }
}
impl PartialOrd for Name {
    /// Delegates to `Ord::cmp` (which performs the same case-insensitive,
    /// uppercased-character comparison) so `partial_cmp` and `cmp` can never
    /// disagree — the canonical form when `Ord` is implemented
    /// (clippy: `non_canonical_partial_ord_impl`).
    fn partial_cmp(&self, other: &Name) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}
impl Ord for Name {
    /// Total order over the uppercased character sequences — the
    /// case-insensitive counterpart of lexicographic string order.
    fn cmp(&self, other: &Name) -> Ordering {
        let self_iter = self.string.chars().flat_map(|c| c.to_uppercase());
        let other_iter = other.string.chars().flat_map(|c| c.to_uppercase());
        self_iter.cmp(other_iter)
    }
}
impl PartialOrd<str> for Name {
    /// Case-insensitive ordering directly against a `&str`.
    fn partial_cmp(&self, other: &str) -> Option<Ordering> {
        let self_iter = self.string.chars().flat_map(|c| c.to_uppercase());
        let other_iter = other.chars().flat_map(|c| c.to_uppercase());
        self_iter.partial_cmp(other_iter)
    }
}
|
#[cfg(test)]
extern crate lib_pixel;
pub mod pixel {
    /// An 8-bit-per-channel RGB pixel.
    #[derive(Debug, Clone, Copy)]
    pub struct Pixel {
        red: u8,
        green: u8,
        blue: u8,
    }
    impl Pixel {
        /// Builds a pixel from its red, green and blue components.
        pub fn new(red: u8, green: u8, blue: u8) -> Self {
            Pixel { red, green, blue }
        }
        /// The red component.
        pub fn red(self) -> u8 {
            self.red
        }
        /// The green component.
        pub fn green(self) -> u8 {
            self.green
        }
        /// The blue component.
        pub fn blue(self) -> u8 {
            self.blue
        }
        /// Renders the pixel as "(r,g,b)".
        pub fn display(self) -> String {
            format!("({},{},{})", self.red, self.green, self.blue)
        }
        /// Bitwise-inverts every channel in place.
        pub fn invert(&mut self) {
            self.red = !self.red;
            self.green = !self.green;
            self.blue = !self.blue;
        }
        /// Converts to grayscale in place: each channel becomes the
        /// (truncating) mean of the three channels, computed in u16 to
        /// avoid overflow.
        pub fn grayscale(&mut self) {
            let gray = ((self.green as u16 + self.red as u16 + self.blue as u16) / 3) as u8;
            self.red = gray;
            self.green = gray;
            self.blue = gray;
        }
        /// Renders the pixel as "r g b", matching the .ppm file layout.
        pub fn display_to_byte(self) -> String {
            format!("{} {} {}", self.red, self.green, self.blue)
        }
    }
    impl PartialEq for Pixel {
        /// Two pixels are equal when all three channels match.
        fn eq(&self, other: &Pixel) -> bool {
            self.red == other.red && self.green == other.green && self.blue == other.blue
        }
    }
}
use super::mock::*;
use crate::{Error, NFTsForSale};
use frame_support::{assert_noop, assert_ok, StorageMap};
use frame_system::RawOrigin;
// Listing an NFT signed by a non-owner must fail without side effects.
#[test]
fn cannot_list_nft_if_not_owner() {
    ExtBuilder::default()
        .one_nft_for_alice()
        .build()
        .execute_with(|| {
            assert_noop!(
                Marketplace::list(RawOrigin::Signed(BOB).into(), 0, 1),
                Error::<Test>::NotNftOwner
            );
        })
}
// A listed NFT is locked, so a second `list` is rejected by the NFTs pallet.
#[test]
fn cannot_list_the_same_nft_twice() {
    ExtBuilder::default()
        .one_nft_for_alice()
        .build()
        .execute_with(|| {
            assert_ok!(Marketplace::list(RawOrigin::Signed(ALICE).into(), 0, 1));
            assert_noop!(
                Marketplace::list(RawOrigin::Signed(ALICE).into(), 0, 1),
                ternoa_nfts::Error::<Test>::Locked
            );
        })
}
// Listing stores (seller, price) in the for-sale map.
#[test]
fn list_register_price() {
    ExtBuilder::default()
        .one_nft_for_alice()
        .build()
        .execute_with(|| {
            assert_ok!(Marketplace::list(RawOrigin::Signed(ALICE).into(), 0, 1));
            assert_eq!(Marketplace::nft_for_sale(0), (ALICE, 1));
        })
}
// Buying an NFT that was never listed must fail.
#[test]
fn cannot_buy_if_not_for_sale() {
    ExtBuilder::default().build().execute_with(|| {
        assert_noop!(
            Marketplace::buy(RawOrigin::Signed(ALICE).into(), 0),
            Error::<Test>::NftNotForSale
        );
    })
}
// A buyer with no funds cannot pay the listed price.
#[test]
fn cannot_buy_if_not_enough_money() {
    ExtBuilder::default()
        .one_nft_for_alice()
        .build()
        .execute_with(|| {
            assert_ok!(Marketplace::list(RawOrigin::Signed(ALICE).into(), 0, 1));
            assert_noop!(
                Marketplace::buy(RawOrigin::Signed(BOB).into(), 0),
                pallet_balances::Error::<Test, _>::InsufficientBalance
            );
        })
}
// A successful purchase moves the price (50) from buyer to seller:
// both start at 100, so Alice ends at 150 and Bob at 50.
#[test]
fn buy_transfer_funds_to_owner() {
    ExtBuilder::default()
        .one_hundred_for_alice_n_bob()
        .one_nft_for_alice()
        .build()
        .execute_with(|| {
            assert_ok!(Marketplace::list(RawOrigin::Signed(ALICE).into(), 0, 50));
            assert_ok!(Marketplace::buy(RawOrigin::Signed(BOB).into(), 0));
            assert_eq!(Balances::free_balance(ALICE), 150);
            assert_eq!(Balances::free_balance(BOB), 50);
        })
}
// A successful purchase transfers NFT ownership to the buyer.
#[test]
fn buy_change_owner() {
    ExtBuilder::default()
        .one_hundred_for_alice_n_bob()
        .one_nft_for_alice()
        .build()
        .execute_with(|| {
            assert_ok!(Marketplace::list(RawOrigin::Signed(ALICE).into(), 0, 50));
            assert_ok!(Marketplace::buy(RawOrigin::Signed(BOB).into(), 0));
            assert_eq!(NFTs::data(0).owner, BOB);
        })
}
// A successful purchase clears the lock set at listing time.
#[test]
fn buy_unlock_nft() {
    ExtBuilder::default()
        .one_hundred_for_alice_n_bob()
        .one_nft_for_alice()
        .build()
        .execute_with(|| {
            assert_ok!(Marketplace::list(RawOrigin::Signed(ALICE).into(), 0, 50));
            assert_ok!(Marketplace::buy(RawOrigin::Signed(BOB).into(), 0));
            assert_eq!(NFTs::data(0).locked, false);
        })
}
// Unlisting an NFT that is not for sale must fail.
#[test]
fn cannot_unlist_if_not_listed() {
    ExtBuilder::default()
        .one_nft_for_alice()
        .build()
        .execute_with(|| {
            assert_noop!(
                Marketplace::unlist(RawOrigin::Signed(ALICE).into(), 0),
                Error::<Test>::NftNotForSale
            );
        })
}
// Only the seller may unlist their own NFT.
#[test]
fn cannot_unlist_if_not_owner() {
    ExtBuilder::default()
        .one_nft_for_alice()
        .build()
        .execute_with(|| {
            assert_ok!(Marketplace::list(RawOrigin::Signed(ALICE).into(), 0, 50));
            assert_noop!(
                Marketplace::unlist(RawOrigin::Signed(BOB).into(), 0),
                Error::<Test>::NotNftOwner
            );
        })
}
// Unlisting releases the lock set at listing time.
#[test]
fn unlist_unlocks_nft() {
    ExtBuilder::default()
        .one_nft_for_alice()
        .build()
        .execute_with(|| {
            assert_ok!(Marketplace::list(RawOrigin::Signed(ALICE).into(), 0, 50));
            assert_ok!(Marketplace::unlist(RawOrigin::Signed(ALICE).into(), 0));
            assert_eq!(NFTs::data(0).locked, false);
        })
}
// Unlisting removes the entry from the for-sale storage map.
#[test]
fn unlist_remove_from_for_sale() {
    ExtBuilder::default()
        .one_nft_for_alice()
        .build()
        .execute_with(|| {
            assert_ok!(Marketplace::list(RawOrigin::Signed(ALICE).into(), 0, 50));
            assert_ok!(Marketplace::unlist(RawOrigin::Signed(ALICE).into(), 0));
            assert_eq!(NFTsForSale::<Test>::contains_key(0), false);
        })
}
|
use std::io::{self, Write};
use std::fs::{File, OpenOptions};
use std::ffi::OsStr;
use std::path::Path;
use std::os::unix::ffi::{OsStrExt, OsStringExt};
pub fn create_unique<P1, P2>(path: &P1, extension: Option<&P2>) -> io::Result<File>
where
P1: AsRef<Path> + ?Sized,
P2: AsRef<Path> + ?Sized,
{
let path = path.as_ref();
let extension = extension.map(AsRef::as_ref);
let mut path_buf: Vec<u8> = path
.as_os_str()
.to_owned()
.into_vec();
let stem_size = path_buf.len();
let mut try_create = |count: usize| -> io::Result<File> {
if count > 0 {
write!(path_buf, " ({})", count)
.expect("write to vec failed");
}
if let Some(extension) = extension {
path_buf.push(b'.');
path_buf.extend_from_slice(
extension
.as_os_str()
.as_bytes()
);
}
let filename = OsStr::from_bytes(&path_buf);
let result = OpenOptions
::new()
.write(true)
.create_new(true)
.open(filename);
if result.is_ok() {
log::debug!("created unique file: {:#?}", Path::new(filename));
}
path_buf.truncate(stem_size);
result
};
let mut count: usize = 0;
loop {
match try_create(count) {
Err(e) if e.kind() == io::ErrorKind::AlreadyExists => {
count = count
.checked_add(1)
.ok_or(
io::ErrorKind::AlreadyExists
)?;
},
other => return other,
};
}
}
|
/// Project Euler problem 6: the difference between the square of the sum
/// and the sum of the squares of the first 100 natural numbers.
pub fn problem_006() -> usize {
    let n: usize = 100;
    // Sum of squares: 1^2 + 2^2 + … + n^2.
    let sum_of_squares: usize = (1..=n).map(|x| x * x).sum();
    // Square of sum: (1 + 2 + … + n)^2.
    let square_of_sum: usize = (1..=n).sum::<usize>().pow(2);
    square_of_sum - sum_of_squares
}
#[cfg(test)]
mod test {
    use super::*;
    // Nightly-only `test` crate, required for the #[bench] harness below.
    use test::Bencher;
    #[test]
    fn test_problem_006() {
        let ans: usize = problem_006();
        println!("Answer to Problem 6: {}", ans);
        // Known answer to Project Euler problem 6.
        assert!(ans == 25164150)
    }
    #[bench]
    fn bench_problem_006(b: &mut Bencher) {
        b.iter(|| problem_006());
    }
}
|
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use cosmwasm_std::{CanonicalAddr, Decimal, Order, ReadonlyStorage, StdResult, Storage, Uint128};
use cosmwasm_storage::{
bucket, bucket_read, singleton, singleton_read, Bucket, ReadonlyBucket, Singleton,
};
use spectrum_protocol::common::{
calc_range_end, calc_range_end_addr, calc_range_start, calc_range_start_addr, OrderBy,
};
use spectrum_protocol::gov::{ExecuteMsg, PollStatus, VoterInfo};
static KEY_CONFIG: &[u8] = b"config";

/// Contract configuration, stored as a singleton under `KEY_CONFIG`.
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)]
pub struct Config {
    pub owner: CanonicalAddr,
    pub spec_token: CanonicalAddr,
    // Voting parameters.
    pub quorum: Decimal,
    pub threshold: Decimal,
    pub voting_period: u64,
    pub effective_delay: u64,
    pub expiration_period: u64,
    pub proposal_deposit: Uint128,
    // Minting schedule.
    pub mint_per_block: Uint128,
    pub mint_start: u64,
    pub mint_end: u64,
    pub warchest_address: CanonicalAddr,
    pub warchest_ratio: Decimal,
}

/// Mutable accessor for the config singleton.
pub fn config_store<S: Storage>(storage: &mut S) -> Singleton<S, Config> {
    singleton(storage, KEY_CONFIG)
}

/// Loads the stored config (errors if it was never written).
pub fn read_config<S: Storage>(storage: &S) -> StdResult<Config> {
    singleton_read(storage, KEY_CONFIG).load()
}
static KEY_STATE: &[u8] = b"state";

/// Mutable global contract state, stored as a singleton under `KEY_STATE`.
#[derive(Serialize, Deserialize, Clone, PartialEq, JsonSchema)]
pub struct State {
    pub contract_addr: CanonicalAddr,
    pub poll_count: u64,
    pub total_share: Uint128,
    pub poll_deposit: Uint128,
    pub last_mint: u64,
    pub total_weight: u32,
}

impl State {
    /// Converts a token `amount` into shares. While no shares exist or the
    /// pool is empty, shares are issued 1:1; otherwise pro-rata as
    /// `amount * total_share / total_balance`.
    pub fn calc_share(&self, amount: Uint128, total_balance: Uint128) -> Uint128 {
        if self.total_share.is_zero() || total_balance.is_zero() {
            amount
        } else {
            amount.multiply_ratio(self.total_share, total_balance)
        }
    }
}

/// Mutable accessor for the state singleton.
pub fn state_store<S: Storage>(storage: &mut S) -> Singleton<S, State> {
    singleton(storage, KEY_STATE)
}

/// Loads the stored state (errors if it was never written).
pub fn read_state<S: Storage>(storage: &S) -> StdResult<State> {
    singleton_read(storage, KEY_STATE).load()
}
static PREFIX_POLL: &[u8] = b"poll";

/// A governance poll, stored in a bucket keyed by its id bytes.
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)]
pub struct Poll {
    pub id: u64,
    pub creator: CanonicalAddr,
    pub status: PollStatus,
    pub yes_votes: Uint128,
    pub no_votes: Uint128,
    pub end_height: u64,
    pub title: String,
    pub description: String,
    pub link: Option<String>,
    // Messages executed when the poll passes.
    pub execute_msgs: Vec<ExecuteMsg>,
    pub deposit_amount: Uint128,
    // Snapshot taken when the poll ends; None while still in progress.
    pub total_balance_at_end_poll: Option<Uint128>,
}

/// Mutable accessor for the poll bucket.
pub fn poll_store<S: Storage>(storage: &mut S) -> Bucket<S, Poll> {
    bucket(PREFIX_POLL, storage)
}

/// Returns the poll stored under `key`, or `None` if absent.
pub fn read_poll<S: ReadonlyStorage>(storage: &S, key: &[u8]) -> StdResult<Option<Poll>> {
    bucket_read(PREFIX_POLL, storage).may_load(key)
}
static PREFIX_ACCOUNT: &[u8] = b"account";

/// Per-user staking account, stored in a bucket keyed by address bytes.
#[derive(Default, Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)]
pub struct Account {
    pub share: Uint128, // total staked balance
    pub locked_balance: Vec<(u64, VoterInfo)>, // maps poll_id to weight voted
}

impl Account {
    /// Converts this account's shares back into a token balance, pro-rata as
    /// `share * total_balance / total_share`; zero while no shares exist.
    pub fn calc_balance(&self, total_balance: Uint128, total_share: Uint128) -> Uint128 {
        if total_share.is_zero() {
            Uint128::zero()
        } else {
            self.share.multiply_ratio(total_balance, total_share)
        }
    }
}

/// Mutable accessor for the account bucket.
pub fn account_store<S: Storage>(storage: &mut S) -> Bucket<S, Account> {
    bucket(PREFIX_ACCOUNT, storage)
}

/// Returns the account stored under `key`, or `None` if absent.
pub fn read_account<S: ReadonlyStorage>(storage: &S, key: &[u8]) -> StdResult<Option<Account>> {
    bucket_read(PREFIX_ACCOUNT, storage).may_load(key)
}
static PREFIX_POLL_INDEXER: &[u8] = b"poll_indexer";

/// Secondary index of poll ids, bucketed per poll status, so polls can be
/// listed by status without scanning the whole poll bucket.
pub fn poll_indexer_store<'a, S: Storage>(
    storage: &'a mut S,
    status: &PollStatus,
) -> Bucket<'a, S, bool> {
    Bucket::multilevel(
        &[PREFIX_POLL_INDEXER, status.to_string().as_bytes()],
        storage,
    )
}
const MAX_LIMIT: u32 = 30;
const DEFAULT_LIMIT: u32 = 10;

/// Pages through stored polls, optionally filtered by status.
/// `limit` defaults to `DEFAULT_LIMIT` and is clamped to `MAX_LIMIT`;
/// ordering defaults to descending unless `OrderBy::Asc` is requested.
pub fn read_polls<'a, S: ReadonlyStorage>(
    storage: &'a S,
    filter: Option<PollStatus>,
    start_after: Option<u64>,
    limit: Option<u32>,
    order_by: Option<OrderBy>,
) -> StdResult<Vec<Poll>> {
    let limit = limit.unwrap_or(DEFAULT_LIMIT).min(MAX_LIMIT) as usize;
    // Ascending iterates forward from `start_after`; descending iterates
    // backward down to it.
    let (start, end, order_by) = match order_by {
        Some(OrderBy::Asc) => (calc_range_start(start_after), None, OrderBy::Asc),
        _ => (None, calc_range_end(start_after), OrderBy::Desc),
    };
    if let Some(status) = filter {
        // Walk the per-status index, then resolve each id in the poll bucket.
        let poll_indexer: ReadonlyBucket<'a, S, bool> = ReadonlyBucket::multilevel(
            &[PREFIX_POLL_INDEXER, status.to_string().as_bytes()],
            storage,
        );
        poll_indexer
            .range(start.as_deref(), end.as_deref(), order_by.into())
            .take(limit)
            .map(|item| {
                let (k, _) = item?;
                // unwrap: assumes every indexed id exists in the poll bucket;
                // this panics if index and bucket ever get out of sync.
                Ok(read_poll(storage, &k)?.unwrap())
            })
            .collect()
    } else {
        // No filter: scan the poll bucket directly.
        let polls: ReadonlyBucket<'a, S, Poll> = ReadonlyBucket::new(PREFIX_POLL, storage);
        polls
            .range(start.as_deref(), end.as_deref(), order_by.into())
            .take(limit)
            .map(|item| {
                let (_, v) = item?;
                Ok(v)
            })
            .collect()
    }
}
static PREFIX_POLL_VOTER: &[u8] = b"poll_voter";

/// Mutable accessor for the per-poll voter bucket (keyed by voter address).
pub fn poll_voter_store<S: Storage>(storage: &mut S, poll_id: u64) -> Bucket<S, VoterInfo> {
    Bucket::multilevel(&[PREFIX_POLL_VOTER, &poll_id.to_be_bytes()], storage)
}

/// Loads one voter's vote on `poll_id`; errors if the voter has not voted.
pub fn read_poll_voter<S: ReadonlyStorage>(
    storage: &S,
    poll_id: u64,
    key: &CanonicalAddr,
) -> StdResult<VoterInfo> {
    ReadonlyBucket::multilevel(&[PREFIX_POLL_VOTER, &poll_id.to_be_bytes()], storage)
        .load(key.as_slice())
}
/// Pages through the voters of `poll_id` with their vote info.
/// `limit` defaults to `DEFAULT_LIMIT` and is clamped to `MAX_LIMIT`;
/// ordering defaults to descending unless `OrderBy::Asc` is requested.
pub fn read_poll_voters<'a, S: ReadonlyStorage>(
    storage: &'a S,
    poll_id: u64,
    start_after: Option<CanonicalAddr>,
    limit: Option<u32>,
    order_by: Option<OrderBy>,
) -> StdResult<Vec<(CanonicalAddr, VoterInfo)>> {
    let limit = limit.unwrap_or(DEFAULT_LIMIT).min(MAX_LIMIT) as usize;
    let (start, end, order_by) = match order_by {
        Some(OrderBy::Asc) => (calc_range_start_addr(start_after), None, OrderBy::Asc),
        _ => (None, calc_range_end_addr(start_after), OrderBy::Desc),
    };
    let voters: ReadonlyBucket<'a, S, VoterInfo> =
        ReadonlyBucket::multilevel(&[PREFIX_POLL_VOTER, &poll_id.to_be_bytes()], storage);
    voters
        .range(start.as_deref(), end.as_deref(), order_by.into())
        .take(limit)
        .map(|item| {
            let (k, v) = item?;
            // Raw storage keys are the voters' canonical address bytes.
            Ok((CanonicalAddr::from(k), v))
        })
        .collect()
}
static PREFIX_VAULT: &[u8] = b"vault";

/// A registered vault and its reward weight, keyed by vault address.
#[derive(Default, Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)]
pub struct Vault {
    pub weight: u32,
}

/// Mutable accessor for the vault bucket.
pub fn vault_store<S: Storage>(storage: &mut S) -> Bucket<S, Vault> {
    bucket(PREFIX_VAULT, storage)
}

/// Returns the vault stored under `key`, or `None` if absent.
pub fn read_vault<S: ReadonlyStorage>(storage: &S, key: &[u8]) -> StdResult<Option<Vault>> {
    bucket_read(PREFIX_VAULT, storage).may_load(key)
}

/// Lists all vaults with their addresses, in descending key order.
pub fn read_vaults<S: ReadonlyStorage>(storage: &S) -> StdResult<Vec<(CanonicalAddr, Vault)>> {
    bucket_read(PREFIX_VAULT, storage)
        .range(None, None, Order::Descending)
        .map(|item| {
            let (k, v) = item?;
            Ok((CanonicalAddr::from(k), v))
        })
        .collect()
}
|
use crate::{
gui::{
BuildContext, CustomWidget, EditorUiMessage, EditorUiNode, SceneItemMessage, Ui, UiMessage,
UiNode,
},
load_image,
scene::{
commands::{
graph::{LinkNodesCommand, SetVisibleCommand},
make_delete_selection_command, ChangeSelectionCommand, SceneCommand,
},
EditorScene, GraphSelection, Selection,
},
send_sync_message, GameEngine, Message,
};
use rg3d::{
core::{algebra::Vector2, pool::Handle, scope_profile},
engine::resource_manager::ResourceManager,
gui::{
brush::Brush,
button::ButtonBuilder,
core::color::Color,
draw::{DrawingContext, SharedTexture},
grid::{Column, GridBuilder, Row},
image::ImageBuilder,
menu::{MenuItemBuilder, MenuItemContent},
message::{
ButtonMessage, DecoratorMessage, MenuItemMessage, MessageDirection, OsEvent,
ScrollViewerMessage, TextMessage, TreeExpansionStrategy, TreeMessage, TreeRootMessage,
UiMessageData, WidgetMessage,
},
node::UINode,
popup::PopupBuilder,
scroll_viewer::ScrollViewerBuilder,
stack_panel::StackPanelBuilder,
text::TextBuilder,
tree::{Tree, TreeBuilder, TreeRootBuilder},
widget::WidgetBuilder,
window::{WindowBuilder, WindowTitle},
Control, HorizontalAlignment, NodeHandleMapping, Orientation, Thickness, VerticalAlignment,
},
scene::node::Node,
};
use std::{
collections::HashMap,
fmt::{Debug, Formatter},
ops::{Deref, DerefMut},
sync::mpsc::Sender,
};
/// Context menu attached to scene items in the world outliner.
struct ItemContextMenu {
    // The popup itself; hidden until opened on an item.
    menu: Handle<UiNode>,
    // "Delete Selection" menu item.
    delete_selection: Handle<UiNode>,
    // "Copy Selection" menu item.
    copy_selection: Handle<UiNode>,
}
impl ItemContextMenu {
    /// Builds the (initially invisible) popup with its two menu items.
    pub fn new(ctx: &mut BuildContext) -> Self {
        let delete_selection;
        let copy_selection;
        let menu = PopupBuilder::new(WidgetBuilder::new().with_visibility(false))
            .with_content(
                StackPanelBuilder::new(
                    WidgetBuilder::new()
                        .with_child({
                            delete_selection = MenuItemBuilder::new(
                                WidgetBuilder::new().with_min_size(Vector2::new(120.0, 20.0)),
                            )
                            .with_content(MenuItemContent::Text {
                                text: "Delete Selection",
                                shortcut: "Del",
                                icon: Default::default(),
                            })
                            .build(ctx);
                            delete_selection
                        })
                        .with_child({
                            copy_selection = MenuItemBuilder::new(
                                WidgetBuilder::new().with_min_size(Vector2::new(120.0, 20.0)),
                            )
                            .with_content(MenuItemContent::Text {
                                text: "Copy Selection",
                                shortcut: "Ctrl+C",
                                icon: Default::default(),
                            })
                            .build(ctx);
                            copy_selection
                        }),
                )
                .build(ctx),
            )
            .build(ctx);
        Self {
            menu,
            delete_selection,
            copy_selection,
        }
    }
    /// Reacts to clicks on the menu items: "delete" emits a delete-selection
    /// scene command; "copy" fills the editor clipboard from the current
    /// graph selection.
    pub fn handle_ui_message(
        &mut self,
        message: &UiMessage,
        editor_scene: &mut EditorScene,
        engine: &GameEngine,
        sender: &Sender<Message>,
    ) {
        scope_profile!();
        if let UiMessageData::MenuItem(MenuItemMessage::Click) = message.data() {
            if message.destination() == self.delete_selection {
                sender
                    .send(Message::DoSceneCommand(make_delete_selection_command(
                        editor_scene,
                        engine,
                    )))
                    .unwrap();
            } else if message.destination() == self.copy_selection {
                // Copy only applies to graph selections.
                if let Selection::Graph(graph_selection) = &editor_scene.selection {
                    editor_scene.clipboard.fill_from_selection(
                        graph_selection,
                        editor_scene.scene,
                        &editor_scene.physics,
                        engine,
                    );
                }
            }
        }
    }
}
/// Dockable window showing the scene graph as a tree, with breadcrumbs for
/// the current selection and collapse/expand/locate controls.
pub struct WorldOutliner {
    pub window: Handle<UiNode>,
    // Tree root holding the scene hierarchy items.
    root: Handle<UiNode>,
    sender: Sender<Message>,
    // Reused traversal stack of (tree item, scene node) pairs.
    stack: Vec<(Handle<UiNode>, Handle<Node>)>,
    /// Hack. Due to delayed execution of UI code we can't sync immediately after we
    /// did sync_to_model, instead we defer selection syncing to post_update() - at
    /// this moment UI is completely built and we can do syncing.
    pub sync_selection: bool,
    // Horizontal panel holding the breadcrumb buttons.
    node_path: Handle<UiNode>,
    // Maps each breadcrumb button to the scene node it selects.
    breadcrumbs: HashMap<Handle<UiNode>, Handle<Node>>,
    collapse_all: Handle<UiNode>,
    expand_all: Handle<UiNode>,
    locate_selection: Handle<UiNode>,
    scroll_view: Handle<UiNode>,
    item_context_menu: ItemContextMenu,
}
/// Custom tree widget representing a single scene node in the outliner.
#[derive(Clone)]
pub struct SceneItem {
    // Wrapped tree widget; SceneItem derefs to it.
    tree: Tree<EditorUiMessage, EditorUiNode>,
    // Text widget showing "name (index:generation)".
    text_name: Handle<UiNode>,
    // Scene node this item represents.
    node: Handle<Node>,
    // Eye-icon button toggling node visibility.
    visibility_toggle: Handle<UiNode>,
    sender: Sender<Message>,
    // Cached visibility state, mirrored from the scene node.
    visibility: bool,
    resource_manager: ResourceManager,
}
impl Debug for SceneItem {
    /// Fixed type tag; the widget's internals are not useful in debug output.
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        f.write_str("SceneItem")
    }
}
// Delegate widget behavior to the wrapped tree's widget.
impl Deref for SceneItem {
    type Target = CustomWidget;
    fn deref(&self) -> &Self::Target {
        &self.tree
    }
}
// Mutable counterpart of the Deref delegation above.
impl DerefMut for SceneItem {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.tree
    }
}
impl Control<EditorUiMessage, EditorUiNode> for SceneItem {
    fn resolve(&mut self, node_map: &NodeHandleMapping<EditorUiMessage, EditorUiNode>) {
        self.tree.resolve(node_map);
        node_map.resolve(&mut self.text_name);
    }
    // Layout, drawing and updating are delegated to the inner tree widget.
    fn measure_override(&self, ui: &Ui, available_size: Vector2<f32>) -> Vector2<f32> {
        self.tree.measure_override(ui, available_size)
    }
    fn arrange_override(&self, ui: &Ui, final_size: Vector2<f32>) -> Vector2<f32> {
        self.tree.arrange_override(ui, final_size)
    }
    fn draw(&self, drawing_context: &mut DrawingContext) {
        self.tree.draw(drawing_context);
    }
    fn update(&mut self, dt: f32) {
        self.tree.update(dt);
    }
    fn handle_routed_message(&mut self, ui: &mut Ui, message: &mut UiMessage) {
        self.tree.handle_routed_message(ui, message);
        match message.data() {
            UiMessageData::Button(msg) => {
                // Eye-icon click: request a visibility flip via a scene command
                // (the actual state change comes back as NodeVisibility).
                if message.destination() == self.visibility_toggle {
                    if let ButtonMessage::Click = msg {
                        let command = SceneCommand::SetVisible(SetVisibleCommand::new(
                            self.node,
                            !self.visibility,
                        ));
                        self.sender.send(Message::DoSceneCommand(command)).unwrap();
                    }
                }
            }
            UiMessageData::User(EditorUiMessage::SceneItem(item)) => match item {
                &SceneItemMessage::NodeVisibility(visibility) => {
                    // Sync the cached flag and swap the eye icon accordingly.
                    if self.visibility != visibility && message.destination() == self.handle() {
                        self.visibility = visibility;
                        let image = if visibility {
                            load_image(include_bytes!("../resources/embed/visible.png"))
                        } else {
                            load_image(include_bytes!("../resources/embed/invisible.png"))
                        };
                        let image = ImageBuilder::new(WidgetBuilder::new())
                            .with_opt_texture(image)
                            .build(&mut ui.build_ctx());
                        ui.send_message(ButtonMessage::content(
                            self.visibility_toggle,
                            MessageDirection::ToWidget,
                            image,
                        ));
                    }
                }
                &SceneItemMessage::Order(order) => {
                    // Zebra striping: alternate background brightness by row parity.
                    if message.destination() == self.handle() {
                        ui.send_message(DecoratorMessage::normal_brush(
                            self.tree.back(),
                            MessageDirection::ToWidget,
                            Brush::Solid(if order {
                                Color::opaque(50, 50, 50)
                            } else {
                                Color::opaque(60, 60, 60)
                            }),
                        ));
                    }
                }
                SceneItemMessage::Name(name) => {
                    // Display name is decorated with the node's handle
                    // as "name (index:generation)".
                    if message.destination() == self.handle() {
                        let name = format!(
                            "{} ({}:{})",
                            name,
                            self.node.index(),
                            self.node.generation()
                        );
                        ui.send_message(TextMessage::text(
                            self.text_name,
                            MessageDirection::ToWidget,
                            name,
                        ));
                    }
                }
            },
            _ => {}
        }
    }
    fn preview_message(&self, ui: &Ui, message: &mut UiMessage) {
        self.tree.preview_message(ui, message);
    }
    fn handle_os_event(&mut self, self_handle: Handle<UiNode>, ui: &mut Ui, event: &OsEvent) {
        self.tree.handle_os_event(self_handle, ui, event);
    }
    fn remove_ref(&mut self, handle: Handle<UiNode>) {
        self.tree.remove_ref(handle);
    }
}
/// Builder for [`SceneItem`] tree widgets.
// NOTE(review): the derived Default yields `visibility: false`, while
// `SceneItemBuilder::new()` sets `visibility: true` — confirm which default
// is intended before relying on `Default::default()` here.
#[derive(Default)]
pub struct SceneItemBuilder {
    node: Handle<Node>,
    name: String,
    visibility: bool,
    icon: Option<SharedTexture>,
    context_menu: Handle<UiNode>,
}
impl SceneItemBuilder {
    /// Starts a builder with visibility enabled and everything else default.
    pub fn new() -> Self {
        Self {
            node: Default::default(),
            name: Default::default(),
            visibility: true,
            icon: None,
            context_menu: Default::default(),
        }
    }
    /// Scene node the item will represent.
    pub fn with_node(mut self, node: Handle<Node>) -> Self {
        self.node = node;
        self
    }
    /// Display name (decorated later with the node's index:generation).
    pub fn with_name(mut self, name: String) -> Self {
        self.name = name;
        self
    }
    /// Initial visibility state for the eye-toggle.
    pub fn with_visibility(mut self, visibility: bool) -> Self {
        self.visibility = visibility;
        self
    }
    /// Optional icon texture shown left of the name.
    pub fn with_icon(mut self, icon: Option<SharedTexture>) -> Self {
        self.icon = icon;
        self
    }
    /// Context menu popup to attach to the item.
    pub fn with_context_menu(mut self, menu: Handle<UiNode>) -> Self {
        self.context_menu = menu;
        self
    }
    /// Builds the item: a tree row containing [icon | name | visibility
    /// toggle], and registers it as a user UI node.
    pub fn build(
        self,
        ctx: &mut BuildContext,
        sender: Sender<Message>,
        resource_manager: ResourceManager,
        node: &Node,
    ) -> Handle<UiNode> {
        let visible_texture = load_image(include_bytes!("../resources/embed/visible.png"));
        let text_name;
        let visibility_toggle;
        let tree = TreeBuilder::new(
            WidgetBuilder::new()
                .with_context_menu(self.context_menu)
                .with_margin(Thickness {
                    left: 1.0,
                    top: 1.0,
                    right: 0.0,
                    bottom: 0.0,
                }),
        )
        .with_content(
            GridBuilder::new(
                WidgetBuilder::new()
                    .with_child(
                        ImageBuilder::new(
                            WidgetBuilder::new()
                                .with_width(16.0)
                                .with_height(16.0)
                                .on_column(0)
                                .with_margin(Thickness::uniform(1.0)),
                        )
                        .with_opt_texture(self.icon)
                        .build(ctx),
                    )
                    .with_child({
                        // Instanced (resource-backed) nodes get a bluish tint.
                        text_name = TextBuilder::new(
                            WidgetBuilder::new()
                                .with_foreground(if node.resource().is_some() {
                                    Brush::Solid(Color::opaque(160, 160, 200))
                                } else {
                                    Brush::Solid(rg3d::gui::COLOR_FOREGROUND)
                                })
                                .with_margin(Thickness::uniform(1.0))
                                .on_column(1)
                                .with_vertical_alignment(VerticalAlignment::Center),
                        )
                        .with_text(format!(
                            "{} ({}:{})",
                            self.name,
                            self.node.index(),
                            self.node.generation()
                        ))
                        .build(ctx);
                        text_name
                    })
                    .with_child({
                        visibility_toggle = ButtonBuilder::new(
                            WidgetBuilder::new()
                                .with_margin(Thickness::uniform(1.0))
                                .with_width(22.0)
                                .with_height(16.0)
                                .with_horizontal_alignment(HorizontalAlignment::Right)
                                .on_column(2),
                        )
                        .with_content(
                            ImageBuilder::new(
                                WidgetBuilder::new().with_margin(Thickness::uniform(1.0)),
                            )
                            .with_opt_texture(visible_texture)
                            .build(ctx),
                        )
                        .build(ctx);
                        visibility_toggle
                    }),
            )
            .add_row(Row::stretch())
            .add_column(Column::auto())
            .add_column(Column::auto())
            .add_column(Column::stretch())
            .build(ctx),
        )
        .build_tree(ctx);
        let item = SceneItem {
            tree,
            node: self.node,
            visibility_toggle,
            sender,
            visibility: self.visibility,
            resource_manager,
            text_name,
        };
        ctx.add_node(UiNode::User(EditorUiNode::SceneItem(item)))
    }
}
/// Builds a `SceneItem` tree widget for `node`, choosing the icon by node
/// kind and wiring in the shared context menu.
fn make_tree(
    node: &Node,
    handle: Handle<Node>,
    ctx: &mut BuildContext,
    sender: Sender<Message>,
    resource_manager: ResourceManager,
    context_menu: Handle<UiNode>,
) -> Handle<UiNode> {
    // Lights get a dedicated glyph; everything else shows a cube.
    let icon = if let Node::Light(_) = node {
        load_image(include_bytes!("../resources/embed/light.png"))
    } else {
        load_image(include_bytes!("../resources/embed/cube.png"))
    };
    SceneItemBuilder::new()
        .with_node(handle)
        .with_name(node.name().to_owned())
        .with_visibility(node.visibility())
        .with_icon(icon)
        .with_context_menu(context_menu)
        .build(ctx, sender, resource_manager, node)
}
/// Maps an outliner tree item back to the scene node it represents.
/// Panics if `tree` is not a `SceneItem` — callers only pass item handles.
fn tree_node(ui: &Ui, tree: Handle<UiNode>) -> Handle<Node> {
    match ui.node(tree) {
        UiNode::User(EditorUiNode::SceneItem(item)) => item.node,
        _ => unreachable!(),
    }
}
/// Recursively assigns alternating row colors (zebra striping) to all scene
/// items below `tree`, incrementing `index` in visit order.
// NOTE(review): this uses `UINode` while the rest of the module uses `UiNode`;
// presumably they alias the same type — confirm.
fn colorize(tree: Handle<UiNode>, ui: &Ui, index: &mut usize) {
    match ui.node(tree) {
        UINode::User(EditorUiNode::SceneItem(i)) => {
            // Even rows get the "order = true" (darker) brush.
            ui.send_message(UiMessage::user(
                tree,
                MessageDirection::ToWidget,
                EditorUiMessage::SceneItem(SceneItemMessage::Order(*index % 2 == 0)),
            ));
            *index += 1;
            for &item in i.tree.items() {
                colorize(item, ui, index);
            }
        }
        UINode::TreeRoot(root) => {
            // The root itself is not striped; recurse into its items.
            for &item in root.items() {
                colorize(item, ui, index);
            }
        }
        _ => (),
    }
}
impl WorldOutliner {
    /// Builds the outliner window: a toolbar row (collapse/expand/locate),
    /// a breadcrumbs row, and a scrollable tree of the scene graph.
    pub fn new(ctx: &mut BuildContext, sender: Sender<Message>) -> Self {
        let root;
        let node_path;
        let collapse_all;
        let expand_all;
        let locate_selection;
        let scroll_view;
        let window = WindowBuilder::new(WidgetBuilder::new())
            .can_minimize(false)
            .with_title(WindowTitle::text("Scene Graph"))
            .with_content(
                GridBuilder::new(
                    WidgetBuilder::new()
                        .with_child(
                            // Row 0: toolbar buttons.
                            StackPanelBuilder::new(
                                WidgetBuilder::new()
                                    .with_margin(Thickness::uniform(1.0))
                                    .on_row(0)
                                    .with_child({
                                        collapse_all = ButtonBuilder::new(
                                            WidgetBuilder::new()
                                                .with_margin(Thickness::uniform(1.0)),
                                        )
                                        .with_text("Collapse All")
                                        .build(ctx);
                                        collapse_all
                                    })
                                    .with_child({
                                        expand_all = ButtonBuilder::new(
                                            WidgetBuilder::new()
                                                .with_margin(Thickness::uniform(1.0)),
                                        )
                                        .with_text("Expand All")
                                        .build(ctx);
                                        expand_all
                                    })
                                    .with_child({
                                        locate_selection = ButtonBuilder::new(
                                            WidgetBuilder::new()
                                                .with_margin(Thickness::uniform(1.0)),
                                        )
                                        .with_text("Locate Selection")
                                        .build(ctx);
                                        locate_selection
                                    }),
                            )
                            .with_orientation(Orientation::Horizontal)
                            .build(ctx),
                        )
                        .with_child(
                            // Row 1: faint placeholder text behind the breadcrumbs.
                            TextBuilder::new(
                                WidgetBuilder::new()
                                    .on_row(1)
                                    .on_column(0)
                                    .with_opacity(0.4),
                            )
                            .with_text("Breadcrumbs")
                            .with_vertical_text_alignment(VerticalAlignment::Center)
                            .with_horizontal_text_alignment(HorizontalAlignment::Center)
                            .build(ctx),
                        )
                        .with_child(
                            // Row 1: horizontally scrollable breadcrumb panel.
                            ScrollViewerBuilder::new(WidgetBuilder::new().on_row(1))
                                .with_content({
                                    node_path = StackPanelBuilder::new(WidgetBuilder::new())
                                        .with_orientation(Orientation::Horizontal)
                                        .build(ctx);
                                    node_path
                                })
                                .build(ctx),
                        )
                        .with_child({
                            // Row 2: the scene graph tree.
                            scroll_view = ScrollViewerBuilder::new(WidgetBuilder::new().on_row(2))
                                .with_content({
                                    root = TreeRootBuilder::new(WidgetBuilder::new()).build(ctx);
                                    root
                                })
                                .build(ctx);
                            scroll_view
                        }),
                )
                .add_column(Column::stretch())
                .add_row(Row::strict(24.0))
                .add_row(Row::strict(24.0))
                .add_row(Row::stretch())
                .build(ctx),
            )
            .build(ctx);
        let item_context_menu = ItemContextMenu::new(ctx);
        Self {
            window,
            sender,
            root,
            node_path,
            stack: Default::default(),
            sync_selection: false,
            breadcrumbs: Default::default(),
            locate_selection,
            collapse_all,
            expand_all,
            scroll_view,
            item_context_menu,
        }
    }
    /// Reconciles the outliner tree with the scene graph: adds items for new
    /// nodes, removes items for deleted ones, rebuilds the breadcrumbs, and
    /// refreshes per-item name/visibility and row colors. Editor-internal
    /// nodes (rooted at `editor_scene.root`) are filtered out.
    pub fn sync_to_model(&mut self, editor_scene: &EditorScene, engine: &mut GameEngine) {
        scope_profile!();
        let scene = &mut engine.scenes[editor_scene.scene];
        let graph = &mut scene.graph;
        let ui = &mut engine.user_interface;
        let mut selected_items = Vec::new();
        // Sync tree structure with graph structure.
        self.stack.clear();
        self.stack.push((self.root, graph.get_root()));
        while let Some((tree_handle, node_handle)) = self.stack.pop() {
            // Hide all editor nodes.
            if node_handle == editor_scene.root {
                continue;
            }
            let node = &graph[node_handle];
            match ui.node(tree_handle) {
                UiNode::User(usr) => {
                    if let EditorUiNode::SceneItem(item) = usr {
                        // Since we are filtering out editor stuff from world outliner, we must
                        // correctly count children, excluding editor nodes.
                        let mut child_count = 0;
                        for &child in node.children() {
                            if child != editor_scene.root {
                                child_count += 1;
                            }
                        }
                        let items = item.tree.items().to_vec();
                        if child_count < items.len() {
                            // Fewer graph children than tree items: drop stale items.
                            for &item in items.iter() {
                                let child_node = tree_node(ui, item);
                                if !node.children().contains(&child_node) {
                                    send_sync_message(
                                        ui,
                                        TreeMessage::remove_item(
                                            tree_handle,
                                            MessageDirection::ToWidget,
                                            item,
                                        ),
                                    );
                                } else {
                                    self.stack.push((item, child_node));
                                }
                            }
                        } else if child_count > item.tree.items().len() {
                            // More graph children than tree items: create missing items.
                            for &child_handle in node.children() {
                                // Hide all editor nodes.
                                if child_handle == editor_scene.root {
                                    continue;
                                }
                                let mut found = false;
                                for &item in items.iter() {
                                    let tree_node_handle = tree_node(ui, item);
                                    if tree_node_handle == child_handle {
                                        self.stack.push((item, child_handle));
                                        found = true;
                                        break;
                                    }
                                }
                                if !found {
                                    let tree = make_tree(
                                        &graph[child_handle],
                                        child_handle,
                                        &mut ui.build_ctx(),
                                        self.sender.clone(),
                                        engine.resource_manager.clone(),
                                        self.item_context_menu.menu,
                                    );
                                    send_sync_message(
                                        ui,
                                        TreeMessage::add_item(
                                            tree_handle,
                                            MessageDirection::ToWidget,
                                            tree,
                                        ),
                                    );
                                    // Newly created items for selected nodes are
                                    // collected and selected in one go below.
                                    if let Selection::Graph(selection) = &editor_scene.selection {
                                        if selection.contains(child_handle) {
                                            selected_items.push(tree);
                                        }
                                    }
                                    self.stack.push((tree, child_handle));
                                }
                            }
                        } else {
                            // Same count: just descend into existing items.
                            for &tree in items.iter() {
                                let child = tree_node(ui, tree);
                                self.stack.push((tree, child));
                            }
                        }
                    }
                }
                UiNode::TreeRoot(root) => {
                    if root.items().is_empty() {
                        let tree = make_tree(
                            node,
                            node_handle,
                            &mut ui.build_ctx(),
                            self.sender.clone(),
                            engine.resource_manager.clone(),
                            self.item_context_menu.menu,
                        );
                        send_sync_message(
                            ui,
                            TreeRootMessage::add_item(
                                tree_handle,
                                MessageDirection::ToWidget,
                                tree,
                            ),
                        );
                        self.stack.push((tree, node_handle));
                    } else {
                        self.stack.push((root.items()[0], node_handle));
                    }
                }
                _ => unreachable!(),
            }
        }
        if !selected_items.is_empty() {
            send_sync_message(
                ui,
                TreeRootMessage::select(self.root, MessageDirection::ToWidget, selected_items),
            );
        }
        // Update breadcrumbs.
        self.breadcrumbs.clear();
        for &child in ui.node(self.node_path).children() {
            send_sync_message(ui, WidgetMessage::remove(child, MessageDirection::ToWidget));
        }
        if let Selection::Graph(selection) = &editor_scene.selection {
            if let Some(&first_selected) = selection.nodes().first() {
                // Walk from the first selected node up to the graph root,
                // adding one button per ancestor (link_reverse keeps order).
                let mut item = first_selected;
                while item.is_some() {
                    let node = &graph[item];
                    let element = ButtonBuilder::new(
                        WidgetBuilder::new().with_margin(Thickness::uniform(1.0)),
                    )
                    .with_text(node.name())
                    .build(&mut ui.build_ctx());
                    send_sync_message(
                        ui,
                        WidgetMessage::link_reverse(
                            element,
                            MessageDirection::ToWidget,
                            self.node_path,
                        ),
                    );
                    self.breadcrumbs.insert(element, item);
                    item = node.parent();
                }
            }
        }
        // Sync items data.
        let mut stack = vec![self.root];
        while let Some(handle) = stack.pop() {
            match ui.node(handle) {
                UiNode::User(usr) => {
                    if let EditorUiNode::SceneItem(item) = usr {
                        if graph.is_valid_handle(item.node) {
                            let node = &graph[item.node];
                            send_sync_message(
                                ui,
                                SceneItemMessage::node_visibility(handle, node.visibility()),
                            );
                            send_sync_message(
                                ui,
                                SceneItemMessage::name(handle, node.name().to_owned()),
                            );
                            stack.extend_from_slice(item.tree.items());
                        }
                    }
                }
                UiNode::TreeRoot(root) => stack.extend_from_slice(root.items()),
                _ => unreachable!(),
            }
        }
        self.colorize(ui);
    }
    /// Maps a tree item handle to its scene node; NONE maps to NONE.
    fn map_tree_to_node(&self, tree: Handle<UiNode>, ui: &Ui) -> Handle<Node> {
        if tree.is_some() {
            tree_node(ui, tree)
        } else {
            Handle::NONE
        }
    }
    /// Re-applies zebra striping over the whole tree.
    pub fn colorize(&mut self, ui: &Ui) {
        let mut index = 0;
        colorize(self.root, ui, &mut index);
    }
    /// Routes UI messages: tree selection -> editor selection command,
    /// drag-and-drop -> node re-parenting, toolbar/breadcrumb clicks.
    pub fn handle_ui_message(
        &mut self,
        message: &UiMessage,
        editor_scene: &mut EditorScene,
        engine: &GameEngine,
    ) {
        scope_profile!();
        self.item_context_menu
            .handle_ui_message(message, editor_scene, engine, &self.sender);
        match message.data() {
            UiMessageData::TreeRoot(msg) => {
                if message.destination() == self.root
                    && message.direction() == MessageDirection::FromWidget
                {
                    if let TreeRootMessage::Selected(selection) = msg {
                        let new_selection = Selection::Graph(GraphSelection::from_list(
                            selection
                                .iter()
                                .map(|&h| self.map_tree_to_node(h, &engine.user_interface))
                                .collect(),
                        ));
                        // Only emit a command when the selection actually changed,
                        // otherwise sync echoes would spam undo history.
                        if new_selection != editor_scene.selection {
                            self.sender
                                .send(Message::DoSceneCommand(SceneCommand::ChangeSelection(
                                    ChangeSelectionCommand::new(
                                        new_selection,
                                        editor_scene.selection.clone(),
                                    ),
                                )))
                                .unwrap();
                        }
                    }
                }
            }
            &UiMessageData::Widget(WidgetMessage::Drop(node)) => {
                // Drag-and-drop inside the outliner re-parents scene nodes.
                if engine.user_interface.is_node_child_of(node, self.root)
                    && engine
                        .user_interface
                        .is_node_child_of(message.destination(), self.root)
                    && node != message.destination()
                {
                    let child = self.map_tree_to_node(node, &engine.user_interface);
                    let parent =
                        self.map_tree_to_node(message.destination(), &engine.user_interface);
                    if child.is_some() && parent.is_some() {
                        // Make sure we won't create any loops - child must not have parent in its
                        // descendants.
                        let mut attach = true;
                        let graph = &engine.scenes[editor_scene.scene].graph;
                        let mut p = parent;
                        while p.is_some() {
                            if p == child {
                                attach = false;
                                break;
                            }
                            p = graph[p].parent();
                        }
                        if attach {
                            self.sender
                                .send(Message::DoSceneCommand(SceneCommand::LinkNodes(
                                    LinkNodesCommand::new(child, parent),
                                )))
                                .unwrap();
                        }
                    }
                }
            }
            UiMessageData::Button(ButtonMessage::Click) => {
                if let Some(&node) = self.breadcrumbs.get(&message.destination()) {
                    // Breadcrumb click selects the corresponding ancestor node.
                    self.sender
                        .send(Message::DoSceneCommand(SceneCommand::ChangeSelection(
                            ChangeSelectionCommand::new(
                                Selection::Graph(GraphSelection::single_or_empty(node)),
                                editor_scene.selection.clone(),
                            ),
                        )))
                        .unwrap();
                } else if message.destination() == self.collapse_all {
                    engine
                        .user_interface
                        .send_message(TreeRootMessage::collapse_all(
                            self.root,
                            MessageDirection::ToWidget,
                        ));
                } else if message.destination() == self.expand_all {
                    engine
                        .user_interface
                        .send_message(TreeRootMessage::expand_all(
                            self.root,
                            MessageDirection::ToWidget,
                        ));
                } else if message.destination() == self.locate_selection {
                    // Expand ancestors of the first selected node and scroll to it.
                    if let Selection::Graph(ref selection) = editor_scene.selection {
                        if let Some(&first) = selection.nodes().first() {
                            let tree = self.map_node_to_tree(&engine.user_interface, first);
                            engine.user_interface.send_message(TreeMessage::expand(
                                tree,
                                MessageDirection::ToWidget,
                                true,
                                TreeExpansionStrategy::RecursiveAncestors,
                            ));
                            engine.user_interface.send_message(
                                ScrollViewerMessage::bring_into_view(
                                    self.scroll_view,
                                    MessageDirection::ToWidget,
                                    tree,
                                ),
                            );
                        }
                    }
                }
            }
            _ => {}
        }
    }
    /// Deferred selection sync (see `sync_selection` on the struct).
    pub fn post_update(&mut self, editor_scene: &EditorScene, engine: &GameEngine) {
        // Hack. See `self.sync_selection` for details.
        if self.sync_selection {
            let ui = &engine.user_interface;
            let trees = if let Selection::Graph(selection) = &editor_scene.selection {
                selection
                    .nodes()
                    .iter()
                    .map(|&n| self.map_node_to_tree(ui, n))
                    .collect()
            } else {
                Default::default()
            };
            send_sync_message(
                ui,
                TreeRootMessage::select(self.root, MessageDirection::ToWidget, trees),
            );
            self.sync_selection = false;
        }
    }
    /// Removes every item from the tree (e.g. when a scene is closed).
    pub fn clear(&mut self, ui: &mut Ui) {
        ui.send_message(TreeRootMessage::items(
            self.root,
            MessageDirection::ToWidget,
            vec![],
        ));
    }
    /// Finds the tree item representing `node` by depth-first search.
    /// Panics if the node has no item — callers pass nodes already synced.
    fn map_node_to_tree(&self, ui: &Ui, node: Handle<Node>) -> Handle<UiNode> {
        let mut stack = vec![self.root];
        while let Some(tree_handle) = stack.pop() {
            match ui.node(tree_handle) {
                UiNode::User(usr) => {
                    if let EditorUiNode::SceneItem(item) = usr {
                        if item.node == node {
                            return tree_handle;
                        }
                        stack.extend_from_slice(item.tree.items());
                    }
                }
                UiNode::TreeRoot(root) => {
                    stack.extend_from_slice(root.items());
                }
                _ => unreachable!(),
            }
        }
        unreachable!("Must not be reached. If still triggered then there is a bug.")
    }
}
|
use shorthand::ShortHand;
// Simple Copy wrapper used to exercise shorthand's generated getters.
#[derive(Copy, Clone, Default)]
struct Number(usize);

// `copy` is enabled struct-wide, then disabled for `index2` only, so its
// generated getter should return a reference instead of a copy.
#[derive(ShortHand, Default)]
#[shorthand(enable(copy))]
struct Command {
    index: Number,
    #[shorthand(disable(copy))]
    index2: Number,
    index3: Number,
}
#[test]
fn test_copy() {
    // Getters return by value where `copy` is enabled...
    let _: Number = Command::default().index();
    // ...and by reference for the field where it was disabled.
    let _: &Number = Command::default().index2();
    let _: Number = Command::default().index3();
}

// Empty entry point so the file builds as a binary; the test is the point.
fn main() {}
|
// chapter 2 "using variables and types"
// Demonstrates that integer values are copied, not moved, on assignment.
fn main() {
    let energy = 5;
    // Integers are Copy, so `energy` remains usable after this assignment.
    let copy_energy = energy;
    println!("your energy is {}", copy_energy);
}
/* output should be:
your energy is 5
end of output */
|
use anchor_lang::prelude::*;
declare_id!("Av2WRMKbkw1ircKXbxh9djiBUhJzasHEhXXHkcz3xVUw");
const LIKES_CAPACITY: u8 = 200;
#[program]
pub mod likes {
    use super::*;
    /// Initializes a likes account: cursor at 0 and a full ring of zeroed
    /// transactions, then checks the account address derivation for `user`.
    pub fn create_likes_account(ctx: Context<CreateLikesAccount>) -> ProgramResult {
        let mut likes = ctx.accounts.likes.load_init()?;
        likes.insert_at = 0;
        likes.transactions = [Transaction { signature: [0; 88] }; 200];
        verify_likes_address(ctx.accounts.likes.key(), ctx.accounts.user.key());
        Ok(())
    }
    /// Records a like: writes the signature at the cursor slot, then
    /// advances the cursor, wrapping at capacity (ring-buffer overwrite).
    pub fn new_like(ctx: Context<NewLike>, tx_signature: String) -> ProgramResult {
        let mut likes = ctx.accounts.likes.load_mut()?;
        verify_likes_address(ctx.accounts.likes.key(), ctx.accounts.user.key());
        let new_transaction = new_transaction(tx_signature);
        let insert_at = usize::from(likes.insert_at);
        likes.transactions[insert_at] = new_transaction;
        likes.insert_at = next_insert_index(likes.insert_at);
        Ok(())
    }
}
/// Aborts the transaction (panics) unless `likes` is the address derived
/// from `user` via [`expected_likes_address`].
pub fn verify_likes_address(likes: Pubkey, user: Pubkey) {
    let expected = expected_likes_address(user);
    if likes == expected {
        return;
    }
    msg!("wrong likes account, doesn't belong to given user");
    panic!();
}
/// Derives the deterministic likes-account address for `base`, seeded with
/// the literal "likes" and this program's id.
pub fn expected_likes_address(base: Pubkey) -> Pubkey {
    // unwrap: the seed is a short constant, so derivation is not expected to fail.
    Pubkey::create_with_seed(&base, "likes", &id()).unwrap()
}
/// Advances the ring-buffer cursor, wrapping back to 0 at `LIKES_CAPACITY`.
pub fn next_insert_index(previous: u8) -> u8 {
    let next = previous + 1;
    if next == LIKES_CAPACITY {
        0
    } else {
        next
    }
}
/// Packs a signature string's bytes into the fixed 88-byte array,
/// zero-padded on the right. Panics if the string exceeds 88 bytes
/// (the slice-copy below is out of bounds in that case).
fn new_transaction(tx_signature: String) -> Transaction {
    let bytes = tx_signature.as_bytes();
    let mut signature = [0u8; 88];
    signature[..bytes.len()].copy_from_slice(bytes);
    Transaction { signature }
}
/// Accounts for `new_like`: the mutable likes account plus the signing user.
#[derive(Accounts)]
pub struct NewLike<'info> {
    #[account(mut)]
    likes: Loader<'info, Likes>,
    user: Signer<'info>,
}

/// Accounts for `create_likes_account`: `zero` requires a freshly
/// allocated, zero-initialized likes account.
#[derive(Accounts)]
pub struct CreateLikesAccount<'info> {
    #[account(zero)]
    likes: Loader<'info, Likes>,
    user: Signer<'info>,
}
/// Ring buffer of liked transactions; `insert_at` is the next write slot
/// (array length must match `LIKES_CAPACITY`).
#[account(zero_copy)]
pub struct Likes {
    insert_at: u8,
    transactions: [Transaction; 200],
}

/// One recorded like: a transaction signature string's bytes,
/// zero-padded to 88 bytes.
#[zero_copy]
pub struct Transaction {
    pub signature: [u8; 88],
}
|
#![feature(plugin)]
#![plugin(rocket_codegen)]
extern crate rocket;
extern crate serde_json;
#[macro_use] extern crate rocket_contrib;
#[macro_use] extern crate serde_derive;
extern crate futures;
extern crate tokio_core;
extern crate tokio_process;
//#[cfg(test)] mod tests;
//#[macro_use] extern crate log;
extern crate daemonize;
use rocket_contrib::{Json, Value};
use rocket::State;
use std::collections::HashMap;
use std::sync::Mutex;
//use serde_json::{Value, Error};
use std::process::Command;
use tokio_core::reactor::Core;
use tokio_process::CommandExt;
// The type to represent the ID of a message.
type ID = usize;
// We're going to store all of the messages here. No need for a DB.
type MessageMap = Mutex<HashMap<ID, String>>;
/// Simple id + text payload.
/// NOTE(review): not referenced by the `new` handler below — possibly leftover.
#[derive(Serialize, Deserialize)]
struct Message {
    id: Option<ID>,
    contents: String
}
/// Request body for `POST /message`: a command to execute.
#[derive(Serialize, Deserialize)]
struct Instruction {
    id: Option<ID>,
    command: String, // bracketed, comma-separated argv, e.g. "[ls, -l, .]"
    cwd: String,     // working directory for the command
    state: String    // opaque client state, echoed back in the response
}
// TODO: This example can be improved by using `route` with multiple HTTP verbs.
// TODO: This example can be improved by using `route` with multiple HTTP verbs.
/// Stores the received instruction and executes its command.
///
/// The body's `command` field is expected to look like "[prog, arg1, arg2]".
/// It is split into an argv vector, run with `cwd` as the working directory,
/// and the client-provided `state` is echoed back in the JSON response.
#[post("/", format = "application/json", data = "<message>")]
fn new(message: Json<Instruction>, map: State<MessageMap>) -> Json<Value> {
    let mut hashmap = map.lock().expect("map lock.");
    let storedjson = json!({
        "command" : message.0.command,
        "cwd" : message.0.cwd,
        "state" : message.0.state
    });
    println!("{}", storedjson.to_string());
    // NOTE(review): fixed id overwrites the previous entry on every request.
    let id = 1;
    hashmap.insert(id, storedjson.to_string());
    // Strip the surrounding brackets, then split on ',' into argv tokens.
    let patterns : &[_] = &['[', ']'];
    let command_vec = message.command.lines().map(|s| s.trim_matches(patterns)
        .split(',').map(String::from).collect::<Vec<String>>())
        .collect::<Vec<Vec<String>>>();
    let patterns : &[_] = &[' '];
    println!("{:?}", command_vec[0]);
    let mut core = Core::new().unwrap();
    // BUG FIX: the original spawned `cd <cwd>` as a child process. `cd` is a
    // shell builtin, not an executable, so the spawn failed — and even a
    // successful child could never change this process's directory. Set the
    // working directory on the real command instead. Also pass every token
    // after the program name as an argument, instead of indexing [1] and [2]
    // (which panicked on commands with fewer than three tokens).
    let child = Command::new(command_vec[0][0].trim_matches(patterns))
        .args(command_vec[0].iter().skip(1).map(|a| a.trim_matches(patterns)))
        .current_dir(&message.cwd)
        .spawn_async(&core.handle());
    let child = child.expect("failed to spawn");
    match core.run(child) {
        Ok(status) => println!("exit status: {}", status),
        Err(e) => panic!("failed to wait for exit: {}", e),
    }
    Json(json!({
        "status": "The command executed successfully",
        "state": message.state,
    }))
}
/// Catch-all 404 handler: uniform JSON error body for unmatched routes.
#[error(404)]
fn not_found() -> Json<Value> {
    let body = json!({
        "status": "error",
        "reason": "Resource was not found."
    });
    Json(body)
}
/// Builds the Rocket instance: mounts the message route, registers the 404
/// catcher, and installs the shared in-memory message map as managed state.
fn rocket() -> rocket::Rocket {
    rocket::ignite()
        .mount("/message", routes![new])
        .catch(errors![not_found])
        .manage(Mutex::new(HashMap::<ID, String>::new()))
}
fn main() {
    // Blocks serving HTTP until the process is terminated.
    rocket().launch();
}
|
use anyhow::Result;
use nom::{
bytes::complete::{is_not, tag},
character::complete::{digit1, line_ending},
combinator::map,
multi::{many1, separated_list1},
sequence::tuple,
IResult,
};
use std::{fs, str::FromStr};
/// Parses a run of ASCII digits into any `FromStr` integer type.
/// Panics (via unwrap) if the digits overflow the target type.
fn parse_num<T>(input: &str) -> IResult<&str, T>
where
    T: FromStr,
    <T as std::str::FromStr>::Err: std::fmt::Debug,
{
    map(digit1, |digit_str: &str| digit_str.parse::<T>().unwrap())(input)
}
/// A ticket-field rule: two inclusive valid ranges,
/// parsed from e.g. "class: 1-3 or 5-7".
#[derive(Debug)]
struct Rule<'a> {
    name: &'a str,
    low_min: usize,
    low_max: usize,
    high_min: usize,
    high_max: usize,
}
/// Parses one rule line of the form `<name>: <a>-<b> or <c>-<d>`.
fn parse_rule(input: &str) -> IResult<&str, Rule> {
    // Everything up to the first ':' is the field name.
    let (input, name) = is_not(":")(input)?;
    let (input, _) = tag(": ")(input)?;
    let (input, low_min) = parse_num(input)?;
    let (input, _) = tag("-")(input)?;
    let (input, low_max) = parse_num(input)?;
    let (input, _) = tag(" or ")(input)?;
    let (input, high_min) = parse_num(input)?;
    let (input, _) = tag("-")(input)?;
    let (input, high_max) = parse_num(input)?;
    Ok((
        input,
        Rule {
            name,
            low_min,
            low_max,
            high_min,
            high_max,
        },
    ))
}
// A ticket is just the list of its field values, in document order.
type Ticket = Vec<usize>;
/// Parses the whole puzzle input: rule lines, the "your ticket:" section,
/// then the "nearby tickets:" block.
fn parse_input(input: &str) -> IResult<&str, (Vec<Rule>, Ticket, Vec<Ticket>)> {
    let mut parse_ticket = separated_list1(tag(","), parse_num);
    let (input, rules) = separated_list1(line_ending, parse_rule)(input)?;
    // Blank line(s) + section header separate each part of the input.
    let (input, _) = tuple((many1(line_ending), tag("your ticket:"), line_ending))(input)?;
    let (input, my_ticket) = parse_ticket(input)?;
    let (input, _) = tuple((many1(line_ending), tag("nearby tickets:"), line_ending))(input)?;
    let (input, nearby_tickets) = separated_list1(line_ending, parse_ticket)(input)?;
    Ok((input, (rules, my_ticket, nearby_tickets)))
}
/// AoC 2020 day 16 part 1: the "ticket scanning error rate".
///
/// Sums every field on every nearby ticket that is not valid for *any* rule
/// (a field is valid for a rule when it falls in either of its two ranges).
/// Panics on unparseable input (acceptable for a puzzle solution).
fn do_the_thing(input: &str) -> usize {
    let (_, (rules, _, nearby_tickets)) = parse_input(input).unwrap();
    nearby_tickets
        .into_iter()
        .flat_map(|ticket| {
            ticket
                .into_iter()
                // Keep only fields matched by no rule; `!any` replaces the
                // original `.filter(...).count() == 0`, which scanned every
                // rule instead of short-circuiting on the first match.
                .filter(|&field| {
                    !rules.iter().any(|rule| {
                        (rule.low_min..=rule.low_max).contains(&field)
                            || (rule.high_min..=rule.high_max).contains(&field)
                    })
                })
                .collect::<Vec<_>>()
        })
        .sum()
}
fn main() -> Result<()> {
    // Day 16 part 1: print the ticket scanning error rate for input.txt.
    let input = fs::read_to_string("input.txt")?;
    println!("{:?}", do_the_thing(&input));
    Ok(())
}
#[cfg(test)]
mod tests {
    use super::*;
    use test_case::test_case;
    // Worked example from the puzzle statement; invalid fields 4, 55, 12 sum to 71.
    #[test_case("class: 1-3 or 5-7
row: 6-11 or 33-44
seat: 13-40 or 45-50
your ticket:
7,1,14
nearby tickets:
7,3,47
40,4,50
55,2,20
38,6,12" => 71)]
    fn first(input: &str) -> usize {
        do_the_thing(&input)
    }
}
|
//! Types.
use gfx_debug_draw;
use gfx_device_gl;
/// The type of debug renderer.
///
/// Pins the generic `gfx_debug_draw` renderer to the GL backend's
/// resource and factory types.
pub type DebugRenderer =
    gfx_debug_draw::DebugRenderer<gfx_device_gl::Resources, gfx_device_gl::Factory>;
|
use std::collections::HashMap;
use std::fs::File;
use std::io;
use std::io::prelude::*;
use std::io::BufReader;
use std::path::PathBuf;
use anyhow::{anyhow, Result};
use serde::{Deserialize, Serialize};
use crate::lineage;
/// One row of the assignments CSV: which model a sequence diagram used,
/// together with both taxids.
#[derive(Debug, Deserialize)]
struct DiagramAssignment {
    // Sequence identifier (presumably an RNAcentral URS — confirm).
    urs: String,
    model_name: String,
    sequence_taxid: usize,
    model_taxid: usize,
}
/// Output row: the lowest-common-ancestor rank between the sequence's taxon
/// and the model's taxon.
#[derive(Debug, Deserialize, Serialize)]
struct Lca {
    urs: String,
    taxid: usize,
    model_name: String,
    ancestor_rank: lineage::Rank,
}
// taxid -> lineage mapping, keyed by the mapping's own taxid.
type TreeInfo = HashMap<usize, lineage::Mapping>;
/// Loads one JSON-encoded `lineage::Mapping` per line from `filename`.
///
/// # Errors
/// Fails on I/O errors or a malformed JSON line.
fn load_taxid_trees(filename: PathBuf) -> Result<TreeInfo> {
    let file = File::open(filename)?;
    let file = BufReader::new(file);
    let mut info: TreeInfo = HashMap::new();
    for line in file.lines() {
        let line = line?;
        let mapping: lineage::Mapping = serde_json::from_str(&line)?;
        info.insert(mapping.taxid, mapping);
    }
    // Idiomatic tail expression instead of an explicit `return`.
    Ok(info)
}
/// Finds the lowest common ancestor rank between an assignment's sequence
/// taxon and model taxon.
///
/// Both lineages are looked up *before* the equal-taxid shortcut, preserving
/// the original behavior of reporting a missing lineage even when the two
/// taxids are equal.
///
/// # Errors
/// Fails when either lineage is missing from `trees`, or when no rank is
/// shared by the two lineages.
fn lca(trees: &TreeInfo, assignment: DiagramAssignment) -> Result<Lca> {
    // `ok_or_else` + `?` replaces the original match-into-Result blocks.
    let sequence_lineage = trees
        .get(&assignment.sequence_taxid)
        .ok_or_else(|| anyhow!("Missing lineage for {}", &assignment.sequence_taxid))?;
    let model_lineage = trees
        .get(&assignment.model_taxid)
        .ok_or_else(|| anyhow!("Missing lineage for {}", &assignment.model_taxid))?;
    if assignment.sequence_taxid == assignment.model_taxid {
        // Same taxon on both sides: the LCA is the species itself.
        return Ok(Lca {
            urs: assignment.urs,
            taxid: assignment.sequence_taxid,
            model_name: assignment.model_name,
            ancestor_rank: lineage::Rank::Species,
        });
    }
    // Walk ranks from most to least specific; first shared taxon wins.
    for rank in lineage::Rank::ascending() {
        let sequence_taxon = sequence_lineage.taxon_at(&rank);
        let model_taxon = model_lineage.taxon_at(&rank);
        if sequence_taxon == model_taxon {
            return Ok(Lca {
                urs: assignment.urs,
                taxid: assignment.sequence_taxid,
                model_name: assignment.model_name,
                ancestor_rank: rank,
            });
        }
    }
    Err(anyhow!("Failed to find lca for {:?}", assignment))
}
/// Reads diagram assignments (CSV) and writes one LCA row per assignment to
/// stdout as CSV.
///
/// # Errors
/// Fails on I/O problems, malformed input, or when an LCA cannot be found.
pub fn write_lca(taxid_filename: PathBuf, assignments_filename: PathBuf) -> Result<()> {
    let mut wtr = csv::Writer::from_writer(io::stdout());
    let trees = load_taxid_trees(taxid_filename)?;
    let file = File::open(assignments_filename)?;
    let file = BufReader::new(file);
    let mut reader = csv::Reader::from_reader(file);
    for result in reader.deserialize() {
        let assignment: DiagramAssignment = result?;
        let lca = lca(&trees, assignment)?;
        wtr.serialize(lca)?;
    }
    // Flush explicitly: csv::Writer's Drop flushes but silently swallows errors.
    wtr.flush()?;
    Ok(())
}
|
use crate::irc::command::Error as CommandError;
use crate::irc::prefix::Error as PrefixError;
use crate::irc::{command::Command, prefix::Prefix};
/// Errors produced while parsing a raw IRC line.
#[derive(Debug, PartialEq)]
pub enum Error {
    PrefixError(PrefixError),
    CommandError(CommandError),
}
/// A parsed IRC message: an optional prefix plus the command.
#[derive(Debug, PartialEq, Clone)]
pub struct Message {
    pub prefix: Option<Prefix>,
    pub command: Command,
}
impl Message {
    /// Parses one raw IRC line into prefix + command.
    ///
    /// A missing prefix (`PrefixError::MissingLead`) is not an error — the
    /// line simply has no prefix. Any other prefix error is propagated.
    pub fn parse(input: &str) -> Result<Self, Error> {
        let prefix = match Prefix::parse(input) {
            // `end` is the index where the prefix stops inside `input`.
            Ok((prefix, end)) => Some((prefix, end)),
            Err(PrefixError::MissingLead) => None,
            Err(err) => return Err(Error::PrefixError(err)),
        };
        // Skip past the prefix plus one delimiter character (presumably the
        // space after the prefix — confirm against Prefix::parse).
        let input = if prefix.is_some() {
            &input[prefix.as_ref().unwrap().1 + 1..]
        } else {
            input
        };
        Ok(Message {
            command: Command::parse(&input).map_err(Error::CommandError)?,
            prefix: prefix.map(|(p, _)| p),
        })
    }
    /// this panics if invalid state
    // Returns the sender's nick; only valid for messages with a User prefix.
    pub fn get_nick(&self) -> &str {
        match &self.prefix {
            Some(Prefix::User { nick, .. }) => nick,
            _ => unreachable!(),
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // TODO more comprehensive tests
    /// Smoke test: every captured Twitch IRC line must parse.
    #[test]
    fn parse_message() {
        let inputs = &[
            ":tmi.twitch.tv CAP * LS :twitch.tv/tags twitch.tv/commands twitch.tv/membership",
            ":tmi.twitch.tv 001 museun :Welcome, GLHF!",
            ":tmi.twitch.tv 002 museun :Your host is tmi.twitch.tv",
            ":tmi.twitch.tv 003 museun :This server is rather new",
            ":tmi.twitch.tv 004 museun :-",
            ":tmi.twitch.tv 375 museun :-",
            ":tmi.twitch.tv 372 museun :You are in a maze of twisty passages, all alike.",
            ":tmi.twitch.tv 376 museun :>",
            ":tmi.twitch.tv CAP * ACK :twitch.tv/membership",
            ":museun!museun@museun.tmi.twitch.tv JOIN #museun",
            ":museun.tmi.twitch.tv 353 museun = #museun :museun",
            ":museun.tmi.twitch.tv 366 museun #museun :End of /NAMES list",
            ":tmi.twitch.tv 421 museun WHO :Unknown command",
        ];
        for input in inputs {
            // Report WHICH line failed instead of a bare `assert!(is_ok())`,
            // which hid both the offending input and the error.
            if let Err(err) = Message::parse(input) {
                panic!("failed to parse {:?}: {:?}", input, err);
            }
        }
    }
}
|
pub mod persistent;
|
mod clients;
pub mod prelude;
pub mod requests;
pub mod responses;
use crate::core::Client;
use crate::responses::PopReceipt;
pub use clients::*;
use std::borrow::Cow;
use std::fmt::Debug;
use std::time::Duration;
//********* Request traits
/// Builder hook for setting an optional visibility timeout on a request.
pub trait VisibilityTimeoutSupport {
    type O;
    fn with_visibility_timeout(self, timeout: Duration) -> Self::O;
}
/// Read side of an optional visibility timeout.
pub trait VisibilityTimeoutOption {
    fn visibility_timeout(&self) -> Option<Duration>;
    /// Renders `visibilitytimeout=<secs>` when a timeout is set.
    fn to_uri_parameter(&self) -> Option<String> {
        self.visibility_timeout()
            .map(|visibility_timeout| format!("visibilitytimeout={}", visibility_timeout.as_secs()))
    }
    /// Appends the `visibilitytimeout` query pair to `url` when a timeout is set.
    fn append_to_url(&self, url: &mut url::Url) {
        if let Some(visibility_timeout) = self.visibility_timeout() {
            // `to_string()` instead of `format!("{}", …)` (clippy: useless_format).
            url.query_pairs_mut().append_pair(
                "visibilitytimeout",
                &visibility_timeout.as_secs().to_string(),
            );
        }
    }
}
/// Read side of a mandatory visibility timeout.
pub trait VisibilityTimeoutRequired {
    fn visibility_timeout(&self) -> Duration;
    /// Renders `visibilitytimeout=<secs>`.
    fn to_uri_parameter(&self) -> String {
        format!("visibilitytimeout={}", self.visibility_timeout().as_secs())
    }
}
/// Builder hook for setting a message time-to-live on a request.
pub trait MessageTTLSupport {
    type O;
    fn with_message_ttl_seconds(self, timeout: u64) -> Self::O;
}
/// Read side of a mandatory message TTL.
pub trait MessageTTLRequired {
    fn message_ttl_seconds(&self) -> u64;
    /// Renders `messagettl=<secs>`.
    fn to_uri_parameter(&self) -> String {
        format!("messagettl={}", self.message_ttl_seconds())
    }
    /// Appends the `messagettl` query pair to `url`.
    fn append_to_url(&self, url: &mut url::Url) {
        // `to_string()` instead of `format!("{}", …)` (clippy: useless_format).
        url.query_pairs_mut()
            .append_pair("messagettl", &self.message_ttl_seconds().to_string());
    }
}
/// Builder hook for limiting how many messages an operation touches.
pub trait NumberOfMessagesSupport {
    type O;
    fn with_number_of_messages(self, number_of_messages: u32) -> Self::O;
}
/// Read side of an optional message-count limit.
pub trait NumberOfMessagesOption {
    fn number_of_messages(&self) -> Option<u32>;
    /// Renders `numofmessages=<n>` when a limit is set.
    fn to_uri_parameter(&self) -> Option<String> {
        self.number_of_messages()
            .map(|number_of_messages| format!("numofmessages={}", number_of_messages))
    }
    /// Appends the `numofmessages` query pair to `url` when a limit is set.
    fn append_to_url(&self, url: &mut url::Url) {
        if let Some(number_of_messages) = self.number_of_messages() {
            url.query_pairs_mut()
                .append_pair("numofmessages", &number_of_messages.to_string());
        }
    }
}
/// Wraps the message like: '\<QueueMessage>\<MessageText>{}\</MessageText>\</QueueMessage>'
/// as per Azure specification.
/// See
/// [https://docs.microsoft.com/en-us/rest/api/storageservices/put-message](https://docs.microsoft.com/en-us/rest/api/storageservices/put-message)
pub trait MessageBodySupport<'b> {
    type O;
    /// Wraps the message like: '\<QueueMessage>\<MessageText>{}\</MessageText>\</QueueMessage>'
    /// as per Azure specification.
    /// See
    /// [https://docs.microsoft.com/en-us/rest/api/storageservices/put-message](https://docs.microsoft.com/en-us/rest/api/storageservices/put-message)
    fn with_message_body<BODY: Into<Cow<'b, str>>>(self, body: BODY) -> Self::O;
}
/// Read side of a required message body.
pub trait MessageBodyRequired {
    fn message_body(&self) -> &str;
}
/// Sets both the message id and the pop receipt for deleting a message as per Azure specification.
/// See
/// [https://docs.microsoft.com/en-us/rest/api/storageservices/delete-message2](https://docs.microsoft.com/en-us/rest/api/storageservices/delete-message2)
pub trait PopReceiptSupport {
    type O;
    fn with_pop_receipt(self, pop_receipt: Box<dyn PopReceipt>) -> Self::O;
}
/// Read side of a required pop receipt.
pub trait PopReceiptRequired {
    fn pop_receipt(&self) -> &dyn PopReceipt;
}
//********* Queue service traits
/// Gives access to the underlying storage client.
pub trait HasStorageClient: Debug + Send + Sync {
    type StorageClient: Client;
    fn storage_client(&self) -> &Self::StorageClient;
}
|
use crate::lib::environment::Environment;
use crate::lib::error::DfxResult;
use crate::lib::nns_types::account_identifier::AccountIdentifier;
use clap::Clap;
/// Prints the selected identity's AccountIdentifier.
#[derive(Clap)]
pub struct AccountIdOpts {}
/// Resolves the currently-selected identity's principal and prints the
/// derived ledger account identifier.
pub async fn exec(env: &dyn Environment, _opts: AccountIdOpts) -> DfxResult {
    let sender = env
        .get_selected_identity_principal()
        .expect("Selected identity not instantiated.");
    // `None` = default subaccount.
    println!("{}", AccountIdentifier::new(sender, None));
    Ok(())
}
|
pub mod oauth2;
pub mod user;
use std::time::Duration;
use actix_ratelimit::{RateLimiter, RedisStore, RedisStoreActor};
use actix_web::{web, HttpResponse, Responder, ResponseError};
use crate::error::AppError;
/// Builds a scope whose unmatched routes answer with the app's 404 error.
fn scope(path: &str) -> actix_web::Scope {
    web::scope(path).default_service(web::route().to(|| AppError::NotFound.error_response()))
}
/// Builds a resource whose unmatched methods answer 405 Method Not Allowed.
fn resource(path: &str) -> actix_web::Resource {
    web::resource(path)
        .default_service(web::route().to(|| AppError::MethodNotAllowed.error_response()))
}
/// Health-check style landing page.
async fn index() -> impl Responder {
    HttpResponse::Ok().body("it works!")
}
/// Registers all routes: `/`, the `auth` OAuth2 endpoints, and the
/// rate-limited `users/@me` endpoint (100 requests per 60 s, tracked in Redis).
pub fn set_routes(cfg: &mut web::ServiceConfig, redis_addr: &str) {
    let store = RedisStore::connect(redis_addr);
    cfg.service(
        scope("/")
            // Path-extractor failures also surface as 404.
            .app_data(web::PathConfig::default().error_handler(|_, _| AppError::NotFound.into()))
            .service(resource("").route(web::get().to(index)))
            .service(
                scope("auth")
                    .service(resource("redirect").route(web::get().to(oauth2::auth::auth)))
                    .service(resource("login").route(web::get().to(oauth2::login::login)))
                    .service(resource("logout").route(web::get().to(oauth2::logout::logout))),
            )
            .service(
                scope("users").service(
                    resource("@me").route(web::get().to(user::me::me)).wrap(
                        RateLimiter::new(RedisStoreActor::from(store).start())
                            .with_interval(Duration::from_secs(60))
                            .with_max_requests(100),
                    ),
                ),
            ),
    );
}
|
// Auto-generated (svd2rust-style) read accessors for the IDENTITY register.
// Kept byte-identical apart from comments: regenerate from the SVD instead of
// editing by hand.
#[doc = "Reader of register IDENTITY"]
pub type R = crate::R<u32, super::IDENTITY>;
#[doc = "Reader of field `P`"]
pub type P_R = crate::R<bool, bool>;
#[doc = "Reader of field `NS`"]
pub type NS_R = crate::R<bool, bool>;
#[doc = "Reader of field `PC`"]
pub type PC_R = crate::R<u8, u8>;
#[doc = "Reader of field `MS`"]
pub type MS_R = crate::R<u8, u8>;
impl R {
    #[doc = "Bit 0 - This field specifies the privileged setting ('0': user mode; '1': privileged mode) of the transfer that reads the register."]
    #[inline(always)]
    pub fn p(&self) -> P_R {
        P_R::new((self.bits & 0x01) != 0)
    }
    #[doc = "Bit 1 - This field specifies the security setting ('0': secure mode; '1': non-secure mode) of the transfer that reads the register."]
    #[inline(always)]
    pub fn ns(&self) -> NS_R {
        NS_R::new(((self.bits >> 1) & 0x01) != 0)
    }
    #[doc = "Bits 4:7 - This field specifies the protection context of the transfer that reads the register."]
    #[inline(always)]
    pub fn pc(&self) -> PC_R {
        PC_R::new(((self.bits >> 4) & 0x0f) as u8)
    }
    #[doc = "Bits 8:11 - This field specifies the bus master identifier of the transfer that reads the register."]
    #[inline(always)]
    pub fn ms(&self) -> MS_R {
        MS_R::new(((self.bits >> 8) & 0x0f) as u8)
    }
}
|
use std::dbg;
/// An opcode of the handheld console's boot code (AoC 2020 day 8).
#[derive(Copy, Clone, PartialEq)]
enum Operation {
    Nop,
    Jmp,
    Acc,
}
/// One boot-code instruction plus how many times it has executed.
#[derive(Copy, Clone)]
struct Instruction {
    count: isize,
    operation: Operation,
    argument: isize,
}
/// A boot-code program: instructions plus the history of accumulator deltas
/// (the accumulator's value is the sum of `acc`).
pub struct Program {
    acc: Vec<isize>,
    instructions: Vec<Instruction>,
}
impl Program {
    /// Empty program; capacities are hints sized for the puzzle input.
    fn new() -> Program {
        Program {
            acc: Vec::with_capacity(319),
            instructions: Vec::with_capacity(602),
        }
    }
    /// Parses lines of the form `<op> <signed-int>`. Panics on malformed input.
    fn to_program(s: &str) -> Program {
        let mut program = Program::new();
        for l in s.lines() {
            let mut sw = l.split_ascii_whitespace();
            let o = sw.next().unwrap();
            // `str::parse` accepts the optional leading '+'/'-', matching the
            // original `isize::from_str_radix(_, 10)`.
            let arg: isize = sw.next().unwrap().parse().unwrap();
            let operation = match o {
                "nop" => Operation::Nop,
                "jmp" => Operation::Jmp,
                "acc" => Operation::Acc,
                _ => panic!("invalid operation"),
            };
            program.push(operation, arg);
        }
        program
    }
    /// Appends an instruction with a zeroed execution count.
    fn push(&mut self, operation: Operation, argument: isize) {
        self.instructions.push(Instruction { count: 0, operation, argument });
    }
    /// Bumps the execution count of the instruction at `idx`.
    fn increment(&mut self, idx: usize) {
        // Direct field update replaces the original `std::mem::replace` of a
        // whole copied Instruction, whose #[must_use] result was ignored.
        self.instructions[idx].count += 1;
    }
    /// Fresh copy of this program with all execution counts reset to zero.
    fn clone_reset(&self) -> Program {
        let mut clone = Self::new();
        for instr in &self.instructions {
            clone.instructions.push(Instruction {
                count: 0,
                operation: instr.operation,
                argument: instr.argument,
            });
        }
        clone
    }
    /// Executes the instruction at `idx` and returns the index to run next.
    fn do_instruction(&mut self, idx: usize) -> usize {
        self.increment(idx);
        // Instruction is Copy, so copying it out avoids borrowing `instructions`
        // while `acc` is mutated below.
        let i = self.instructions[idx];
        match i.operation {
            Operation::Nop => idx + 1,
            Operation::Jmp => (idx as isize + i.argument) as usize,
            Operation::Acc => {
                self.acc.push(i.argument);
                idx + 1
            }
        }
    }
    /// True when the instruction at `idx` has already run at least once.
    fn is_visited(&self, idx: usize) -> bool {
        self.count(idx) > 0
    }
    /// True for `jmp 0`, which loops on itself forever.
    fn is_self_loop_instruction(&self, idx: usize) -> bool {
        self.instructions[idx].operation == Operation::Jmp
            && self.instructions[idx].argument == 0
    }
    // 8-1
    /// Runs until any instruction is about to execute a second time and
    /// returns the accumulator value at that point.
    pub fn find_accum_before_loop(input: &str) -> isize {
        let mut program = Self::to_program(input);
        let mut idx = 0;
        loop {
            if program.is_visited(idx) {
                return program.accumulator();
            }
            idx = program.do_instruction(idx);
        }
    }
    /// Runs a reset copy of the program: `Some(accumulator)` if control flows
    /// past the end, `None` if it revisits an instruction (infinite loop).
    fn terminates(&self) -> Option<isize> {
        let mut p = self.clone_reset();
        let mut idx = 0;
        loop {
            if p.termination_condition(idx) {
                return Some(p.accumulator());
            }
            if p.is_visited(idx) || p.is_self_loop_instruction(idx) {
                return None;
            }
            idx = p.do_instruction(idx);
        }
    }
    // 8-2
    /// Repairs the program by toggling one nop/jmp at a time until a variant
    /// terminates, returning that variant's final accumulator.
    /// Panics when no single toggle fixes the program.
    pub fn find_accum_after_termination(input: &str) -> isize {
        let mut program = Self::to_program(input);
        // (an unused `idx` local from the original was removed)
        if let Some(accum) = program.terminates() {
            return accum;
        }
        // Index of the nop/jmp currently switched.
        // NOTE(review): next_noop_or_jmp_idx starts at idx + 1, so instruction 0
        // is never tried as a candidate — confirm this is intended.
        let mut switch = program.next_noop_or_jmp_idx(0).unwrap();
        program.switch_nop_or_jmp(switch);
        loop {
            if let Some(accum) = program.terminates() {
                return accum;
            } else {
                // Undo the toggle that didn't fix the program, then try the next.
                program.switch_nop_or_jmp(switch);
                switch = program.next_noop_or_jmp_idx(switch).unwrap();
                program.switch_nop_or_jmp(switch);
            }
        }
    }
    /// Execution count of the instruction at `idx`.
    fn count(&self, idx: usize) -> isize {
        self.instructions[idx].count
    }
    /// Current accumulator value.
    fn accumulator(&self) -> isize {
        self.acc.iter().sum()
    }
    /// The program terminates when control flows past the last instruction.
    fn termination_condition(&self, idx: usize) -> bool {
        // `>=` avoids the underflow the original `len() - 1` form had on an
        // empty program; identical for non-empty programs.
        idx >= self.instructions.len()
    }
    /// First nop/jmp strictly after `idx`, if any.
    fn next_noop_or_jmp_idx(&self, idx: usize) -> Option<usize> {
        let mut i = idx + 1;
        while i < self.instructions.len() {
            match self.instructions[i].operation {
                Operation::Nop | Operation::Jmp => return Some(i),
                Operation::Acc => i += 1,
            }
        }
        None
    }
    /// Toggles nop <-> jmp at `idx`; panics on acc.
    fn switch_nop_or_jmp(&mut self, idx: usize) {
        // Direct field writes replace the two former helpers that each ignored
        // a #[must_use] `std::mem::replace` result.
        match self.instructions[idx].operation {
            Operation::Nop => self.instructions[idx].operation = Operation::Jmp,
            Operation::Jmp => self.instructions[idx].operation = Operation::Nop,
            Operation::Acc => panic!("Did not expect `Acc`"),
        }
    }
}
// The multi-line program strings below are byte-exact fixtures; the leading
// whitespace inside them is harmless because the parser splits on
// split_ascii_whitespace.
#[test]
fn do_instruction_jmp_jumps_to_end() {
    let p = "jmp 3
nop 5
nop 10
nop 4";
    let mut program = Program::to_program(p);
    assert_eq!(3, program.do_instruction(0));
}
#[test]
fn do_instruction_nop_goes_to_next() {
    let p = "jmp 3
nop 5
nop 10
nop 4";
    let mut program = Program::to_program(p);
    assert_eq!(2, program.do_instruction(1));
}
#[test]
fn do_instruction_acc_changes_accumulator() {
    let p = "acc 10";
    let mut program = Program::to_program(p);
    program.do_instruction(0);
    assert_eq!(10, program.accumulator());
}
#[test]
fn do_instruction_calling_instruction_twice_sets_count_to_two() {
    let p = "acc 10";
    let mut program = Program::to_program(p);
    program.do_instruction(0);
    program.do_instruction(0);
    assert_eq!(2, program.count(0));
}
// Worked example from the AoC day 8 part 1 statement.
#[test]
fn example_correct_accumulator_before_loop() {
    let p = "nop +0
acc +1
jmp +4
acc +3
jmp -3
acc -99
acc +1
jmp -4
acc +6";
    assert_eq!(5, Program::find_accum_before_loop(p));
}
// The part-2 example with the fix (jmp -> nop at line 8) already applied.
#[test]
fn modified_example_from_part_two_terminates_with_correct_accumulator() {
    let p = "nop +0
acc +1
jmp +4
acc +3
jmp -3
acc -99
acc +1
nop -4
acc +6";
    let program = Program::to_program(p);
    assert_eq!(8, program.terminates().unwrap());
}
// Part-2 example: the repair search must find the same fix itself.
#[test]
fn example_terminates_with_correct_accumulator() {
    let p = "nop +0
acc +1
jmp +4
acc +3
jmp -3
acc -99
acc +1
jmp -4
acc +6";
    assert_eq!(8, Program::find_accum_after_termination(p));
}
|
fn main() {
    // Always panics. NOTE(review): presumably a deliberate failing stub
    // (e.g. exercising a test/CI harness) — confirm before shipping.
    assert!(false);
}
|
use utils;
use std::collections::HashSet;
use std::char;
/// True when `n` with `k` digits uses exactly the digits 1..=k, each once
/// (i.e. `n` is 1-through-k pandigital).
fn is_pandigital(n: u64) -> bool {
    let s = n.to_string();
    if s.len() > 9 {
        // More than 9 digits forces a repeat or a '0'; never pandigital.
        // (Guard also keeps char::from_digit below in range.)
        return false;
    }
    let mut ns = HashSet::new();
    for d in s.chars() {
        // `insert` returns false on a duplicate digit.
        if !ns.insert(d) {
            return false;
        }
    }
    // Every digit 1..=len must be present; from_digit replaces the old
    // magic "+ 48" ASCII offset.
    (1..=s.len()).all(|i| ns.contains(&char::from_digit(i as u32, 10).unwrap()))
}
/// Project Euler 41: the largest pandigital prime.
pub fn problem_041() -> usize {
    // pandigital numbers with 9 or 8 digits cannot be prime
    // because the digits will sum to 45 or 36 respectively so
    // they are divisible by 3
    let primes = utils::prime_sieve(7654322);
    // Scan primes from largest to smallest; the first pandigital wins.
    for &p in primes.iter().rev(){
        if is_pandigital(p as u64){
            return p as usize
        }
    }
    0
}
#[cfg(test)]
mod test {
    use super::*;
    // NOTE: `test::Bencher` requires a nightly toolchain.
    use test::Bencher;
    #[test]
    fn test_problem_041() {
        let ans: usize = problem_041();
        println!("Answer to Problem 41: {}", ans);
        assert!(ans == 7652413)
    }
    #[bench]
    fn bench_problem_041(b: &mut Bencher) {
        b.iter(|| problem_041());
    }
}
|
#![allow(non_snake_case)]
extern crate log;
extern crate lazy_static;
#[cfg(not(target_os = "windows"))]
extern crate socket2;
use std::net;
use std::time;
use std::thread;
use std::sync::{Arc, Mutex, mpsc};
use crate::utils;
use lazy_static::lazy_static;
#[allow(unused_imports)]
use log::{trace, debug, info, warn, error, Level};
#[cfg(not(any(target_os = "windows", target_os = "android")))]
use crate::server_tun;
use crate::server_proxy;
use crate::encoder::{Encoder, EncoderMethods};
use crate::encoder::aes256gcm::AES256GCM;
use crate::encoder::chacha20poly1305::ChaCha20;
lazy_static! {
    // Global runtime flags: set once in `run`, read by the listener threads.
    static ref TUN_MODE: Mutex<u8> = Mutex::new(0); // 0: off, 1: tcp, 2: udp, 3: both
    static ref PROXY_MODE: Mutex<bool> = Mutex::new(false);
    static ref NO_PORT_JUMP:Mutex<bool> = Mutex::new(false);
}
/// Server entry point: configures tun/proxy modes, then spawns one listener
/// per minute window (port hopping derived from an OTP of KEY), forever.
pub fn run(KEY:&'static str, METHOD:&'static EncoderMethods, BIND_ADDR:&'static str,
    PORT_START: u32, PORT_END: u32, BUFFER_SIZE: usize, TUN_IP: Option<String>,
    TUN_PROTO: String, MTU: usize, _NO_PORT_JUMP: bool, _WITH_PROXY: bool) {
    let (tx_tun, rx_tun) = mpsc::channel();
    let (tx_proxy, rx_proxy) = mpsc::channel();
    *NO_PORT_JUMP.lock().unwrap() = _NO_PORT_JUMP;
    *TUN_MODE.lock().unwrap() = match TUN_IP{
        Some(tun_ip) => {
            #[cfg(any(target_os = "windows", target_os = "android"))]
            {
                error!("Error: tun mode does not support Windows and Android for now");
                std::process::exit(-1);
            }
            #[cfg(not(any(target_os = "windows", target_os = "android")))]
            {
                let (tun_mode, proto_info) = match TUN_PROTO.to_uppercase().as_str() {
                    "TCP" => (1, "TCP"),
                    "UDP" => (2, "UDP"),
                    //"BOTH"=> (3, "TCP&UDP"), // not supported yet
                    _ => {
                        error!("Invalid tun protocol: [{}], available protocol: [ TCP | UDP | BOTH ]", TUN_PROTO);
                        std::process::exit(-1);
                    }
                };
                info!("TT {}, Server (tun mode on {})", env!("CARGO_PKG_VERSION"), proto_info);
                thread::spawn( move || server_tun::handle_connection(rx_tun, BUFFER_SIZE, &tun_ip, tun_mode, MTU));
                tun_mode
            }
        },
        None => 0
    };
    *PROXY_MODE.lock().unwrap() = match _WITH_PROXY {
        // Proxy is implicitly on only when tun mode is off.
        false if *TUN_MODE.lock().unwrap() > 0 => false,
        _ => {
            info!("TT {}, Server (proxy mode)", env!("CARGO_PKG_VERSION"));
            thread::spawn( move || server_proxy::handle_connection(rx_proxy, BUFFER_SIZE));
            true
        },
    };
    let mut time_now = utils::get_secs_now();
    // Also cover the previous minute's port when it differs, so clients whose
    // clock lags slightly can still connect.
    let time_start = if (PORT_END - PORT_START) > 2
        && utils::get_port(utils::get_otp(KEY, time_now/60 - 1), PORT_START, PORT_END)
            != utils::get_port(utils::get_otp(KEY, time_now/60), PORT_START, PORT_END){
        time_now/60 - 1
    }
    else{
        time_now/60
    };
    for i in time_start .. (time_now/60 + 2) {
        let _tx_proxy = tx_proxy.clone();
        // FIX: the original cloned tx_tun twice in a row here; one clone per
        // spawned listener is enough.
        let _tx_tun = tx_tun.clone();
        thread::spawn( move || start_listener(_tx_proxy, _tx_tun, KEY, METHOD, BIND_ADDR, PORT_START, PORT_END, i));
        thread::sleep(time::Duration::from_millis(100));
    }
    loop {
        // wait 2 more secs, let conflicted port to close itself,
        // and not conflict with any thread that waiting for this same port
        //
        thread::sleep(time::Duration::from_secs( 60 - (time_now % 60) + 2 ));
        time_now = utils::get_secs_now();
        let _tx_proxy = tx_proxy.clone();
        // FIX: duplicate tx_tun.clone() removed here as well.
        let _tx_tun = tx_tun.clone();
        thread::spawn( move || start_listener(
            _tx_proxy, _tx_tun, KEY, METHOD, BIND_ADDR, PORT_START, PORT_END, time_now/60 + 1)
        );
    }
    /*
    let mut sched = JobScheduler::new();
    sched.add(Job::new("0 * * * * *".parse().unwrap(), || {
        thread::spawn( move || start_listener(
            KEY, BIND_ADDR, PORT_RANGE_START, PORT_RANGE_END, BUFFER_SIZE, utils::get_secs_now()/60 + 1));
    }));
    loop {
        sched.tick();
        std::thread::sleep(time::Duration::from_millis(500));
    }
    */
}
/// Opens this minute-window's port, hands accepted connections to the tun/proxy
/// workers, and tears the listener down when its lifetime ends or its port
/// collides with an upcoming window's port.
pub fn start_listener(tx_proxy: mpsc::Sender<(net::TcpStream, Encoder)>,
    #[cfg(target_os = "windows")]
    tx_tun: mpsc::Sender<(net::TcpStream, Encoder)>,
    #[cfg(not(target_os = "windows"))]
    tx_tun: mpsc::Sender<(socket2::Socket, Encoder)>,
    KEY:&'static str, METHOD:&EncoderMethods, BIND_ADDR:&'static str,
    PORT_RANGE_START:u32, PORT_RANGE_END:u32, time_start:u64) {
    // Port, lifetime and cipher are all derived from the per-window OTP.
    let otp = utils::get_otp(KEY, time_start);
    let port = utils::get_port(otp, PORT_RANGE_START, PORT_RANGE_END);
    let lifetime = utils::get_lifetime(otp);
    let encoder = match METHOD {
        EncoderMethods::AES256 => Encoder::AES256(AES256GCM::new(KEY, otp)),
        EncoderMethods::ChaCha20 => Encoder::ChaCha20(ChaCha20::new(KEY, otp)),
    };
    let streams = Arc::new(Mutex::new(Vec::new()));
    let flag_stop = Arc::new(Mutex::new(0));
    /* 1. not using JobScheduler, cause it adds too much stupid code here.
     * 2. can't find a proper way to drop listener inside _timer_thread,
     *    tried: Box + raw pointer, Arc<Mutex<listener>>...
     * 3. So we use 'flag_stop' to transfer the status, and connect to the port to break
     *    the main thread from listener.incoming()
     */
    let mut time_now = utils::get_secs_now();
    #[cfg(not(target_os = "windows"))]
    if *TUN_MODE.lock().unwrap() == 2 {
        let _tx_tun = tx_tun.clone();
        let _encoder = encoder.clone();
        let _streams = Arc::clone(&streams);
        let _flag_stop = Arc::clone(&flag_stop);
        thread::spawn( move || start_listener_udp(_tx_tun, _encoder, BIND_ADDR, port, lifetime, _streams, _flag_stop));
    }
    if *PROXY_MODE.lock().unwrap() || *TUN_MODE.lock().unwrap() == 1 {
        let _tx_tun = tx_tun.clone();
        let _encoder = encoder.clone();
        let _streams = Arc::clone(&streams);
        let _flag_stop = Arc::clone(&flag_stop);
        thread::spawn( move || start_listener_tcp(tx_proxy, _tx_tun, _encoder, &BIND_ADDR, port, lifetime, _streams, _flag_stop));
    }
    loop {
        thread::sleep(time::Duration::from_secs( 60 - (time_now % 60) )); // once a minute
        time_now = utils::get_secs_now();
        let time_diff = match time_now/60 >= time_start {
            true => (time_now/60 - time_start) as u8,
            false => continue
        };
        // check lifetime; also give up early when idle for over 2 minutes
        if time_diff >= lifetime || time_diff > 2 && streams.lock().unwrap().len() == 0 {
            *flag_stop.lock().unwrap() = 1;
            break;
        }
        // avoid conflicted ports, stop listening, but do not kill established connections
        else if time_diff > 0 &&
            (utils::get_port(utils::get_otp(KEY, time_now/60), PORT_RANGE_START, PORT_RANGE_END) == port
            || utils::get_port(utils::get_otp(KEY, time_now/60+1), PORT_RANGE_START, PORT_RANGE_END) == port ){
            // Values > 1 also encode how long to keep streams alive below.
            *flag_stop.lock().unwrap() = (lifetime - time_diff) as usize;
            break;
        }
    }
    // #TODO try to close the underlying socket to interrupt
    // Self-connect to unblock the accept loops so they observe flag_stop.
    #[allow(unused_must_use)]{
        net::TcpStream::connect(format!("127.0.0.1:{}", port));
        net::UdpSocket::bind("0.0.0.0:0").unwrap().send_to("2333".as_bytes(), format!("127.0.0.1:{}", port));
    }
    // If we kill all the existing streams, then the client has to establish a new one to
    // resume connection. Also, if we kill streams at the very first seconds of each
    // minute, this seems to be a traffic pattern.
    let _flag_stop = *flag_stop.lock().unwrap(); // not to hold the lock while sleeping
    thread::sleep(time::Duration::from_secs(
        (
            ( _flag_stop - 1 ) * 60
            +
            ( rand::random::<u8>() % 30 ) as usize
        ) as u64 ));
    if !*NO_PORT_JUMP.lock().unwrap(){
        for stream in &*streams.lock().unwrap(){
            // for udp, shutdown will only make read/write fail
            // the peer will get an ICMP Dest/Port unreachable only when the port is closed.
            stream.shutdown(net::Shutdown::Both).unwrap_or_else(|_err|());
            drop(stream)
        }
    }
}
#[cfg(not(target_os = "windows"))]
/// UDP accept loop for tun mode: binds the window's port (with retries), then
/// creates one connected client socket per first-seen peer and hands it to the
/// tun worker via `tx_tun`.
pub fn start_listener_udp(
    tx_tun: mpsc::Sender<(socket2::Socket, Encoder)>,
    encoder: Encoder, BIND_ADDR:&'static str, port: u32, lifetime: u8,
    streams: Arc<Mutex<Vec<socket2::Socket>>>, flag_stop: Arc<Mutex<usize>>){
    let udplistener = socket2::Socket::new(socket2::Domain::ipv4(), socket2::Type::dgram(), None).unwrap();
    // have to set reuse before every bind
    udplistener.set_reuse_port(true).unwrap();
    udplistener.set_reuse_address(true).unwrap();
    let mut retry = 0;
    loop {
        match udplistener.bind(&socket2::SockAddr::from(format!("{}:{}", BIND_ADDR, port).parse::<net::SocketAddr>().unwrap())) {
            Ok(_) => {
                // unset reuse once bind succeeds, to avoid any other thread/process binding on it.
                udplistener.set_reuse_port(false).unwrap();
                udplistener.set_reuse_address(false).unwrap();
                break
            },
            Err(err) if err.kind() != std::io::ErrorKind::AddrInUse => {
                error!("Error binding UDP port: [{}], {:?}", port, err);
                return
            },
            Err(_) => debug!("UDP Port: [{}] in use, {:?}, retry in 2 secs...", port, thread::current().id())
        }
        retry += 1;
        thread::sleep(time::Duration::from_secs(2));
        if retry >= 33 { // give up after 66 secs
            error!("Failed binding UDP port: [{}], after {} secs", port, retry * 2);
            return
        }
    }
    trace!("Open: [UDP:{}], lifetime: [{}]", port, lifetime);
    let mut buf_peek = [0u8; 4096];
    loop{
        //Fix1: Can not peek real packet here, it will not clear the udp buffer, then loop forever;
        //      Also recv_from will truncate the data, thus consuming the first packet;
        //      So we make sure the client will send some random trash first.
        //
        match udplistener.recv_from(&mut buf_peek){
            Ok((len, addr)) if len > 1 => {
                if *flag_stop.lock().unwrap() > 0 { // 0: ok; 1: stop normally; > 1: stop but sleep some time to kill streams
                    break;
                };
                // Dedicated socket bound to the same local port, connected to
                // this peer, so further datagrams bypass the listener.
                let client_socket = socket2::Socket::new(socket2::Domain::ipv4(), socket2::Type::dgram(), None).unwrap();
                let _client_socket = client_socket.try_clone().unwrap();
                // set reuse for 2 sockets
                udplistener.set_reuse_port(true).unwrap();
                udplistener.set_reuse_address(true).unwrap();
                client_socket.set_reuse_port(true).unwrap();
                client_socket.set_reuse_address(true).unwrap();
                client_socket.bind(&socket2::SockAddr::from(format!("{}:{}", BIND_ADDR, port).parse::<net::SocketAddr>().unwrap())).unwrap_or_else(|err|{
                    error!("client_socket bind error, {}", err);
                });
                client_socket.connect(&addr).unwrap_or_else(|err|{
                    error!("client_socket connect error, {}", err);
                });
                tx_tun.send((client_socket, encoder.clone())).unwrap_or_else(|err|{
                    error!("send client_socket error, {}", err);
                });
                streams.lock().unwrap().push(socket2::Socket::from(_client_socket));
                // unset reuse for udplistener, to avoid any other thread/process binding on it
                udplistener.set_reuse_port(false).unwrap();
                udplistener.set_reuse_address(false).unwrap();
            },
            _ => continue
        }
    }
    trace!("Close: [UDP:{}], lifetime: [{}]", port, lifetime);
}
/// Accept loop for one TCP listen port.
///
/// Binds `BIND_ADDR:port` (retrying while the address is in use), then for each
/// accepted connection spawns a thread that peeks the first decoded frame and
/// routes the stream:
///   * SOCKS5 / HTTP verbs / "TTCONNECT" preamble -> `tx_proxy`
///   * first byte looks like an IPv4/IPv6 header (tun mode) -> `tx_tun`
///     (non-Windows builds only; on Windows `tx_tun` is currently unused here)
/// Every accepted stream is also pushed into `streams` so it can be shut down
/// when this port's lifetime expires.
///
/// `flag_stop` semantics (shared with the UDP listener):
/// 0 = keep running; 1 = stop normally; > 1 = stop but delay to kill streams.
pub fn start_listener_tcp(
    tx_proxy: mpsc::Sender<(net::TcpStream, Encoder)>,
    #[cfg(not(target_os = "windows"))]
    tx_tun: mpsc::Sender<(socket2::Socket, Encoder)>,
    #[cfg(target_os = "windows")]
    tx_tun: mpsc::Sender<(net::TcpStream, Encoder)>,
    encoder: Encoder, BIND_ADDR:&'static str, port: u32, lifetime: u8,
    #[cfg(not(target_os = "windows"))]
    streams: Arc<Mutex<Vec<socket2::Socket>>>,
    #[cfg(target_os = "windows")]
    streams: Arc<Mutex<Vec<net::TcpStream>>>,
    flag_stop: Arc<Mutex<usize>>){
    let listener;
    let mut retry = 0;
    // Bind, tolerating AddrInUse for up to 33 retries (2 s apart).
    loop {
        match net::TcpListener::bind(format!("{}:{}", BIND_ADDR, port)) {
            Ok(_listener) => {
                listener = _listener;
                break
            },
            // Any error other than AddrInUse is fatal for this listener.
            Err(err) if err.kind() != std::io::ErrorKind::AddrInUse => {
                error!("Error binding TCP port: [{}], {:?}", port, err);
                return
            },
            Err(_) => debug!("TCP Port: [{}] in use, {:?}, retry in 2 secs...", port, thread::current().id())
        }
        retry += 1;
        thread::sleep(time::Duration::from_secs(2));
        if retry >= 33 { // give up after 66 secs
            error!("Failed binding TCP port: [{}], after {} secs", port, retry * 2);
            return
        }
    }
    trace!("Open: [TCP:{}], lifetime: [{}]", port, lifetime);
    // [u8; 4096] is Copy, so each spawned thread below captures its own
    // private copy of this peek buffer.
    let mut buf_peek = [0u8; 4096];
    for stream in listener.incoming() {
        if *flag_stop.lock().unwrap() > 0 { // 0: ok; 1: stop normally; > 1: stop but sleep some time to kill streams
            break;
        };
        let stream = match stream {
            Ok(stream) => stream,
            Err(_) => continue // try not to panic on error "Too many open files"
        };
        let _stream = match stream.try_clone() {
            Ok(_stream) => _stream,
            Err(_) => continue, // same as above
        };
        let tx_tun = tx_tun.clone();
        let tx_proxy = tx_proxy.clone();
        let streams = streams.clone();
        let _encoder = encoder.clone();
        thread::spawn( move || {
            let mut offset = 0;
            let mut data_len = 0;
            // Peek up to 10 times (200 ms apart) until a whole frame is
            // decodable; gives the client time to send the full header.
            let mut count = 10;
            while count > 0 {
                // Randomized read timeout (60..=315 s) before the peek.
                _stream.set_read_timeout(Some(time::Duration::from_secs( rand::random::<u8>() as u64 + 60 ))).unwrap();
                let len = match _stream.peek(&mut buf_peek){
                    Ok(len) if len > 1 => len,
                    _ => return // connection closed / timed out / 1-byte probe
                };
                let (_data_len, _offset) = _encoder.decode(&mut buf_peek[..len]);
                if _data_len == 0 && _offset > 0 { // need to read more
                    //debug!("peek length: {}, data length: {}, lets continue", len, _data_len);
                    thread::sleep(time::Duration::from_millis(200));
                    count -= 1;
                    continue
                }
                offset = _offset;
                data_len = _data_len;
                break
            }
            // Start of the decoded payload inside buf_peek.
            // NOTE(review): assumes offset >= data_len whenever decode
            // succeeds; would underflow-panic otherwise — confirm Encoder's
            // contract.
            let index = offset as usize - data_len;
            if *PROXY_MODE.lock().expect("PROXY_MODE lock failed") && data_len > 2
                && (
                    (data_len == 2 + buf_peek[index + 1] as usize) && buf_peek[index] == 0x05 // SOCKS5
                    || &buf_peek[index .. index + 9] == "TTCONNECT".as_bytes() // TT CONNECT
                    || &buf_peek[index .. index + 7] == "CONNECT".as_bytes() // HTTP CONNECT
                    || &buf_peek[index .. index + 3] == "GET".as_bytes() // HTTP
                    || &buf_peek[index .. index + 3] == "PUT".as_bytes() // HTTP
                    || &buf_peek[index .. index + 4] == "POST".as_bytes() // HTTP
                    || &buf_peek[index .. index + 4] == "HEAD".as_bytes() // HTTP
                    || &buf_peek[index .. index + 6] == "DELETE".as_bytes() // HTTP
                    || &buf_peek[index .. index + 7] == "OPTIONS".as_bytes() // HTTP
                ){
                tx_proxy.send((_stream, _encoder)).expect("Failed: tx_proxy.send()");
                return // no need to push proxy stream to die
            }
            // IP header length: v4>=20, v6>=40, our defined first packet: v4=5, v6=...
            else if *TUN_MODE.lock().expect("TUN_MODE lock failed") == 1 && data_len >= 5
                && (buf_peek[index]>>4 == 0x4 || buf_peek[index]>>4 == 0x6){
                #[cfg(not(target_os = "windows"))]
                tx_tun.send((socket2::Socket::from(_stream), _encoder)).unwrap();
            }
            #[cfg(not(target_os = "windows"))]
            streams.lock().unwrap().push(socket2::Socket::from(stream)); // push streams here, to be killed
            #[cfg(target_os = "windows")]
            streams.lock().unwrap().push(stream); // push streams here, to be killed
        });
    }
    trace!("Close: [TCP:{}], lifetime: [{}]", port, lifetime);
}
|
use super::{Expression, JsonValue, visitor::ExpressionVisitor};
/// Leaf expression wrapping a literal JSON value.
#[derive(Debug)]
pub struct ValueExpression {
    // The wrapped literal, handed to visitors via `visit_value`.
    pub value: JsonValue
}
impl ValueExpression {
pub fn new(value: JsonValue) -> ValueExpression {
ValueExpression {value}
}
}
impl Expression for ValueExpression {
    // Visitor dispatch: a value expression is handled by `visit_value`.
    fn accept(&mut self, visitor: &mut dyn ExpressionVisitor) {
        visitor.visit_value(self)
    }
}
// svd2rust-style generated reader/writer aliases for the DAC_STR2 register
// (see the docs.rs/svd2rust link on the SPEC struct below). Field widths and
// offsets come from the device SVD — do not hand-edit.
#[doc = "Register `DAC_STR2` reader"]
pub type R = crate::R<DAC_STR2_SPEC>;
#[doc = "Register `DAC_STR2` writer"]
pub type W = crate::W<DAC_STR2_SPEC>;
#[doc = "Field `STRSTDATA2` reader - DAC Channel 2 Sawtooth reset value"]
pub type STRSTDATA2_R = crate::FieldReader<u16>;
#[doc = "Field `STRSTDATA2` writer - DAC Channel 2 Sawtooth reset value"]
pub type STRSTDATA2_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 12, O, u16>;
#[doc = "Field `STDIR2` reader - DAC Channel2 Sawtooth direction setting"]
pub type STDIR2_R = crate::BitReader;
#[doc = "Field `STDIR2` writer - DAC Channel2 Sawtooth direction setting"]
pub type STDIR2_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `STINCDATA2` reader - DAC CH2 Sawtooth increment value (12.4 bit format)"]
pub type STINCDATA2_R = crate::FieldReader<u16>;
#[doc = "Field `STINCDATA2` writer - DAC CH2 Sawtooth increment value (12.4 bit format)"]
pub type STINCDATA2_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 16, O, u16>;
impl R {
    #[doc = "Bits 0:11 - DAC Channel 2 Sawtooth reset value"]
    #[inline(always)]
    pub fn strstdata2(&self) -> STRSTDATA2_R {
        // Field occupies bits 0..=11: mask off the low 12 bits.
        STRSTDATA2_R::new((self.bits & 0x0fff) as u16)
    }
    #[doc = "Bit 12 - DAC Channel2 Sawtooth direction setting"]
    #[inline(always)]
    pub fn stdir2(&self) -> STDIR2_R {
        // Single bit at position 12.
        STDIR2_R::new(((self.bits >> 12) & 1) != 0)
    }
    #[doc = "Bits 16:31 - DAC CH2 Sawtooth increment value (12.4 bit format)"]
    #[inline(always)]
    pub fn stincdata2(&self) -> STINCDATA2_R {
        // 16-bit field in the upper half-word.
        STINCDATA2_R::new(((self.bits >> 16) & 0xffff) as u16)
    }
}
impl W {
    // Each writer method returns a proxy typed with the field's bit offset
    // (the const generic), which performs the shifted/masked write.
    #[doc = "Bits 0:11 - DAC Channel 2 Sawtooth reset value"]
    #[inline(always)]
    #[must_use]
    pub fn strstdata2(&mut self) -> STRSTDATA2_W<DAC_STR2_SPEC, 0> {
        STRSTDATA2_W::new(self)
    }
    #[doc = "Bit 12 - DAC Channel2 Sawtooth direction setting"]
    #[inline(always)]
    #[must_use]
    pub fn stdir2(&mut self) -> STDIR2_W<DAC_STR2_SPEC, 12> {
        STDIR2_W::new(self)
    }
    #[doc = "Bits 16:31 - DAC CH2 Sawtooth increment value (12.4 bit format)"]
    #[inline(always)]
    #[must_use]
    pub fn stincdata2(&mut self) -> STINCDATA2_W<DAC_STR2_SPEC, 16> {
        STINCDATA2_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        // SAFETY: caller must ensure the raw value is valid for this register.
        self.bits = bits;
        self
    }
}
#[doc = "Sawtooth register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`dac_str2::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`dac_str2::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct DAC_STR2_SPEC;
impl crate::RegisterSpec for DAC_STR2_SPEC {
    // Register is 32 bits wide.
    type Ux = u32;
}
#[doc = "`read()` method returns [`dac_str2::R`](R) reader structure"]
impl crate::Readable for DAC_STR2_SPEC {}
#[doc = "`write(|w| ..)` method takes [`dac_str2::W`](W) writer structure"]
impl crate::Writable for DAC_STR2_SPEC {
    // No write-0-to-clear / write-1-to-clear bits in this register.
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets DAC_STR2 to value 0"]
impl crate::Resettable for DAC_STR2_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
use std::collections::{HashSet, VecDeque};
fn main() -> std::io::Result<()> {
let input = std::fs::read_to_string("examples/21/input.txt")?;
let mut lists: Vec<IngredientList> = input
.lines()
.map(|line| {
let mut it = line.split("(contains").map(|x| x.trim());
let ingredients = it
.next()
.unwrap()
.split(' ')
.map(|x| x.to_string())
.collect();
let allergens = it
.next()
.unwrap()
.trim_end_matches(')')
.split(',')
.map(|x| x.trim().to_string())
.collect();
IngredientList {
ingredients,
allergens,
}
})
.collect();
let allergens: HashSet<String> = lists
.iter()
.flat_map(|list| list.allergens.clone())
.collect();
let ingredients_may_contain_allergens = allergens
.iter()
.flat_map(|allergen| {
let mut it = lists
.iter()
.filter(|list| list.allergens.contains(allergen))
.map(|x| &x.ingredients);
let mut intersection = it.next().unwrap().clone();
for x in it {
intersection = intersection.intersection(x).cloned().collect();
}
intersection
})
.collect();
let n: usize = lists
.iter()
.map(|x| {
x.ingredients
.difference(&ingredients_may_contain_allergens)
.count()
})
.sum();
println!("{}", n);
// Part 2
let mut entries = Vec::new();
let mut allergens_v: VecDeque<_> = allergens.iter().collect();
while let Some(allergen) = allergens_v.pop_back() {
let mut it = lists
.iter()
.filter(|list| list.allergens.contains(allergen))
.map(|x| &x.ingredients);
let mut intersection = it.next().unwrap().clone();
for x in it {
intersection = intersection.intersection(x).cloned().collect();
}
if intersection.len() == 1 {
let ingredient = intersection.iter().cloned().next().unwrap();
entries.push((allergen, ingredient.clone()));
for list in &mut lists {
list.allergens.remove(allergen);
list.ingredients.remove(&ingredient);
}
} else {
allergens_v.push_front(allergen);
}
}
entries.sort();
let solution = entries
.into_iter()
.map(|x| x.1)
.collect::<Vec<_>>()
.join(",");
println!("{:?}", solution);
Ok(())
}
/// One input line: the food's ingredients and its declared allergens.
#[derive(Clone, Debug)]
struct IngredientList {
    ingredients: HashSet<String>,
    allergens: HashSet<String>,
}
|
/*
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT license.
*/
#![warn(missing_debug_implementations, missing_docs)]
//! File operations
use byteorder::{LittleEndian, ReadBytesExt, WriteBytesExt};
use std::{mem, io};
use std::fs::{self, File, OpenOptions};
use std::io::{Read, BufReader, Write, Seek, SeekFrom};
use std::path::Path;
use crate::model::data_store::DatasetDto;
/// Read metadata of data file.
///
/// The file starts with two little-endian `i32`s: the number of points and
/// the per-point dimension; those are returned as `(npoints, ndims)`.
pub fn load_metadata_from_file(file_name: &str) -> std::io::Result<(usize, usize)> {
    let mut reader = BufReader::new(File::open(file_name)?);
    let npoints = reader.read_i32::<LittleEndian>()? as usize;
    let ndims = reader.read_i32::<LittleEndian>()? as usize;
    Ok((npoints, ndims))
}
/// Read the deleted vertex ids from file.
///
/// Layout: a little-endian `u32` count, followed by that many little-endian
/// `u32` ids (sorted ascending per the writer's contract). Returns the count
/// together with the ids.
pub fn load_ids_to_delete_from_file(file_name: &str) -> std::io::Result<(usize, Vec<u32>)> {
    let mut file = File::open(file_name)?;
    let num_ids = file.read_u32::<LittleEndian>()? as usize;
    // Fallible collect: stops at the first read error.
    let ids = (0..num_ids)
        .map(|_| file.read_u32::<LittleEndian>())
        .collect::<std::io::Result<Vec<u32>>>()?;
    Ok((num_ids, ids))
}
/// Copy data from file into the (aligned) destination dataset.
/// # Arguments
/// * `bin_file` - filename where the data is; starts with two little-endian
///   `i32`s (`npts`, `dim`) followed by `npts * dim` raw `T` values
/// * `dataset_dto` - destination dataset dto to which the data is copied
/// * `pts_offset` - offset of points. data will be loaded after this point in dataset
/// # Return
/// * `npts` - number of points read from bin_file
/// * `dim` - point dimension read from bin_file
pub fn copy_aligned_data_from_file<T: Default + Copy>(
    bin_file: &str,
    dataset_dto: DatasetDto<T>,
    pts_offset: usize,
) -> std::io::Result<(usize, usize)> {
    let mut reader = File::open(bin_file)?;
    let npts = reader.read_i32::<LittleEndian>()? as usize;
    let dim = reader.read_i32::<LittleEndian>()? as usize;
    let rounded_dim = dataset_dto.rounded_dim;
    let offset = pts_offset * rounded_dim;
    // One scratch buffer reused for every row (was re-allocated per point).
    let mut buf = vec![0u8; dim * mem::size_of::<T>()];
    for i in 0..npts {
        let row_start = offset + i * rounded_dim;
        reader.read_exact(&mut buf)?;
        let row = &mut dataset_dto.data[row_start..row_start + dim];
        // SAFETY: `row` holds exactly `dim` `T`s, i.e. `buf.len()` bytes, and
        // `T: Copy` makes a byte copy a valid value copy. Copying bytes into
        // the already-aligned destination avoids the previous
        // `buf.as_ptr() as *const T` + `from_raw_parts`, which is UB when the
        // `Vec<u8>` allocation is not aligned for `T`.
        unsafe {
            std::ptr::copy_nonoverlapping(buf.as_ptr(), row.as_mut_ptr() as *mut u8, buf.len());
        }
        // Zero (default) the padding columns of this row. BUGFIX: the old
        // range started at `i * rounded_dim` without adding `offset`, so for
        // `pts_offset > 0` it cleared padding in rows *before* the offset
        // instead of the rows just written.
        for v in dataset_dto.data[row_start + dim..row_start + rounded_dim].iter_mut() {
            *v = T::default();
        }
    }
    Ok((npts, dim))
}
/// Open a file to write
/// # Arguments
/// * `file_name` - file name
// NOTE(review): `.create(true)` without `.truncate(true)` leaves stale bytes
// beyond what the caller overwrites if the file already exists and is longer
// (clippy: `suspicious_open_options`). Callers in this module seek and write
// at offsets, so adding `truncate` could break them — confirm intent before
// changing.
#[inline]
pub fn open_file_to_write(file_name: &str) -> std::io::Result<File> {
    OpenOptions::new()
        .write(true)
        .create(true)
        .open(Path::new(file_name))
}
/// Delete a file; succeeds (no-op) when the file does not exist.
///
/// The previous `file_exists` check followed by `remove_file` had a TOCTOU
/// race: the file could vanish between the check and the removal, turning a
/// benign situation into an error. Attempting the removal and treating
/// `NotFound` as success is race-free.
/// # Arguments
/// * `file_name` - file name
pub fn delete_file(file_name: &str) -> std::io::Result<()> {
    match fs::remove_file(file_name) {
        Ok(()) => Ok(()),
        Err(err) if err.kind() == io::ErrorKind::NotFound => Ok(()),
        Err(err) => Err(err),
    }
}
/// Check whether file exists or not
/// (querying the metadata is exactly what `Path::exists` does internally).
pub fn file_exists(filename: &str) -> bool {
    fs::metadata(filename).is_ok()
}
/// Save data to file
/// # Arguments
/// * `filename` - filename where the data is
/// * `data` - information data, laid out as `npts` rows of `aligned_dim`
///   elements each (only the first `ndims` of every row are written)
/// * `npts` - number of points
/// * `ndims` - point dimension
/// * `aligned_dim` - aligned dimension
/// * `offset` - data offset in file
/// # Return
/// * number of bytes written: 8-byte header + `npts * ndims * size_of::<T>()`
pub fn save_data_in_base_dimensions<T: Default + Copy>(
    filename: &str,
    data: &mut [T],
    npts: usize,
    ndims: usize,
    aligned_dim: usize,
    offset: usize,
) -> std::io::Result<usize> {
    let mut writer = open_file_to_write(filename)?;
    let npts_i32 = npts as i32;
    let ndims_i32 = ndims as i32;
    let bytes_written = 2 * std::mem::size_of::<u32>() + npts * ndims * (std::mem::size_of::<T>());
    writer.seek(std::io::SeekFrom::Start(offset as u64))?;
    // Header: counts as little-endian i32s, matching the readers above.
    writer.write_all(&npts_i32.to_le_bytes())?;
    writer.write_all(&ndims_i32.to_le_bytes())?;
    let data_ptr = data.as_ptr() as *const u8;
    for i in 0..npts {
        let middle_offset = i * aligned_dim * std::mem::size_of::<T>();
        // SAFETY(review): assumes `data.len() >= (npts - 1) * aligned_dim + ndims`
        // so every raw slice stays in bounds — caller's contract, TODO confirm.
        let middle_slice = unsafe { std::slice::from_raw_parts(data_ptr.add(middle_offset), ndims * std::mem::size_of::<T>()) };
        writer.write_all(middle_slice)?;
    }
    writer.flush()?;
    Ok(bytes_written)
}
/// Read data file
/// # Arguments
/// * `bin_file` - filename where the data is
/// * `file_offset` - data offset in file
/// # Return
/// * the decoded values, the number of points and the point dimension
pub fn load_bin<T: Copy>(
    bin_file: &str,
    file_offset: usize) -> std::io::Result<(Vec<T>, usize, usize)>
{
    let mut reader = File::open(bin_file)?;
    reader.seek(std::io::SeekFrom::Start(file_offset as u64))?;
    let npts = reader.read_i32::<LittleEndian>()? as usize;
    let dim = reader.read_i32::<LittleEndian>()? as usize;
    let count = npts * dim;
    let byte_len = count * std::mem::size_of::<T>();
    let mut buf = vec![0u8; byte_len];
    reader.read_exact(&mut buf)?;
    let mut data: Vec<T> = Vec::with_capacity(count);
    // SAFETY: the `Vec<T>` allocation is correctly aligned for `T` and has
    // capacity for `count` elements (= `byte_len` bytes); after the byte copy
    // all `count` elements are initialized, and `T: Copy` makes the byte copy
    // a valid value copy, so `set_len(count)` is sound. BUGFIX: the previous
    // code cast the `Vec<u8>` pointer to `*const T` and built a slice from it,
    // which is undefined behavior when the byte buffer is not aligned for `T`
    // (e.g. `T = u64`); it also made an extra copy via `to_vec`.
    unsafe {
        std::ptr::copy_nonoverlapping(buf.as_ptr(), data.as_mut_ptr() as *mut u8, byte_len);
        data.set_len(count);
    }
    Ok((data, npts, dim))
}
/// Get file size in bytes.
pub fn get_file_size(filename: &str) -> io::Result<u64> {
    File::open(filename)?.metadata().map(|m| m.len())
}
// Generates `save_bin_<ty>` writers: an 8-byte (#pts, #dims) i32 header
// followed by the values, all little-endian, starting at `offset`.
macro_rules! save_bin {
    ($name:ident, $t:ty, $write_func:ident) => {
        /// Write data into file
        /// Returns header (8 bytes) + payload size in bytes.
        pub fn $name(filename: &str, data: &[$t], num_pts: usize, dims: usize, offset: usize) -> std::io::Result<usize> {
            let mut writer = open_file_to_write(filename)?;
            println!("Writing bin: {}", filename);
            writer.seek(SeekFrom::Start(offset as u64))?;
            let num_pts_i32 = num_pts as i32;
            let dims_i32 = dims as i32;
            let bytes_written = num_pts * dims * mem::size_of::<$t>() + 2 * mem::size_of::<u32>();
            writer.write_i32::<LittleEndian>(num_pts_i32)?;
            writer.write_i32::<LittleEndian>(dims_i32)?;
            println!("bin: #pts = {}, #dims = {}, size = {}B", num_pts, dims, bytes_written);
            for item in data.iter() {
                writer.$write_func::<LittleEndian>(*item)?;
            }
            writer.flush()?;
            println!("Finished writing bin.");
            Ok(bytes_written)
        }
    };
}
save_bin!(save_bin_f32, f32, write_f32);
save_bin!(save_bin_u64, u64, write_u64);
save_bin!(save_bin_u32, u32, write_u32);
#[cfg(test)]
mod file_util_test {
    use crate::model::data_store::InmemDataset;
    use std::fs;
    use super::*;
    pub const DIM_8: usize = 8;
    #[test]
    fn load_metadata_test() {
        let file_name = "test_load_metadata_test.bin";
        let data = [200, 0, 0, 0, 128, 0, 0, 0]; // 200 and 128 in little endian bytes
        std::fs::write(file_name, data).expect("Failed to write sample file");
        match load_metadata_from_file(file_name) {
            Ok((npoints, ndims)) => {
                assert!(npoints == 200);
                assert!(ndims == 128);
            },
            // NOTE(review): swallowing the error lets this test pass without
            // asserting anything — consider `panic!`-ing here instead.
            Err(_e) => {},
        }
        fs::remove_file(file_name).expect("Failed to delete file");
    }
    #[test]
    fn load_data_test() {
        let file_name = "test_load_data_test.bin";
        //npoints=2, dim=8, 2 vectors [1.0;8] [2.0;8]
        // Payload bytes are f32 little-endian values 1.0..=16.0.
        let data: [u8; 72] = [2, 0, 0, 0, 8, 0, 0, 0,
            0x00, 0x00, 0x80, 0x3f, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x40, 0x40, 0x00, 0x00, 0x80, 0x40,
            0x00, 0x00, 0xa0, 0x40, 0x00, 0x00, 0xc0, 0x40, 0x00, 0x00, 0xe0, 0x40, 0x00, 0x00, 0x00, 0x41,
            0x00, 0x00, 0x10, 0x41, 0x00, 0x00, 0x20, 0x41, 0x00, 0x00, 0x30, 0x41, 0x00, 0x00, 0x40, 0x41,
            0x00, 0x00, 0x50, 0x41, 0x00, 0x00, 0x60, 0x41, 0x00, 0x00, 0x70, 0x41, 0x00, 0x00, 0x80, 0x41];
        std::fs::write(file_name, data).expect("Failed to write sample file");
        let mut dataset = InmemDataset::<f32, DIM_8>::new(2, 1f32).unwrap();
        match copy_aligned_data_from_file(file_name, dataset.into_dto(), 0) {
            Ok((num_points, dim)) => {
                fs::remove_file(file_name).expect("Failed to delete file");
                assert!(num_points == 2);
                assert!(dim == 8);
                assert!(dataset.data.len() == 16);
                let first_vertex = dataset.get_vertex(0).unwrap();
                let second_vertex = dataset.get_vertex(1).unwrap();
                assert!(*first_vertex.vector() == [1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0]);
                assert!(*second_vertex.vector() == [9.0, 10.0, 11.0, 12.0, 13.0, 14.0, 15.0, 16.0]);
            },
            Err(e) => {
                fs::remove_file(file_name).expect("Failed to delete file");
                panic!("{}", e)
            },
        }
    }
    #[test]
    fn open_file_to_write_test() {
        // Smoke test: opening an existing file for writing must not error.
        let file_name = "test_open_file_to_write_test.bin";
        let mut writer = File::create(file_name).unwrap();
        let data = [200, 0, 0, 0, 128, 0, 0, 0];
        writer.write(&data).expect("Failed to write sample file");
        let _ = open_file_to_write(file_name);
        fs::remove_file(file_name).expect("Failed to delete file");
    }
    #[test]
    fn delete_file_test() {
        let file_name = "test_delete_file_test.bin";
        // NOTE(review): `file` stays open while the file is deleted — fine on
        // Unix; on Windows an open handle can make the removal fail.
        let mut file = File::create(file_name).unwrap();
        writeln!(file, "test delete file").unwrap();
        let result = delete_file(file_name);
        assert!(result.is_ok());
        assert!(fs::metadata(file_name).is_err());
    }
    #[test]
    fn save_data_in_base_dimensions_test() {
        //npoints=2, dim=8
        let mut data: [u8; 72] = [2, 0, 0, 0, 8, 0, 0, 0,
            0x00, 0x00, 0x80, 0x3f, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x40, 0x40, 0x00, 0x00, 0x80, 0x40,
            0x00, 0x00, 0xa0, 0x40, 0x00, 0x00, 0xc0, 0x40, 0x00, 0x00, 0xe0, 0x40, 0x00, 0x00, 0x00, 0x41,
            0x00, 0x00, 0x10, 0x41, 0x00, 0x00, 0x20, 0x41, 0x00, 0x00, 0x30, 0x41, 0x00, 0x00, 0x40, 0x41,
            0x00, 0x00, 0x50, 0x41, 0x00, 0x00, 0x60, 0x41, 0x00, 0x00, 0x70, 0x41, 0x00, 0x00, 0x80, 0x41];
        let num_points = 2;
        let dim = DIM_8;
        let data_file = "save_data_in_base_dimensions_test.data";
        match save_data_in_base_dimensions(data_file, &mut data, num_points, dim, DIM_8, 0) {
            Ok(num) => {
                assert!(file_exists(data_file));
                assert_eq!(num, 2 * std::mem::size_of::<u32>() + num_points * dim * std::mem::size_of::<u8>());
                fs::remove_file(data_file).expect("Failed to delete file");
            },
            Err(e) => {
                // NOTE(review): if saving failed the file may not exist, and
                // this `expect` would mask the original error `e`.
                fs::remove_file(data_file).expect("Failed to delete file");
                panic!("{}", e)
            }
        }
    }
    #[test]
    fn save_bin_test() {
        let filename = "save_bin_test";
        let data = vec![0u64, 1u64, 2u64];
        let num_pts = data.len();
        let dims = 1;
        // 8-byte header + 3 * 8 payload bytes = 32.
        let bytes_written = save_bin_u64(filename, &data, num_pts, dims, 0).unwrap();
        assert_eq!(bytes_written, 32);
        let mut file = File::open(filename).unwrap();
        let mut buffer = vec![];
        let npts_read = file.read_i32::<LittleEndian>().unwrap() as usize;
        let dims_read = file.read_i32::<LittleEndian>().unwrap() as usize;
        file.read_to_end(&mut buffer).unwrap();
        let data_read: Vec<u64> = buffer
            .chunks_exact(8)
            .map(|b| u64::from_le_bytes([b[0], b[1], b[2], b[3], b[4], b[5], b[6], b[7]]))
            .collect();
        std::fs::remove_file(filename).unwrap();
        assert_eq!(num_pts, npts_read);
        assert_eq!(dims, dims_read);
        assert_eq!(data, data_read);
    }
    #[test]
    fn load_bin_test() {
        // Round-trip: save then load at offset 0.
        let file_name = "load_bin_test";
        let data = vec![0u64, 1u64, 2u64];
        let num_pts = data.len();
        let dims = 1;
        let bytes_written = save_bin_u64(file_name, &data, num_pts, dims, 0).unwrap();
        assert_eq!(bytes_written, 32);
        let (load_data, load_num_pts, load_dims) = load_bin::<u64>(file_name, 0).unwrap();
        assert_eq!(load_num_pts, num_pts);
        assert_eq!(load_dims, dims);
        assert_eq!(load_data, data);
        std::fs::remove_file(file_name).unwrap();
    }
    #[test]
    fn load_bin_offset_test() {
        // Round-trip with a non-zero file offset.
        let offset:usize = 32;
        let file_name = "load_bin_offset_test";
        let data = vec![0u64, 1u64, 2u64];
        let num_pts = data.len();
        let dims = 1;
        let bytes_written = save_bin_u64(file_name, &data, num_pts, dims, offset).unwrap();
        assert_eq!(bytes_written, 32);
        let (load_data, load_num_pts, load_dims) = load_bin::<u64>(file_name, offset).unwrap();
        assert_eq!(load_num_pts, num_pts);
        assert_eq!(load_dims, dims);
        assert_eq!(load_data, data);
        std::fs::remove_file(file_name).unwrap();
    }
}
|
use crate::{
grid::{
config::{ColoredConfig, Entity, Position, SpannedConfig},
records::{ExactRecords, Records},
},
settings::CellOption,
};
/// Columns (Vertical) span.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
pub struct ColumnSpan {
    // Number of columns the cell stretches across.
    size: usize,
}
impl ColumnSpan {
    /// Creates a new column (vertical) span.
    pub fn new(size: usize) -> Self {
        Self { size }
    }
    /// Creates a new column (vertical) span with a maximum value possible
    /// (it gets clamped to the table width when applied).
    pub fn max() -> Self {
        Self::new(usize::MAX)
    }
}
impl<R> CellOption<R, ColoredConfig> for ColumnSpan
where
    R: Records + ExactRecords,
{
    fn change(self, records: &mut R, cfg: &mut ColoredConfig, entity: Entity) {
        // Apply (and clamp) the span for every cell selected by `entity`,
        // then drop span entries that ended up on hidden cells.
        let count_rows = records.count_rows();
        let count_cols = records.count_columns();
        set_col_spans(cfg, self.size, entity, (count_rows, count_cols));
        remove_false_spans(cfg);
    }
}
// Applies `span` to every cell of `entity` inside `shape` = (rows, cols):
// out-of-grid cells are skipped, spans running past the last column are
// clamped, and spans that would overlap an existing span are ignored.
fn set_col_spans(cfg: &mut SpannedConfig, span: usize, entity: Entity, shape: (usize, usize)) {
    for pos in entity.iter(shape.0, shape.1) {
        if !is_valid_pos(pos, shape) {
            continue;
        }
        let mut span = span;
        if !is_column_span_valid(pos.1, span, shape.1) {
            // Clamp to the remaining columns on this row.
            span = shape.1 - pos.1;
        }
        if span_has_intersections(cfg, pos, span) {
            continue;
        }
        set_span_column(cfg, pos, span);
    }
}
// Records a column span at `pos`. A `span` of 0 means "merge this cell into
// the nearest visible cell to its left": that neighbour's span is extended to
// cover `pos`, and the 0-span entry for `pos` itself is still written below
// (it is cleaned up later by `remove_false_spans`).
fn set_span_column(cfg: &mut SpannedConfig, pos: (usize, usize), span: usize) {
    if span == 0 {
        let (row, col) = pos;
        if col == 0 {
            // Nothing to the left to merge into.
            return;
        }
        if let Some(closecol) = closest_visible(cfg, (row, col - 1)) {
            let span = col + 1 - closecol;
            cfg.set_column_span((row, closecol), span);
        }
    }
    cfg.set_column_span(pos, span);
}
/// Walks left from `pos` (inclusive) and returns the column of the first
/// visible cell, or `None` when everything down to column 0 is hidden.
fn closest_visible(cfg: &SpannedConfig, mut pos: Position) -> Option<usize> {
    while !cfg.is_cell_visible(pos) {
        if pos.1 == 0 {
            return None;
        }
        pos.1 -= 1;
    }
    Some(pos.1)
}
/// `true` when a span of `span` columns starting at `col` fits inside a grid
/// of `count_cols` columns.
///
/// Uses checked addition: `ColumnSpan::max()` passes `usize::MAX`, and the
/// previous `span + col <= count_cols` overflows (panicking in debug builds)
/// for any `col >= 1`. Overflow now simply reports "does not fit", which lets
/// the caller clamp the span as intended.
fn is_column_span_valid(col: usize, span: usize, count_cols: usize) -> bool {
    col.checked_add(span).map_or(false, |end| end <= count_cols)
}
/// `true` when the cell lies inside the `(count_rows, count_cols)` grid.
fn is_valid_pos(pos: Position, shape: (usize, usize)) -> bool {
    pos.0 < shape.0 && pos.1 < shape.1
}
/// `true` if any cell the prospective span would cover is already hidden,
/// i.e. swallowed by another span.
fn span_has_intersections(cfg: &SpannedConfig, (row, col): Position, span: usize) -> bool {
    (col..col + span).any(|col| !cfg.is_cell_visible((row, col)))
}
// Resets both spans (to 1) for any recorded span whose origin cell is no
// longer visible — e.g. a 0-span placeholder left behind by `set_span_column`
// or an origin swallowed by another span.
fn remove_false_spans(cfg: &mut SpannedConfig) {
    for (pos, _) in cfg.get_column_spans() {
        if cfg.is_cell_visible(pos) {
            continue;
        }
        cfg.set_row_span(pos, 1);
        cfg.set_column_span(pos, 1);
    }
    for (pos, _) in cfg.get_row_spans() {
        if cfg.is_cell_visible(pos) {
            continue;
        }
        cfg.set_row_span(pos, 1);
        cfg.set_column_span(pos, 1);
    }
}
|
use crate::headers::*;
use crate::AddAsHeader;
use chrono::{DateTime, Utc};
use http::request::Builder;
/// Conditional request header on the *source* resource: the operation applies
/// only if the source was (un)modified since the given instant. Serialized to
/// the `SOURCE_IF_MODIFIED_SINCE` / `SOURCE_IF_UNMODIFIED_SINCE` headers.
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum IfSourceModifiedSinceCondition {
    Modified(DateTime<Utc>),
    Unmodified(DateTime<Utc>),
}
impl AddAsHeader for IfSourceModifiedSinceCondition {
    // Builder-based API: dates are serialized in RFC 2822 format.
    fn add_as_header(&self, builder: Builder) -> Builder {
        match self {
            IfSourceModifiedSinceCondition::Modified(date) => {
                builder.header(SOURCE_IF_MODIFIED_SINCE, &date.to_rfc2822() as &str)
            }
            IfSourceModifiedSinceCondition::Unmodified(date) => {
                builder.header(SOURCE_IF_UNMODIFIED_SINCE, &date.to_rfc2822() as &str)
            }
        }
    }
    // Request-mutating API; fails only if the RFC 2822 string is not a valid
    // header value.
    fn add_as_header2(
        &self,
        request: &mut crate::Request,
    ) -> Result<(), crate::errors::HTTPHeaderError> {
        let (header_name, header_value) = match self {
            IfSourceModifiedSinceCondition::Modified(date) => {
                (SOURCE_IF_MODIFIED_SINCE, date.to_rfc2822())
            }
            IfSourceModifiedSinceCondition::Unmodified(date) => {
                (SOURCE_IF_UNMODIFIED_SINCE, date.to_rfc2822())
            }
        };
        request
            .headers_mut()
            .append(header_name, http::HeaderValue::from_str(&header_value)?);
        Ok(())
    }
}
|
use crate::pb::{
maine::maine_service_client::MaineServiceClient,
ragdoll::ragdoll_internal_service_client::RagdollInternalServiceClient,
};
/// Bundled gRPC clients for the ragdoll-internal and maine services.
#[derive(Debug, Clone)]
pub struct Client {
    // NOTE(review): looks like a typo of `ragdoll`, but the field is public
    // API — renaming would break callers.
    pub regdoll: RagdollInternalServiceClient<tonic::transport::Channel>,
    pub maine: MaineServiceClient<tonic::transport::Channel>,
}
use envconfig::Envconfig;
/// RPC endpoints, loaded from the environment via `Envconfig`.
#[derive(Debug, Clone, Envconfig)]
pub struct Config {
    #[envconfig(from = "RPC_DST_RAGDOLL")]
    pub dst_ragdoll: String,
    #[envconfig(from = "RPC_DST_MAINE")]
    pub dst_maine: String,
}
impl Client {
    /// Connects both service clients to the endpoints in `cfg`.
    ///
    /// Panics (unwrap) when either endpoint cannot be reached, matching the
    /// original behavior.
    pub async fn new(cfg: &Config) -> Self {
        let regdoll = RagdollInternalServiceClient::<tonic::transport::Channel>::connect(
            cfg.dst_ragdoll.clone(),
        )
        .await
        .unwrap();
        let maine = MaineServiceClient::<tonic::transport::Channel>::connect(cfg.dst_maine.clone())
            .await
            .unwrap();
        Client { regdoll, maine }
    }
}
|
use aoc2019::intcode;
use aoc2019::io::{slurp_stdin, parse_intcode_program};
fn main() {
    // Run the same intcode program twice: once with input 1, once with 5,
    // printing the last output value of each run.
    let program = parse_intcode_program(&slurp_stdin());
    let mut part1_input: Vec<intcode::Mem> = vec![1];
    let mut part1_output: Vec<intcode::Mem> = Vec::new();
    intcode::run_program_splitio(program.clone(), &mut part1_input, &mut part1_output).unwrap();
    println!("{}", part1_output.last().unwrap());
    let mut part2_input: Vec<intcode::Mem> = vec![5];
    let mut part2_output: Vec<intcode::Mem> = Vec::new();
    intcode::run_program_splitio(program, &mut part2_input, &mut part2_output).unwrap();
    println!("{}", part2_output.last().unwrap());
}
|
use std::fmt;
use std::fmt::{Formatter, write};
use std::hash::Hash;
use futures::executor::block_on;
use crate::Poll::Pending;
/// Toy async fn: just prints a greeting (never awaited in `main`).
async fn say() {
    println!("hi")
}
/// A song with a title; produced by `learn_song`, consumed by `sing_song`.
#[derive(Debug)]
struct Song {
    title: String,
}
/// Pretends to learn a song asynchronously; resolves to the learned `Song`.
async fn learn_song() -> Song {
    println!("i learned the song Radio");
    Song {
        title: "Radio".to_string(),
    }
}
/// Sings (prints) the given song, consuming it.
async fn sing_song(song: Song) {
    println!("lalala {}", song)
}
/// Independent async task, joined concurrently with singing in `a_main`.
async fn dance() {
    println!("dancing cool dance")
}
/// Sequential dependency: the song must be learned before it can be sung.
async fn learn_and_sing() {
    let song = learn_song().await;
    sing_song(song).await
}
impl fmt::Display for Song {
    // Display a song as its bare title (used by `sing_song`'s println).
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        write!(f, "{}", self.title)
    }
}
/// Runs learning/singing concurrently with dancing via `join!`.
/// Note: futures are lazy — neither runs until the join polls them.
async fn a_main() {
    let f1 = learn_and_sing();
    let f2 = dance();
    futures::join!(f1, f2);
}
// SIMPLE FUTURE TRAIT
/// Hand-rolled stand-in for `std::task::Poll`, used by `SimpleFuture`.
enum Poll<T> {
    Ready(T),
    Pending,
}
/// Simplified `Future` trait: `wake` is a plain fn pointer instead of a
/// `Waker`/`Context`.
trait SimpleFuture {
    type Output;
    fn poll(&mut self, wake: fn()) -> Poll<Self::Output>;
}
/// Stub socket used to demonstrate `SimpleFuture`; always "has data".
struct Socket;
impl Socket {
    fn has_data(&self) -> bool {
        true
    }
    fn read_buf(&self) -> Vec<u8> {
        vec![]
    }
    // No-op: a real impl would store `wake` and call it on readability.
    fn set_readable_callback(&self, _wake: fn()) {}
}
// READ FROM SOCKET
/// Future that resolves to the socket's buffered bytes once data arrives.
pub struct SocketRead<'a> {
    socket: &'a Socket,
}
impl SimpleFuture for SocketRead<'_> {
    type Output = Vec<u8>;
    /// Ready with the buffered bytes when data is available; otherwise
    /// registers `wake` for the readability callback and stays pending.
    fn poll(&mut self, wake: fn()) -> Poll<Self::Output> {
        match self.socket.has_data() {
            true => Poll::Ready(self.socket.read_buf()),
            false => {
                self.socket.set_readable_callback(wake);
                Poll::Pending
            }
        }
    }
}
/// Runs two sub-futures to completion concurrently; a slot becomes `None`
/// once its future has finished.
pub struct Join<FutureA, FutureB> {
    a: Option<FutureA>,
    b: Option<FutureB>,
}
impl<FutureA, FutureB> SimpleFuture for Join<FutureA, FutureB>
where
    FutureA: SimpleFuture<Output = ()>,
    FutureB: SimpleFuture<Output = ()>,
{
    type Output = ();
    /// Polls each unfinished sub-future, dropping it once it completes;
    /// ready only when both slots are empty.
    fn poll(&mut self, wake: fn()) -> Poll<Self::Output> {
        if let Some(fut) = self.a.as_mut() {
            if matches!(fut.poll(wake), Poll::Ready(())) {
                self.a = None;
            }
        }
        if let Some(fut) = self.b.as_mut() {
            if matches!(fut.poll(wake), Poll::Ready(())) {
                self.b = None;
            }
        }
        match (&self.a, &self.b) {
            (None, None) => Poll::Ready(()),
            _ => Poll::Pending,
        }
    }
}
fn main() {
    // println!("hi main");
    //
    // let future = say();
    // block_on(a_main());
    // Scratch code: builds the same Vec<String> twice — the first `foos`
    // binding is immediately shadowed and never read.
    let bar = [1, 2, 3];
    let foos = bar.iter()
        .map(|x| format!("{}", x))
        .collect::<Vec<String>>();
    let foos: Vec<_> = bar.iter()
        .map(|x| format!("{}", x))
        .collect();
    // print_type();
}
/// Debug helper: prints the compile-time type name of its argument.
fn print_type<T>(_: &T) {
    println!("{}", std::any::type_name::<T>())
}
|
use ordered_float::OrderedFloat;
/// Demo record for sorting: `OrderedFloat` gives the float field a total
/// order (plain `f64` is only `PartialOrd`).
#[derive(Debug)]
struct MaStruct {
    no_u32: u32,
    no_string: String,
    float: OrderedFloat<f64>,
}
/// Sorts records by `(float, no_string)` and prints them.
///
/// Since `OrderedFloat<f64>` implements `Ord` (that is its whole purpose),
/// the comparator uses `cmp` directly instead of `partial_cmp(..).unwrap()` —
/// same ordering, no needless panic path.
fn main() {
    let s1 = MaStruct {
        no_u32: 10,
        no_string: "0A".to_string(),
        float: OrderedFloat(2.0)
    };
    let s2 = MaStruct {
        no_u32: 5,
        no_string: "BA".to_string(),
        float: OrderedFloat(1.1)
    };
    let s3 = MaStruct {
        no_u32: 100,
        no_string: "00".to_string(),
        float: OrderedFloat(3.2)
    };
    let s4 = MaStruct {
        no_u32: 9,
        no_string: "0C".to_string(),
        float: OrderedFloat(1.1)
    };
    let mut vec = vec![s1, s2, s3, s4];
    // Ne fonctionne pas avec f64 sans OrderedFloat
    /*vec.sort_by(|a, b| {
        let a_key = (&a.no_u32, &a.no_string);
        let b_key = (&b.no_u32, &b.no_string);
        a_key.cmp(&b_key)
    });*/
    vec.sort_by(|a, b| {
        let a_key = (&a.float, &a.no_string);
        let b_key = (&b.float, &b.no_string);
        a_key.cmp(&b_key)
    });
    //vec.sort_by_key(|a| a.float);
    println!("{:?}", vec);
}
|
// Copyright (c) SimpleStaking and Tezedge Contributors
// SPDX-License-Identifier: MIT
/// Rust implementation of messages required for Rust <-> OCaml FFI communication.
use std::collections::HashMap;
use std::fmt;
use std::fmt::Debug;
use std::mem::size_of;
use derive_builder::Builder;
use failure::Fail;
use lazy_static::lazy_static;
use serde::{Deserialize, Serialize};
use serde::de::DeserializeOwned;
use znfe::OCamlError;
use crypto::hash::{BlockHash, ChainId, ContextHash, HashType, OperationHash, ProtocolHash};
use tezos_encoding::{binary_writer, ser};
use tezos_encoding::binary_reader::{BinaryReader, BinaryReaderError};
use tezos_encoding::de::from_value as deserialize_from_value;
use tezos_encoding::encoding::{Encoding, Field, HasEncoding, Tag, TagMap};
use tezos_messages::p2p::encoding::prelude::{BlockHeader, Operation, OperationsForBlocksMessage, Path};
use tezos_messages::p2p::encoding::operations_for_blocks::path_encoding;
pub type RustBytes = Vec<u8>;
/// Trait for binary encoding messages for ffi.
pub trait FfiMessage: DeserializeOwned + Serialize + Sized + Send + PartialEq + Debug {
    /// Serialize `self` using this type's declared `encoding()`.
    #[inline]
    fn as_rust_bytes(&self) -> Result<RustBytes, ser::Error> {
        binary_writer::write(&self, Self::encoding())
    }
    /// Create new struct from bytes.
    #[inline]
    fn from_rust_bytes(buf: RustBytes) -> Result<Self, BinaryReaderError> {
        // Two-step decode: bytes -> intermediate value -> concrete type.
        let value = BinaryReader::new().read(buf, Self::encoding())?;
        let value: Self = deserialize_from_value(&value)?;
        Ok(value)
    }
    /// Tezos binary encoding schema used by both directions.
    fn encoding() -> &'static Encoding;
}
/// Genesis block information structure
/// (all fields are string-encoded as expected by the OCaml side).
#[derive(Clone, Serialize, Deserialize, Debug)]
pub struct GenesisChain {
    pub time: String,
    pub block: String,
    pub protocol: String,
}
/// Voted protocol overrides
#[derive(Clone, Serialize, Deserialize, Debug)]
pub struct ProtocolOverrides {
    // (level, protocol) pairs where an upgrade is forced.
    pub forced_protocol_upgrades: Vec<(i32, String)>,
    // (from-protocol, to-protocol) replacement pairs.
    pub voted_protocol_overrides: Vec<(String, String)>,
}
/// Patch_context key json
/// (custom `Debug` below prints both fields on one line).
#[derive(Clone, Serialize, Deserialize)]
pub struct PatchContext {
    pub key: String,
    pub json: String,
}
impl fmt::Debug for PatchContext {
    // Compact one-line form: `key: <key>, json: "<json>"`.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "key: {}, json: {:?}", &self.key, &self.json)
    }
}
/// Test chain information
#[derive(Debug, Serialize, Deserialize)]
pub struct TestChain {
    pub chain_id: RustBytes,
    pub protocol_hash: RustBytes,
    pub expiration_date: String,
}
/// Holds configuration for ocaml runtime - e.g. arguments which are passed to ocaml and can be change in runtime
#[derive(Clone, Serialize, Deserialize, Debug)]
pub struct TezosRuntimeConfiguration {
    pub log_enabled: bool,
    // NOTE(review): "treshold" is a typo of "threshold", but the field is
    // public and serialized — renaming would break API and wire format.
    pub no_of_ffi_calls_treshold_for_gc: i32,
    pub debug_mode: bool,
}
/// Request payload for applying a block; field order must match
/// `APPLY_BLOCK_REQUEST_ENCODING` below.
#[derive(Serialize, Deserialize, Debug, Clone, Builder, PartialEq)]
pub struct ApplyBlockRequest {
    pub chain_id: ChainId,
    pub block_header: BlockHeader,
    pub pred_header: BlockHeader,
    pub max_operations_ttl: i32,
    // Operations grouped per validation pass.
    pub operations: Vec<Vec<Operation>>,
}
impl ApplyBlockRequest {
    /// Unwraps operations out of their wire-format wrapper messages,
    /// preserving group order.
    pub fn convert_operations(block_operations: Vec<OperationsForBlocksMessage>) -> Vec<Vec<Operation>> {
        let mut converted = Vec::with_capacity(block_operations.len());
        for message in block_operations {
            converted.push(message.operations);
        }
        converted
    }
}
lazy_static! {
    // Wire-format schema for `ApplyBlockRequest`.
    // NOTE(review): field order mirrors the struct definition — keep them in sync.
    pub static ref APPLY_BLOCK_REQUEST_ENCODING: Encoding = Encoding::Obj(vec![
        Field::new("chain_id", Encoding::Hash(HashType::ChainId)),
        Field::new("block_header", Encoding::dynamic(BlockHeader::encoding().clone())),
        Field::new("pred_header", Encoding::dynamic(BlockHeader::encoding().clone())),
        Field::new("max_operations_ttl", Encoding::Int31),
        Field::new("operations", Encoding::dynamic(Encoding::list(Encoding::dynamic(Encoding::list(Encoding::dynamic(Operation::encoding().clone())))))),
    ]);
}
impl FfiMessage for ApplyBlockRequest {
    fn encoding() -> &'static Encoding {
        &APPLY_BLOCK_REQUEST_ENCODING
    }
}
/// Application block result
#[derive(Clone, Serialize, Deserialize, Debug, PartialEq)]
pub struct ApplyBlockResponse {
    // Human-readable validation result from the protocol.
    pub validation_result_message: String,
    // Context hash resulting from applying the block.
    pub context_hash: ContextHash,
    // Protocol-produced JSON blobs (kept as opaque strings).
    pub block_header_proto_json: String,
    pub block_header_proto_metadata_json: String,
    pub operations_proto_metadata_json: String,
    pub max_operations_ttl: i32,
    pub last_allowed_fork_level: i32,
    // True if applying this block forked a test chain.
    pub forking_testchain: bool,
    // Present only when `forking_testchain` is true — TODO confirm invariant.
    pub forking_testchain_data: Option<ForkingTestchainData>,
}
lazy_static! {
    // Wire-format schema for `ForkingTestchainData`.
    pub static ref FORKING_TESTCHAIN_DATA_ENCODING: Encoding = Encoding::Obj(vec![
        Field::new("forking_block_hash", Encoding::Hash(HashType::BlockHash)),
        Field::new("test_chain_id", Encoding::Hash(HashType::ChainId)),
    ]);
    // Wire-format schema for `ApplyBlockResponse`; field order mirrors the struct.
    pub static ref APPLY_BLOCK_RESPONSE_ENCODING: Encoding = Encoding::Obj(vec![
        Field::new("validation_result_message", Encoding::String),
        Field::new("context_hash", Encoding::Hash(HashType::ContextHash)),
        Field::new("block_header_proto_json", Encoding::String),
        Field::new("block_header_proto_metadata_json", Encoding::String),
        Field::new("operations_proto_metadata_json", Encoding::String),
        Field::new("max_operations_ttl", Encoding::Int31),
        Field::new("last_allowed_fork_level", Encoding::Int32),
        Field::new("forking_testchain", Encoding::Bool),
        Field::new("forking_testchain_data", Encoding::option(FORKING_TESTCHAIN_DATA_ENCODING.clone())),
    ]);
}
impl FfiMessage for ApplyBlockResponse {
    fn encoding() -> &'static Encoding {
        &APPLY_BLOCK_RESPONSE_ENCODING
    }
}
/// Handle identifying a protocol prevalidator (mempool validator) instance.
#[derive(Clone, Serialize, Deserialize, PartialEq)]
pub struct PrevalidatorWrapper {
    pub chain_id: ChainId,
    pub protocol: ProtocolHash,
}
impl fmt::Debug for PrevalidatorWrapper {
    /// Renders both raw-byte hashes in their human-readable string form.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(
            f,
            "PrevalidatorWrapper[chain_id: {}, protocol: {}]",
            HashType::ChainId.bytes_to_string(&self.chain_id),
            HashType::ProtocolHash.bytes_to_string(&self.protocol)
        )
    }
}
lazy_static! {
    // Wire-format schema for `PrevalidatorWrapper`; field order mirrors the struct.
    pub static ref PREVALIDATOR_WRAPPER_ENCODING: Encoding = Encoding::Obj(vec![
        Field::new("chain_id", Encoding::Hash(HashType::ChainId)),
        Field::new("protocol", Encoding::Hash(HashType::ProtocolHash)),
    ]);
}
impl FfiMessage for PrevalidatorWrapper {
    fn encoding() -> &'static Encoding {
        &PREVALIDATOR_WRAPPER_ENCODING
    }
}
/// Request to start constructing a new (pre)validation state on top of `predecessor`.
#[derive(Serialize, Deserialize, Debug, Clone, Builder, PartialEq)]
pub struct BeginConstructionRequest {
    pub chain_id: ChainId,
    // Block header the construction starts from.
    pub predecessor: BlockHeader,
    // Optional raw protocol data passed through to the protocol.
    pub protocol_data: Option<Vec<u8>>,
}
lazy_static! {
    // Wire-format schema for `BeginConstructionRequest`; field order mirrors the struct.
    pub static ref BEGIN_CONSTRUCTION_REQUEST_ENCODING: Encoding = Encoding::Obj(vec![
        Field::new("chain_id", Encoding::Hash(HashType::ChainId)),
        Field::new("predecessor", Encoding::dynamic(BlockHeader::encoding().clone())),
        Field::new("protocol_data", Encoding::option(Encoding::list(Encoding::Uint8))),
    ]);
}
impl FfiMessage for BeginConstructionRequest {
    fn encoding() -> &'static Encoding {
        &BEGIN_CONSTRUCTION_REQUEST_ENCODING
    }
}
/// Request to validate a single operation against a running prevalidator.
#[derive(Serialize, Deserialize, Debug, Clone, Builder, PartialEq)]
pub struct ValidateOperationRequest {
    pub prevalidator: PrevalidatorWrapper,
    pub operation: Operation,
}
lazy_static! {
    // Wire-format schema for `ValidateOperationRequest`; field order mirrors the struct.
    pub static ref VALIDATE_OPERATION_REQUEST_ENCODING: Encoding = Encoding::Obj(vec![
        Field::new("prevalidator", PREVALIDATOR_WRAPPER_ENCODING.clone()),
        Field::new("operation", Encoding::dynamic(Operation::encoding().clone())),
    ]);
}
impl FfiMessage for ValidateOperationRequest {
    fn encoding() -> &'static Encoding {
        &VALIDATE_OPERATION_REQUEST_ENCODING
    }
}
/// Response for `ValidateOperationRequest`.
#[derive(Serialize, Deserialize, Debug, Clone, Builder, PartialEq)]
pub struct ValidateOperationResponse {
    // Prevalidator that produced the result.
    pub prevalidator: PrevalidatorWrapper,
    pub result: ValidateOperationResult,
}
lazy_static! {
    // Wire-format schema for `ValidateOperationResponse`; field order mirrors the struct.
    pub static ref VALIDATE_OPERATION_RESPONSE_ENCODING: Encoding = Encoding::Obj(vec![
        Field::new("prevalidator", PREVALIDATOR_WRAPPER_ENCODING.clone()),
        Field::new("result", VALIDATE_OPERATION_RESULT_ENCODING.clone()),
    ]);
}
impl FfiMessage for ValidateOperationResponse {
    fn encoding() -> &'static Encoding {
        &VALIDATE_OPERATION_RESPONSE_ENCODING
    }
}
// Opaque JSON blobs produced by the protocol, kept as plain strings.
pub type OperationProtocolDataJson = String;
pub type ErrorListJson = String;
/// Protocol data of an operation together with the error list it produced.
#[derive(Serialize, Deserialize, Clone, Builder, PartialEq)]
pub struct OperationProtocolDataJsonWithErrorListJson {
    pub protocol_data_json: OperationProtocolDataJson,
    pub error_json: ErrorListJson,
}
impl fmt::Debug for OperationProtocolDataJsonWithErrorListJson {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "[error_json: {}, protocol_data_json: {}]",
format_json_single_line(&self.error_json),
format_json_single_line(&self.protocol_data_json)
)
}
}
/// An operation the prevalidator accepted ("applied").
#[derive(Serialize, Deserialize, Clone, Builder, PartialEq)]
pub struct Applied {
    pub hash: OperationHash,
    pub protocol_data_json: OperationProtocolDataJson,
}
/// Renders `origin` as a single-line JSON string literal.
///
/// NOTE(review): `json!(origin)` wraps the input in a JSON *string value* — the
/// output is the input quoted and escaped (newlines become `\n`), not re-parsed
/// and re-formatted JSON. Confirm this is the intended behavior; the fallback
/// branch is effectively unreachable since serializing a string cannot fail.
#[inline]
fn format_json_single_line(origin: &str) -> String {
    let json = serde_json::json!(origin);
    serde_json::to_string(&json).unwrap_or_else(|_| origin.to_string())
}
impl fmt::Debug for Applied {
    /// Renders the hash in string form and the protocol data as one-line JSON.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let hash = HashType::OperationHash.bytes_to_string(&self.hash);
        let protocol_data = format_json_single_line(&self.protocol_data_json);
        write!(f, "[hash: {}, protocol_data_json: {}]", hash, protocol_data)
    }
}
/// An operation the prevalidator rejected or delayed, with its error details.
#[derive(Serialize, Deserialize, Clone, Builder, PartialEq)]
pub struct Errored {
    pub hash: OperationHash,
    // Whether the operation is an endorsement — optional, presumably absent when unknown.
    pub is_endorsement: Option<bool>,
    pub protocol_data_json_with_error_json: OperationProtocolDataJsonWithErrorListJson,
}
impl fmt::Debug for Errored {
    // Renders the hash in string form; error payload uses its own Debug impl.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let operation_hash_encoding = HashType::OperationHash;
        write!(f, "[hash: {}, protocol_data_json_with_error_json: {:?}]",
               operation_hash_encoding.bytes_to_string(&self.hash),
               &self.protocol_data_json_with_error_json
        )
    }
}
/// Aggregated mempool validation result, one bucket per operation class.
#[derive(Serialize, Deserialize, Debug, Clone, Builder, PartialEq, Default)]
pub struct ValidateOperationResult {
    pub applied: Vec<Applied>,
    pub refused: Vec<Errored>,
    pub branch_refused: Vec<Errored>,
    pub branch_delayed: Vec<Errored>,
    // TODO: is an `outdated` operations category missing here? Confirm against the protocol's mempool classes.
}
impl ValidateOperationResult {
    /// Merges result with new one, and returns `true/false` if something was changed
    pub fn merge(&mut self, new_result: &ValidateOperationResult) -> bool {
        // Merge every bucket; `|=` keeps an earlier `true`.
        let mut changed = self.merge_applied(&new_result.applied);
        changed |= self.merge_refused(&new_result.refused);
        changed |= self.merge_branch_refused(&new_result.branch_refused);
        changed |= self.merge_branch_delayed(&new_result.branch_delayed);
        changed
    }
    /// Merges `new_items` into `self.applied`, keyed by operation hash.
    ///
    /// Returns `true` when an item was added or an existing entry was replaced
    /// by a *different* value. Re-inserting an identical item is not reported
    /// as a change (the original code reported every replacement, even a no-op).
    fn merge_applied(&mut self, new_items: &[Applied]) -> bool {
        let mut changed = false;
        let mut m = HashMap::new();
        for a in &self.applied {
            m.insert(a.hash.clone(), a.clone());
        }
        for na in new_items {
            match m.insert(na.hash.clone(), na.clone()) {
                // Replacement only counts as a change when the value differs.
                Some(old) => changed |= old != *na,
                None => changed = true,
            };
        }
        if changed {
            // NOTE: HashMap iteration order is unspecified, so the resulting
            // element order is arbitrary (same as the original behavior).
            self.applied = m.values().cloned().collect();
        }
        changed
    }
    fn merge_refused(&mut self, new_items: &[Errored]) -> bool {
        Self::merge_errored(&mut self.refused, new_items)
    }
    fn merge_branch_refused(&mut self, new_items: &[Errored]) -> bool {
        Self::merge_errored(&mut self.branch_refused, new_items)
    }
    fn merge_branch_delayed(&mut self, new_items: &[Errored]) -> bool {
        Self::merge_errored(&mut self.branch_delayed, new_items)
    }
    /// Shared merge implementation for the `Errored` buckets; same
    /// change-detection semantics as `merge_applied`.
    fn merge_errored(old_items: &mut Vec<Errored>, new_items: &[Errored]) -> bool {
        let mut changed = false;
        let mut m = HashMap::new();
        // Read-only pass; no mutation needed to seed the map.
        for a in old_items.iter() {
            m.insert(a.hash.clone(), a.clone());
        }
        for na in new_items {
            match m.insert(na.hash.clone(), na.clone()) {
                Some(old) => changed |= old != *na,
                None => changed = true,
            };
        }
        if changed {
            *old_items = m.values().cloned().collect();
        }
        changed
    }
}
lazy_static! {
    // Wire-format schema for `OperationProtocolDataJsonWithErrorListJson`.
    static ref OPERATION_DATA_ERROR_JSON_ENCODING: Encoding = Encoding::Obj(vec![
        Field::new("protocol_data_json", Encoding::String),
        Field::new("error_json", Encoding::String),
    ]);
    // Wire-format schema for `ValidateOperationResult`.
    // NOTE(review): the three `Errored` bucket encodings below are identical
    // copies — a shared static would remove the duplication.
    pub static ref VALIDATE_OPERATION_RESULT_ENCODING: Encoding = Encoding::Obj(vec![
        Field::new("applied", Encoding::dynamic(Encoding::list(
            Encoding::Obj(
                vec![
                    Field::new("hash", Encoding::Hash(HashType::OperationHash)),
                    Field::new("protocol_data_json", Encoding::String),
                ]
            )
        ))
        ),
        Field::new("refused", Encoding::dynamic(Encoding::list(
            Encoding::Obj(
                vec![
                    Field::new("hash", Encoding::Hash(HashType::OperationHash)),
                    Field::new("is_endorsement", Encoding::option(Encoding::Bool)),
                    Field::new("protocol_data_json_with_error_json", OPERATION_DATA_ERROR_JSON_ENCODING.clone()),
                ]
            )
        ))
        ),
        Field::new("branch_refused", Encoding::dynamic(Encoding::list(
            Encoding::Obj(
                vec![
                    Field::new("hash", Encoding::Hash(HashType::OperationHash)),
                    Field::new("is_endorsement", Encoding::option(Encoding::Bool)),
                    Field::new("protocol_data_json_with_error_json", OPERATION_DATA_ERROR_JSON_ENCODING.clone()),
                ]
            )
        ))
        ),
        Field::new("branch_delayed", Encoding::dynamic(Encoding::list(
            Encoding::Obj(
                vec![
                    Field::new("hash", Encoding::Hash(HashType::OperationHash)),
                    Field::new("is_endorsement", Encoding::option(Encoding::Bool)),
                    Field::new("protocol_data_json_with_error_json", OPERATION_DATA_ERROR_JSON_ENCODING.clone()),
                ]
            )
        ))
        ),
    ]);
}
/// Init protocol context result
#[derive(Clone, Serialize, Deserialize, PartialEq)]
pub struct InitProtocolContextResult {
    pub supported_protocol_hashes: Vec<ProtocolHash>,
    /// Present only if genesis was committed to the context
    pub genesis_commit_hash: Option<ContextHash>,
}
impl fmt::Debug for InitProtocolContextResult {
    /// Shows hashes in string form; "-none-" marks a missing genesis commit.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let genesis_commit_hash = self
            .genesis_commit_hash
            .as_ref()
            .map(|hash| HashType::ContextHash.bytes_to_string(hash))
            .unwrap_or_else(|| "-none-".to_string());
        let supported_protocol_hashes: Vec<String> = self
            .supported_protocol_hashes
            .iter()
            .map(|ph| HashType::ProtocolHash.bytes_to_string(ph))
            .collect();
        write!(f, "genesis_commit_hash: {}, supported_protocol_hashes: {:?}", &genesis_commit_hash, &supported_protocol_hashes)
    }
}
/// Commit genesis result
#[derive(Clone, Serialize, Deserialize, Debug, PartialEq)]
pub struct CommitGenesisResult {
    // Protocol-produced JSON blobs for the committed genesis block.
    pub block_header_proto_json: String,
    pub block_header_proto_metadata_json: String,
    pub operations_proto_metadata_json: String,
}
/// Forking test chain data
#[derive(Clone, Serialize, Deserialize, Debug, PartialEq)]
pub struct ForkingTestchainData {
    // Hash of the block at which the test chain forked.
    pub forking_block_hash: BlockHash,
    pub test_chain_id: ChainId,
}
/// Low-level FFI call failure, later mapped to operation-specific errors
/// by the various `From<CallError>` impls in this module.
#[derive(Serialize, Deserialize, Debug, Fail, PartialEq)]
pub enum CallError {
    // The ocaml side raised an exception; the message is kept if it could be parsed.
    #[fail(display = "Failed to call - message: {:?}!", parsed_error_message)]
    FailedToCall {
        parsed_error_message: Option<String>,
    },
    // Request could not be serialized for the FFI.
    #[fail(display = "Invalid request data - message: {}!", message)]
    InvalidRequestData {
        message: String,
    },
    // Response could not be deserialized from the FFI.
    #[fail(display = "Invalid response data - message: {}!", message)]
    InvalidResponseData {
        message: String,
    },
}
impl From<OCamlError> for CallError {
    /// Any ocaml exception becomes `FailedToCall`, carrying the parsed
    /// exception message when one is available.
    fn from(error: OCamlError) -> Self {
        match error {
            // `message()` already yields the `Option<String>` we store.
            OCamlError::Exception(exception) => CallError::FailedToCall {
                parsed_error_message: exception.message(),
            },
        }
    }
}
/// Error raised while changing ocaml runtime settings.
#[derive(Serialize, Deserialize, Debug, Fail)]
pub enum TezosRuntimeConfigurationError {
    #[fail(display = "Change ocaml settings failed, message: {}!", message)]
    ChangeConfigurationError {
        message: String
    }
}
impl From<OCamlError> for TezosRuntimeConfigurationError {
    fn from(error: OCamlError) -> Self {
        match error {
            OCamlError::Exception(exception) => {
                TezosRuntimeConfigurationError::ChangeConfigurationError {
                    // Fall back to "unknown" when the exception has no message.
                    message: exception.message().unwrap_or_else(|| "unknown".to_string())
                }
            }
        }
    }
}
/// Errors raised while generating a node identity via the ocaml side.
#[derive(Serialize, Deserialize, Debug, Fail)]
pub enum TezosGenerateIdentityError {
    #[fail(display = "Generate identity failed, message: {}!", message)]
    GenerationError {
        message: String
    },
    #[fail(display = "Generated identity is invalid json! message: {}!", message)]
    InvalidJsonError {
        message: String
    },
}
impl From<OCamlError> for TezosGenerateIdentityError {
    fn from(error: OCamlError) -> Self {
        match error {
            OCamlError::Exception(exception) => {
                TezosGenerateIdentityError::GenerationError {
                    // Fall back to "unknown" when the exception has no message.
                    message: exception.message().unwrap_or_else(|| "unknown".to_string())
                }
            }
        }
    }
}
/// Error raised while initializing ocaml-side storage.
#[derive(Serialize, Deserialize, Debug, Fail)]
pub enum TezosStorageInitError {
    #[fail(display = "Ocaml storage init failed, message: {}!", message)]
    InitializeError {
        message: String
    }
}
impl From<OCamlError> for TezosStorageInitError {
    fn from(error: OCamlError) -> Self {
        match error {
            OCamlError::Exception(exception) => {
                TezosStorageInitError::InitializeError {
                    // Fall back to "unknown" when the exception has no message.
                    message: exception.message().unwrap_or_else(|| "unknown".to_string())
                }
            }
        }
    }
}
// Allows logging this error directly as a structured slog value.
impl slog::Value for TezosStorageInitError {
    fn serialize(&self, _record: &slog::Record, key: slog::Key, serializer: &mut dyn slog::Serializer) -> slog::Result {
        serializer.emit_arguments(key, &format_args!("{}", self))
    }
}
/// Error raised while reading data from the ocaml side.
#[derive(Serialize, Deserialize, Debug, Fail)]
pub enum GetDataError {
    #[fail(display = "Ocaml failed to get data, message: {}!", message)]
    ReadError {
        message: String
    }
}
impl From<OCamlError> for GetDataError {
    fn from(error: OCamlError) -> Self {
        match error {
            OCamlError::Exception(exception) => {
                GetDataError::ReadError {
                    // Fall back to "unknown" when the exception has no message.
                    message: exception.message().unwrap_or_else(|| "unknown".to_string())
                }
            }
        }
    }
}
/// Errors that can occur while applying a block via the FFI.
#[derive(Serialize, Deserialize, Debug, Fail, PartialEq)]
pub enum ApplyBlockError {
    // The number of supplied operation groups does not match the expectation.
    // ("exptected" typo in the user-facing message fixed.)
    #[fail(display = "Incomplete operations, expected: {}, has actual: {}!", expected, actual)]
    IncompleteOperations {
        expected: usize,
        actual: usize,
    },
    // Generic protocol-side failure.
    #[fail(display = "Failed to apply block - message: {}!", message)]
    FailedToApplyBlock {
        message: String,
    },
    // The predecessor's context is not known yet; apply the predecessor first.
    #[fail(display = "Unknown predecessor context - try to apply predecessor at first message: {}!", message)]
    UnknownPredecessorContext {
        message: String,
    },
    #[fail(display = "Predecessor does not match - message: {}!", message)]
    PredecessorMismatch {
        message: String,
    },
    // FFI (de)serialization problem on the request or the response.
    #[fail(display = "Invalid request/response data - message: {}!", message)]
    InvalidRequestResponseData {
        message: String,
    },
}
impl From<CallError> for ApplyBlockError {
    /// Maps a generic FFI failure to a block-application error, recognizing
    /// well-known ocaml error prefixes in the parsed message.
    fn from(error: CallError) -> Self {
        match error {
            CallError::FailedToCall { parsed_error_message } => {
                match parsed_error_message {
                    // No message could be parsed from the ocaml exception.
                    None => ApplyBlockError::FailedToApplyBlock {
                        message: "unknown".to_string()
                    },
                    Some(message) => {
                        // Dispatch on the ocaml-side error constructor prefix.
                        match message.as_str() {
                            e if e.starts_with("Unknown_predecessor_context") => ApplyBlockError::UnknownPredecessorContext {
                                message: message.to_string()
                            },
                            e if e.starts_with("Predecessor_mismatch") => ApplyBlockError::PredecessorMismatch {
                                message: message.to_string()
                            },
                            message => ApplyBlockError::FailedToApplyBlock {
                                message: message.to_string()
                            }
                        }
                    }
                }
            }
            CallError::InvalidRequestData { message } => ApplyBlockError::InvalidRequestResponseData {
                message
            },
            CallError::InvalidResponseData { message } => ApplyBlockError::InvalidRequestResponseData {
                message
            },
        }
    }
}
/// Errors that can occur while beginning prevalidator construction via the FFI.
#[derive(Serialize, Deserialize, Debug, Fail, PartialEq)]
pub enum BeginConstructionError {
    #[fail(display = "Failed to begin construction - message: {}!", message)]
    FailedToBeginConstruction {
        message: String,
    },
    // The predecessor's context is not known yet; apply the predecessor first.
    #[fail(display = "Unknown predecessor context - try to apply predecessor at first message: {}!", message)]
    UnknownPredecessorContext {
        message: String,
    },
    // FFI (de)serialization problem on the request or the response.
    #[fail(display = "Invalid request/response data - message: {}!", message)]
    InvalidRequestResponseData {
        message: String,
    },
}
impl From<CallError> for BeginConstructionError {
    /// Maps a generic FFI failure, recognizing the well-known
    /// `Unknown_predecessor_context` ocaml error prefix.
    fn from(error: CallError) -> Self {
        match error {
            CallError::FailedToCall { parsed_error_message } => {
                match parsed_error_message {
                    // No message could be parsed from the ocaml exception.
                    None => BeginConstructionError::FailedToBeginConstruction {
                        message: "unknown".to_string()
                    },
                    Some(message) => {
                        match message.as_str() {
                            e if e.starts_with("Unknown_predecessor_context") => BeginConstructionError::UnknownPredecessorContext {
                                message: message.to_string()
                            },
                            message => BeginConstructionError::FailedToBeginConstruction {
                                message: message.to_string()
                            }
                        }
                    }
                }
            }
            CallError::InvalidRequestData { message } => BeginConstructionError::InvalidRequestResponseData {
                message
            },
            CallError::InvalidResponseData { message } => BeginConstructionError::InvalidRequestResponseData {
                message
            },
        }
    }
}
/// Errors that can occur while validating an operation via the FFI.
#[derive(Serialize, Deserialize, Debug, Fail, PartialEq)]
pub enum ValidateOperationError {
    #[fail(display = "Failed to validate operation - message: {}!", message)]
    FailedToValidateOperation {
        message: String,
    },
    // FFI (de)serialization problem on the request or the response.
    #[fail(display = "Invalid request/response data - message: {}!", message)]
    InvalidRequestResponseData {
        message: String,
    },
}
impl From<CallError> for ValidateOperationError {
    /// Maps a generic FFI failure to a validation error; a missing parsed
    /// message is reported as "unknown".
    fn from(error: CallError) -> Self {
        match error {
            CallError::FailedToCall { parsed_error_message } => {
                ValidateOperationError::FailedToValidateOperation {
                    message: parsed_error_message.unwrap_or_else(|| "unknown".to_string()),
                }
            }
            // Request and response serialization problems map to the same variant.
            CallError::InvalidRequestData { message }
            | CallError::InvalidResponseData { message } => {
                ValidateOperationError::InvalidRequestResponseData { message }
            }
        }
    }
}
/// Errors raised while reading a block header from storage.
#[derive(Debug, Fail)]
pub enum BlockHeaderError {
    #[fail(display = "BlockHeader cannot be read from storage: {}!", message)]
    ReadError {
        message: String
    },
    #[fail(display = "BlockHeader was expected, but was not found!")]
    ExpectedButNotFound,
}
impl From<OCamlError> for BlockHeaderError {
    fn from(error: OCamlError) -> Self {
        match error {
            OCamlError::Exception(exception) => {
                BlockHeaderError::ReadError {
                    // Fall back to "unknown" when the exception has no message.
                    message: exception.message().unwrap_or_else(|| "unknown".to_string())
                }
            }
        }
    }
}
/// Errors raised while resolving/decoding context data.
#[derive(Debug, Fail)]
pub enum ContextDataError {
    #[fail(display = "Resolve/decode context data failed to decode: {}!", message)]
    DecodeError {
        message: String
    },
}
impl From<OCamlError> for ContextDataError {
    fn from(error: OCamlError) -> Self {
        match error {
            OCamlError::Exception(exception) => {
                ContextDataError::DecodeError {
                    // Fall back to "unknown" when the exception has no message.
                    message: exception.message().unwrap_or_else(|| "unknown".to_string())
                }
            }
        }
    }
}
// Opaque JSON payload, kept as a plain string.
pub type Json = String;
/// RPC request forwarded to the protocol.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq)]
pub struct JsonRpcRequest {
    // JSON body of the request.
    pub body: Json,
    // RPC context path the request targets.
    pub context_path: String,
}
/// RPC response produced by the protocol.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq)]
pub struct JsonRpcResponse {
    pub body: Json
}
lazy_static! {
    // Wire-format schemas; field order mirrors the structs.
    pub static ref JSON_RPC_REQUEST_ENCODING: Encoding = Encoding::Obj(vec![
        Field::new("body", Encoding::String),
        Field::new("context_path", Encoding::String),
    ]);
    pub static ref JSON_RPC_RESPONSE_ENCODING: Encoding = Encoding::Obj(vec![
        Field::new("body", Encoding::String),
    ]);
}
impl FfiMessage for JsonRpcResponse {
    fn encoding() -> &'static Encoding {
        &JSON_RPC_RESPONSE_ENCODING
    }
}
/// RPC request routed to a specific protocol service, with the block context
/// needed to answer it.
#[derive(Serialize, Deserialize, Debug, Clone, Builder, PartialEq)]
pub struct ProtocolJsonRpcRequest {
    pub block_header: BlockHeader,
    pub chain_arg: String,
    pub chain_id: ChainId,
    pub request: JsonRpcRequest,
    // TODO: TE-140 - will be removed, when router is done
    pub ffi_service: FfiRpcService,
}
/// Protocol-side RPC services addressable over the FFI.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
pub enum FfiRpcService {
    HelpersRunOperation,
    HelpersPreapplyOperations,
    HelpersPreapplyBlock,
    HelpersCurrentLevel,
    DelegatesMinimalValidTime,
    HelpersForgeOperations,
    ContextContract,
}
lazy_static! {
    // Wire-format schema for `ProtocolJsonRpcRequest`; field order mirrors the struct.
    // NOTE(review): the tag numbers below must stay in sync with the
    // `FfiRpcService` variant order expected by the ocaml decoder.
    pub static ref PROTOCOL_JSON_RPC_REQUEST_ENCODING: Encoding = Encoding::Obj(vec![
        Field::new("block_header", Encoding::dynamic(BlockHeader::encoding().clone())),
        Field::new("chain_arg", Encoding::String),
        Field::new("chain_id", Encoding::Hash(HashType::ChainId)),
        Field::new("request", JSON_RPC_REQUEST_ENCODING.clone()),
        Field::new("ffi_service", Encoding::Tags(
            size_of::<u16>(),
            TagMap::new(vec![
                Tag::new(0, "HelpersRunOperation", Encoding::Unit),
                Tag::new(1, "HelpersPreapplyOperations", Encoding::Unit),
                Tag::new(2, "HelpersPreapplyBlock", Encoding::Unit),
                Tag::new(3, "HelpersCurrentLevel", Encoding::Unit),
                Tag::new(4, "DelegatesMinimalValidTime", Encoding::Unit),
                Tag::new(5, "HelpersForgeOperations", Encoding::Unit),
                Tag::new(6, "ContextContract", Encoding::Unit),
            ]),
        )
        ),
    ]);
}
impl FfiMessage for ProtocolJsonRpcRequest {
    fn encoding() -> &'static Encoding {
        &PROTOCOL_JSON_RPC_REQUEST_ENCODING
    }
}
/// Errors that can occur while calling a protocol RPC via the FFI.
#[derive(Serialize, Deserialize, Debug, Fail, PartialEq)]
pub enum ProtocolRpcError {
    #[fail(display = "Failed to call protocol rpc - message: {}!", message)]
    FailedToCallProtocolRpc {
        message: String,
    },
    // Request could not be serialized for the FFI.
    #[fail(display = "Invalid request data - message: {}!", message)]
    InvalidRequestData {
        message: String,
    },
    // Response could not be deserialized from the FFI.
    #[fail(display = "Invalid response data - message: {}!", message)]
    InvalidResponseData {
        message: String,
    },
}
impl From<CallError> for ProtocolRpcError {
    /// Maps a generic FFI failure onto the matching RPC error variant; a
    /// missing parsed message is reported as "unknown".
    fn from(error: CallError) -> Self {
        match error {
            CallError::FailedToCall { parsed_error_message } => {
                ProtocolRpcError::FailedToCallProtocolRpc {
                    message: parsed_error_message.unwrap_or_else(|| "unknown".to_string()),
                }
            }
            CallError::InvalidRequestData { message } => {
                ProtocolRpcError::InvalidRequestData { message }
            }
            CallError::InvalidResponseData { message } => {
                ProtocolRpcError::InvalidResponseData { message }
            }
        }
    }
}
/// Request to compute operation-tree paths for the given operation hash groups.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq)]
pub struct ComputePathRequest {
    pub operations: Vec<Vec<OperationHash>>,
}
/// Response carrying one computed path per operation group.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq)]
pub struct ComputePathResponse {
    pub operations_hashes_path: Vec<Path>,
}
lazy_static! {
    // Wire-format schema for `ComputePathRequest`.
    pub static ref COMPUTE_PATH_REQUEST_ENCODING: Encoding = Encoding::Obj(vec![
        Field::new("operations", Encoding::dynamic(Encoding::list(Encoding::dynamic(Encoding::list(Encoding::Hash(HashType::OperationHash)))))),
    ]);
}
lazy_static! {
    // Wire-format schema for `ComputePathResponse`.
    pub static ref COMPUTE_PATH_RESPONSE_ENCODING: Encoding = Encoding::Obj(vec![
        Field::new("operations_hashes_path", Encoding::dynamic(Encoding::list(path_encoding()))),
    ]);
}
impl FfiMessage for ComputePathRequest {
    fn encoding() -> &'static Encoding {
        &COMPUTE_PATH_REQUEST_ENCODING
    }
}
impl FfiMessage for ComputePathResponse {
    fn encoding() -> &'static Encoding {
        &COMPUTE_PATH_RESPONSE_ENCODING
    }
}
/// Errors that can occur while computing operation paths via the FFI.
#[derive(Serialize, Deserialize, Debug, Fail)]
pub enum ComputePathError {
    // Protocol-side path computation failure.
    #[fail(display = "Path computation failed, message: {}!", message)]
    PathError {
        message: String
    },
    // FFI (de)serialization problem on the request or the response.
    // Display text fixed: it was a copy-paste of the `PathError` message;
    // now consistent with the sibling error enums in this module.
    #[fail(display = "Invalid request/response data - message: {}!", message)]
    InvalidRequestResponseData {
        message: String
    },
}
impl From<CallError> for ComputePathError {
    /// Maps a generic FFI failure to a path-computation error; a missing
    /// parsed message is reported as "unknown".
    fn from(error: CallError) -> Self {
        match error {
            CallError::FailedToCall { parsed_error_message } => {
                match parsed_error_message {
                    None => ComputePathError::PathError {
                        message: "unknown".to_string()
                    },
                    Some(message) => {
                        ComputePathError::PathError {
                            message: message.to_string()
                        }
                    }
                }
            }
            CallError::InvalidRequestData { message } => ComputePathError::InvalidRequestResponseData {
                message
            },
            CallError::InvalidResponseData { message } => ComputePathError::InvalidRequestResponseData {
                message
            },
        }
    }
}
// svd2rust-generated field reader/writer type aliases for the RTC ICSR register.
// NOTE(review): generated code — regenerate from the SVD rather than editing by hand.
#[doc = "Register `ICSR` reader"]
pub type R = crate::R<ICSR_SPEC>;
#[doc = "Register `ICSR` writer"]
pub type W = crate::W<ICSR_SPEC>;
#[doc = "Field `ALRAWF` reader - ALRAWF"]
pub type ALRAWF_R = crate::BitReader;
#[doc = "Field `ALRBWF` reader - ALRBWF"]
pub type ALRBWF_R = crate::BitReader;
#[doc = "Field `WUTWF` reader - WUTWF"]
pub type WUTWF_R = crate::BitReader;
#[doc = "Field `SHPF` reader - SHPF"]
pub type SHPF_R = crate::BitReader;
#[doc = "Field `INITS` reader - INITS"]
pub type INITS_R = crate::BitReader;
#[doc = "Field `RSF` reader - RSF"]
pub type RSF_R = crate::BitReader;
#[doc = "Field `RSF` writer - RSF"]
pub type RSF_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `INITF` reader - INITF"]
pub type INITF_R = crate::BitReader;
#[doc = "Field `INIT` reader - INIT"]
pub type INIT_R = crate::BitReader;
#[doc = "Field `INIT` writer - INIT"]
pub type INIT_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `RECALPF` reader - RECALPF"]
pub type RECALPF_R = crate::BitReader;
// Read accessors: each method extracts one ICSR status bit from the cached register value.
// NOTE(review): generated code — regenerate from the SVD rather than editing by hand.
impl R {
    #[doc = "Bit 0 - ALRAWF"]
    #[inline(always)]
    pub fn alrawf(&self) -> ALRAWF_R {
        ALRAWF_R::new((self.bits & 1) != 0)
    }
    #[doc = "Bit 1 - ALRBWF"]
    #[inline(always)]
    pub fn alrbwf(&self) -> ALRBWF_R {
        ALRBWF_R::new(((self.bits >> 1) & 1) != 0)
    }
    #[doc = "Bit 2 - WUTWF"]
    #[inline(always)]
    pub fn wutwf(&self) -> WUTWF_R {
        WUTWF_R::new(((self.bits >> 2) & 1) != 0)
    }
    #[doc = "Bit 3 - SHPF"]
    #[inline(always)]
    pub fn shpf(&self) -> SHPF_R {
        SHPF_R::new(((self.bits >> 3) & 1) != 0)
    }
    #[doc = "Bit 4 - INITS"]
    #[inline(always)]
    pub fn inits(&self) -> INITS_R {
        INITS_R::new(((self.bits >> 4) & 1) != 0)
    }
    #[doc = "Bit 5 - RSF"]
    #[inline(always)]
    pub fn rsf(&self) -> RSF_R {
        RSF_R::new(((self.bits >> 5) & 1) != 0)
    }
    #[doc = "Bit 6 - INITF"]
    #[inline(always)]
    pub fn initf(&self) -> INITF_R {
        INITF_R::new(((self.bits >> 6) & 1) != 0)
    }
    #[doc = "Bit 7 - INIT"]
    #[inline(always)]
    pub fn init(&self) -> INIT_R {
        INIT_R::new(((self.bits >> 7) & 1) != 0)
    }
    #[doc = "Bit 16 - RECALPF"]
    #[inline(always)]
    pub fn recalpf(&self) -> RECALPF_R {
        RECALPF_R::new(((self.bits >> 16) & 1) != 0)
    }
}
// Write accessors: only RSF (bit 5) and INIT (bit 7) are writable through the
// field API; everything else goes through the raw `bits` escape hatch.
// NOTE(review): generated code — regenerate from the SVD rather than editing by hand.
impl W {
    #[doc = "Bit 5 - RSF"]
    #[inline(always)]
    #[must_use]
    pub fn rsf(&mut self) -> RSF_W<ICSR_SPEC, 5> {
        RSF_W::new(self)
    }
    #[doc = "Bit 7 - INIT"]
    #[inline(always)]
    #[must_use]
    pub fn init(&mut self) -> INIT_W<ICSR_SPEC, 7> {
        INIT_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    // SAFETY contract: the caller must supply a value valid for this register;
    // no field-level checking is performed here.
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
// Register spec: marks ICSR as a 32-bit readable/writable register with reset value 0x07.
// NOTE(review): generated code — regenerate from the SVD rather than editing by hand.
#[doc = "This register is write protected. The write access procedure is described in RTC register write protection on page1830. This register can be globally protected, or each bit of this register can be individually protected against non-secure access. Refer to Section50.3.4: RTC secure protection modes.\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`icsr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`icsr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct ICSR_SPEC;
impl crate::RegisterSpec for ICSR_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`icsr::R`](R) reader structure"]
impl crate::Readable for ICSR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`icsr::W`](W) writer structure"]
impl crate::Writable for ICSR_SPEC {
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets ICSR to value 0x07"]
impl crate::Resettable for ICSR_SPEC {
    const RESET_VALUE: Self::Ux = 0x07;
}
|
use futures_lite::stream::StreamExt;
use lapin::{
options::*,
types::{AMQPValue, FieldTable},
Connection, ConnectionProperties, ExchangeKind, Result,
};
use tracing::info;
/// Example: declares a queue bound to a direct exchange, a dead-lettering
/// "trash" queue, and a consumer that acks everything, then prints the
/// recorded broker topology.
fn main() -> Result<()> {
    // Default to info-level logging unless the caller configured RUST_LOG.
    if std::env::var("RUST_LOG").is_err() {
        std::env::set_var("RUST_LOG", "info");
    }
    tracing_subscriber::fmt::init();
    let addr = std::env::var("AMQP_ADDR").unwrap_or_else(|_| "amqp://127.0.0.1:5672/%2f".into());
    async_global_executor::block_on(async {
        let conn = Connection::connect(&addr, ConnectionProperties::default()).await?;
        info!("CONNECTED");
        // Separate channels for topology setup (a) and consuming (b).
        let channel_a = conn.create_channel().await?;
        let channel_b = conn.create_channel().await?;
        let queue = channel_a
            .queue_declare(
                "hello",
                QueueDeclareOptions::default(),
                FieldTable::default(),
            )
            .await?;
        info!(?queue, "Declared queue");
        channel_a
            .exchange_declare(
                "test-exchange",
                ExchangeKind::Direct,
                ExchangeDeclareOptions::default(),
                FieldTable::default(),
            )
            .await?;
        channel_a
            .queue_bind(
                queue.name().as_str(),
                "test-exchange",
                "test-rk",
                QueueBindOptions::default(),
                FieldTable::default(),
            )
            .await?;
        // Dead-letter setup: messages sitting in trash-queue for 2s are
        // re-routed to test-exchange/test-rk (i.e. back into "hello").
        let mut dloptions = FieldTable::default();
        dloptions.insert("x-message-ttl".into(), AMQPValue::LongUInt(2000));
        dloptions.insert(
            "x-dead-letter-exchange".into(),
            AMQPValue::LongString("test-exchange".into()),
        );
        dloptions.insert(
            "x-dead-letter-routing-key".into(),
            AMQPValue::LongString("test-rk".into()),
        );
        channel_a
            .queue_declare("trash-queue", QueueDeclareOptions::default(), dloptions)
            .await?;
        let mut consumer = channel_b
            .basic_consume(
                "hello",
                "my_consumer",
                BasicConsumeOptions::default(),
                FieldTable::default(),
            )
            .await?;
        // Detached background task: ack every delivery as it arrives.
        async_global_executor::spawn(async move {
            info!("will consume");
            while let Some(delivery) = consumer.next().await {
                let delivery = delivery.expect("error in consumer");
                delivery.ack(BasicAckOptions::default()).await.expect("ack");
            }
        })
        .detach();
        println!(
            "Topology: {}",
            serde_json::to_string_pretty(&conn.topology()).unwrap()
        );
        Ok(())
    })
}
|
mod proc_service;
mod thread_db;
use std::collections::HashMap;
use std::fs::File;
use std::io::Read;
pub use thread_db::{TdErr, TdTaStats, TdThrInfo};
use thread_db::{TdThrAgent, TdThrHandle, TdThrState};
use proc_service::ProcHandle;
use dlopen::wrapper::Container;
/// Runs a libthread_db function, returning on error.
///
/// Expands to a match on the `TdErr` result: `Ok` continues, anything else
/// becomes an early `return Err(err)` from the enclosing function.
macro_rules! td_try {
    ($e: expr) => {
        match $e {
            TdErr::Ok => (),
            err => return Err(err),
        }
    }
}
/// Loaded libthread_db shared library (function table loaded via dlopen).
pub struct Library {
    api: Container<thread_db::ThreadDb>,
}
impl Library {
    /// Loads libthread_db and resolves its entry points.
    pub fn new() -> Library {
        Library {
            api: thread_db::open_lib(),
        }
    }
    /// Attaches to the process `pid` and initializes a libthread_db agent for it.
    ///
    /// Failures from symbol resolution or ptrace attach are logged and mapped
    /// to `TdErr::Err`.
    pub fn attach(&self, pid: i32) -> Result<Process, TdErr> {
        let symbols = match get_symbols(pid) {
            Ok(s) => s,
            Err(e) => {
                eprintln!("get_symbols: {:?}", e);
                return Err(TdErr::Err);
            }
        };
        // Boxed so the pointer handed to libthread_db stays stable even if
        // `Process` is moved on the Rust side.
        let mut handle = match ProcHandle::new(pid) {
            Ok(h) => Box::new(h),
            Err(e) => {
                eprintln!("could not attach to process: {:?}", e);
                return Err(TdErr::Err);
            }
        };
        handle.symbols = symbols;
        let mut ta: *mut TdThrAgent = std::ptr::null_mut();
        unsafe {
            // Initialize libthread_db.
            td_try!(self.api.td_ta_new(handle.as_mut(), &mut ta));
        }
        // NOTE(review): `&self` here is `&&Library` and relies on deref
        // coercion to the `&'a Library` field type; plain `self` would do.
        Ok(Process { lib: &self, handle, ta })
    }
}
/// Returns a map of mapped symbols in the process with the given pid.
///
/// Symbol addresses are computed as (symbol value in the ELF) + (start of the
/// library's first mapping) — see the mapping discussion below.
fn get_symbols(pid: i32) -> Result<HashMap<String, usize>, Box<std::error::Error>> {
    // Result map.
    let mut symbols = HashMap::new();
    // The mappings for libpthread look like this:
    //
    // 7ffff7f78000-7ffff7f7e000 r--p 00000000 fd:01 10893944   /usr/lib64/libpthread-2.28.so
    // 7ffff7f7e000-7ffff7f8e000 r-xp 00006000 fd:01 10893944   /usr/lib64/libpthread-2.28.so
    // 7ffff7f8e000-7ffff7f94000 r--p 00016000 fd:01 10893944   /usr/lib64/libpthread-2.28.so
    // 7ffff7f94000-7ffff7f95000 r--p 0001b000 fd:01 10893944   /usr/lib64/libpthread-2.28.so
    // 7ffff7f95000-7ffff7f96000 rw-p 0001c000 fd:01 10893944   /usr/lib64/libpthread-2.28.so
    // 7ffff7f96000-7ffff7f9a000 rw-p 00000000 00:00 0
    //
    // Some symbols (like __nptl_nthreads at 0x1d2e0 in the ELF file) end up in the last anonymous
    // mapping region, even if they're in the .data section (in the example, __nptl_nthreads ends
    // up at 0x77ffff7f952e0). To account for these symbols, the following code doesn't try to
    // understand any mappings other than the first (with offset 0).
    //
    // See also this Stackoverflow question: https://stackoverflow.com/questions/25274569/
    for map in proc_maps::get_process_maps(pid)? {
        // We're only interested in the first entry for each library.
        if map.offset > 0 || map.filename().is_none() {
            continue;
        }
        let filename = map.filename().as_ref().unwrap();
        // We can only read files, skip mappings to [stack] etc.
        if !filename.starts_with("/") {
            continue;
        }
        // Rebase each ELF symbol onto this mapping's load address.
        for (symbol, offset) in get_symbols_for_library(&filename)? {
            symbols.insert(symbol.to_string(), offset + map.start());
        }
    }
    Ok(symbols)
}
/// Returns a map with all symbols defined in the given library.
fn get_symbols_for_library(filename: &str) -> Result<HashMap<String, usize>, Box<std::error::Error>> {
let mut symbols = HashMap::new();
eprintln!("reading library {}", filename);
let mut f = match File::open(filename) {
Ok(f) => f,
//
Err(e) => {
use std::error::Error;
eprintln!("get_symbols_for_library: couldn't read {}: {}", filename, e.description());
return Ok(symbols);
}
};
let mut buf: Vec<u8> = Vec::new();
f.read_to_end(&mut buf)?;
let binary = goblin::elf::Elf::parse(&buf)?;
for sym in binary.syms.iter() {
if let Some(name) = binary.strtab.get_unsafe(sym.st_name) {
// Only keep symbols that start with a letter to keep the symbol hashmap small.
let first_char = name.chars().next().unwrap_or('\0');
if first_char.is_alphabetic() || first_char == '_' {
symbols.insert(name.to_string(), sym.st_value as usize);
}
}
}
Ok(symbols)
}
/// A process that libthread_db has been attached to via `Library::attach`.
pub struct Process<'a> {
    // Keeps the loaded libthread_db library alive while the process is in use.
    lib: &'a Library,
    // handle needs to be boxed so that the pointer that libthread_db keeps stays valid even if
    // Process is moved on the Rust side.
    handle: Box<ProcHandle>,
    // Thread agent created by td_ta_new; released in Drop via td_ta_delete.
    ta: *mut TdThrAgent,
}
impl Process<'_> {
    /// Get number of currently running threads in process associated with TA.
    pub fn get_nthreads(&self) -> Result<i32, TdErr> {
        // The initial value is arbitrary; td_ta_get_nthreads overwrites it on success.
        let mut result: i32 = 42;
        unsafe {
            td_try!(self.lib.api.td_ta_get_nthreads(self.ta, &mut result));
        }
        Ok(result)
    }
    /// Enable collecting statistics for process associated with TA.
    /// *Note*: Not implemented in glibc.
    pub fn enable_stats(&mut self, enable: bool) -> Result<(), TdErr> {
        unsafe {
            td_try!(self.lib.api.td_ta_enable_stats(self.ta, enable as i32));
        }
        Ok(())
    }
    /// Reset statistics.
    /// *Note*: Not implemented in glibc.
    pub fn reset_stats(&mut self) -> Result<(), TdErr> {
        unsafe {
            td_try!(self.lib.api.td_ta_reset_stats(self.ta));
        }
        Ok(())
    }
    /// Retrieve statistics from process associated with TA.
    /// *Note*: Not implemented in glibc.
    pub fn get_stats(&self) -> Result<TdTaStats, TdErr> {
        let mut result: TdTaStats = Default::default();
        unsafe {
            td_try!(self.lib.api.td_ta_get_stats(self.ta, &mut result));
        }
        Ok(result)
    }
    /// Get all threads.
    pub fn threads(&self) -> Result<Vec<Thread>, TdErr> {
        // The td_ta_thr_iter function will call the callback function for each thread. Save the
        // results in a Vec so that we can iterate over it.
        let mut handles: Vec<TdThrHandle> = Vec::new();
        unsafe {
            // AnyState + empty signal mask: visit every thread. The two 0 arguments look
            // like priority/flags filters — confirm against the td_ta_thr_iter docs.
            let sigmask = nix::sys::signal::SigSet::empty();
            let mut c_sigmask = sigmask.as_ref().clone();
            td_try!(self.lib.api.td_ta_thr_iter(self.ta, thr_iter_callback, &mut handles as *mut _ as *mut libc::c_void, TdThrState::AnyState, 0, &mut c_sigmask, 0));
        }
        Ok(handles.iter().map(|handle| Thread { lib: self.lib, handle: *handle }).collect())
    }
}
/// Appends the thread handle to the `Vec<TdThrHandle>` pointed to by `cbdata`.
///
/// Called by libthread_db from `td_ta_thr_iter` (see `Process::threads`, which passes
/// the Vec). Returns 0, which presumably signals success so iteration continues —
/// confirm against the td_ta_thr_iter documentation.
unsafe extern "C" fn thr_iter_callback(handle: *const TdThrHandle, cbdata: *mut libc::c_void) -> i32 {
    let threads = cbdata as *mut Vec<TdThrHandle>;
    (*threads).push(*handle);
    0
}
impl Drop for Process<'_> {
    /// Releases the thread agent allocated in `Library::attach`.
    fn drop(&mut self) {
        unsafe {
            // NOTE(review): panicking inside Drop aborts the process if the drop runs
            // during unwinding; consider logging the error instead — confirm intent.
            match self.lib.api.td_ta_delete(self.ta) {
                TdErr::Ok => (),
                err => panic!("Deleting Process with pid {} failed: {:?}", self.handle.pid, err),
            }
        }
    }
}
/// A single thread of an attached `Process`, identified by a libthread_db handle.
pub struct Thread<'a> {
    lib: &'a Library,
    handle: TdThrHandle,
}
impl Thread<'_> {
    /// Validate that this is a thread handle.
    pub fn validate(&self) -> Result<(), TdErr> {
        unsafe {
            td_try!(self.lib.api.td_thr_validate(&self.handle));
        }
        Ok(())
    }
    /// Return information about the thread.
    pub fn info(&self) -> Result<TdThrInfo, TdErr> {
        unsafe {
            // zeroed() provides a blank TdThrInfo; td_thr_get_info fills it in on success.
            let mut info: TdThrInfo = std::mem::zeroed();
            td_try!(self.lib.api.td_thr_get_info(&self.handle, &mut info));
            Ok(info)
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use std::process::{Command, Stdio};
    use std::io::{BufRead, BufReader};
    /// Read symbols from the test process and compare to the symbols gdb reads.
    #[test]
    fn test_get_symbols() {
        use nix::unistd::{fork, ForkResult};
        // We need to fork because gdb will stop the process while reading the symbols, preventing
        // us from capturing its output.
        match fork().unwrap() {
            ForkResult::Child => {
                // Stay alive long enough for the parent to inspect us, then exit.
                std::thread::sleep(std::time::Duration::from_millis(2000));
                std::process::exit(0);
            },
            ForkResult::Parent { child, .. } => {
                let pid = child.as_raw();
                let symbols = get_symbols(pid as i32).expect("could not get symbols");
                let gdb_symbols = get_symbols_gdb(pid as i32).expect("could not get gdb symbols");
                println!("#symbols = {}, #gdb_symbols = {}", symbols.len(), gdb_symbols.len());
                // Only compare threading-related symbols; the full symbol sets differ.
                let mut checked_symbols = 0;
                for (symbol, offset) in gdb_symbols {
                    if symbol.contains("nptl") || symbol.contains("_thread_db") {
                        assert_eq!(symbols[&symbol], offset, "symbol {} does not match: {:x} != {:x}", symbol, symbols[&symbol], offset);
                        checked_symbols += 1;
                    }
                }
                dbg!(checked_symbols);
            }
        }
    }
    /// Returns the variables gdb reports for the given pid, keyed by symbol name.
    fn get_symbols_gdb(pid: i32) -> Result<HashMap<String, usize>, Box<std::error::Error>> {
        let mut result = HashMap::new();
        eprintln!("starting gdb");
        let child = Command::new("gdb")
            .arg(format!("--pid={}", pid))
            .arg("--batch")
            .arg("-ex").arg("info variables")
            .stdout(Stdio::piped())
            .spawn()?;
        let reader = BufReader::new(child.stdout.unwrap());
        for line in reader.lines().filter_map(|line| line.ok()) {
            let tokens: Vec<&str> = line.split_whitespace().collect();
            // Filter unrelated gdb output by searching for lines with a number and some other
            // word.
            if tokens.len() == 2 && tokens[0].starts_with("0x") {
                result.insert(tokens[1].to_string(), usize::from_str_radix(&tokens[0][2..], 16)?);
            }
        }
        Ok(result)
    }
}
|
use nom::character::complete::char;
use nom::combinator::map;
use nom::sequence::tuple;
use nom::IResult;
/// Raw puzzle input for day 10, embedded at compile time.
const INPUT: &str = include_str!("../inputs/day_10_input");
/// Parses a coordinate pair of the form `<x, y>` (optional spaces after `<`,
/// at least one space after the comma) into an `(x, y)` tuple.
fn parse_num_pair(i: &str) -> IResult<&str, (i32, i32)> {
    use nom::character::complete::{space0, space1};
    // `map(...)(i)` already yields an `IResult`; the previous `Ok(...?)`
    // wrapper was a no-op (clippy: needless_question_mark).
    map(
        tuple((
            char('<'),
            space0,
            parse_num,
            char(','),
            space1,
            parse_num,
            char('>'),
        )),
        |(_, _, x, _, _, y, _): (_, _, i32, _, _, i32, _)| (x, y),
    )(i)
}
/// Parses an optionally negative decimal integer.
fn parse_num(i: &str) -> IResult<&str, i32> {
    use nom::character::complete::digit1;
    use nom::combinator::opt;
    // Return the parser result directly instead of the redundant `Ok(...?)`.
    map(
        tuple((opt(char('-')), digit1)),
        |(sign, num): (_, &str)| {
            let value = num.parse::<i32>().unwrap();
            if sign.is_some() { -value } else { value }
        },
    )(i)
}
/// A light point from the puzzle input: a position plus a constant per-step velocity.
#[derive(Debug)]
struct Point {
    position: (i32, i32),
    velocity: (i32, i32),
}
impl Point {
    /// Parses one input line of the form `position=<x, y> velocity=<dx, dy>`.
    fn parse(i: &str) -> IResult<&str, Self> {
        use nom::bytes::complete::tag;
        // Return the parser result directly instead of destructuring and re-wrapping it.
        map(
            tuple((
                tag("position="),
                parse_num_pair,
                tag(" velocity="),
                parse_num_pair,
            )),
            |(_, position, _, velocity): (_, (i32, i32), _, _)| Self { position, velocity },
        )(i)
    }
}
/// Solves day 10: finds the time step at which the points occupy their smallest
/// bounding box (when the message appears) and prints the resulting ASCII art.
pub fn solve() {
    let mut points = INPUT
        .lines()
        .map(|i| Point::parse(i).unwrap().1)
        .collect::<Vec<_>>();
    // First pass: find the step with the smallest combined bounding-box extent.
    let mut min_delta = i32::MAX;
    let mut min_step = 0;
    for i in 0..80000 {
        advance_points(&mut points);
        let (min, max) = find_extrema(&points);
        let delta = (max.0 - min.0 + 1) + (max.1 - min.1 + 1);
        if delta < min_delta {
            min_delta = delta;
            min_step = i;
        }
    }
    // Second pass: re-parse the input and advance exactly to that step.
    let mut points = INPUT
        .lines()
        .map(|i| Point::parse(i).unwrap().1)
        .collect::<Vec<_>>();
    for _ in 0..=min_step {
        advance_points(&mut points);
    }
    // Render the grid: position.0 is the horizontal axis, position.1 the vertical one.
    let (min, max) = find_extrema(&points);
    for row in min.1..=max.1 {
        for col in min.0..=max.0 {
            let lit = points.iter().any(|point| point.position == (col, row));
            print!("{}", if lit { '#' } else { ' ' });
        }
        println!();
    }
}
/// Moves every point one time step forward along its velocity vector.
fn advance_points(points: &mut [Point]) {
    for p in points.iter_mut() {
        let (dx, dy) = p.velocity;
        p.position.0 += dx;
        p.position.1 += dy;
    }
}
/// Returns the componentwise minimum and maximum positions over all points,
/// i.e. the two corners of the bounding box.
fn find_extrema(points: &[Point]) -> ((i32, i32), (i32, i32)) {
    points.iter().fold(
        ((i32::MAX, i32::MAX), (i32::MIN, i32::MIN)),
        |(lo, hi), p| {
            let (x, y) = p.position;
            ((lo.0.min(x), lo.1.min(y)), (hi.0.max(x), hi.1.max(y)))
        },
    )
}
/// Solves the extra part: reports how many steps it takes until the points
/// occupy their smallest bounding box.
pub fn solve_extra() {
    let mut points = INPUT
        .lines()
        .map(|line| Point::parse(line).unwrap().1)
        .collect::<Vec<_>>();
    // Track (smallest extent seen, step at which it occurred).
    let mut best = (i32::MAX, 0);
    for step in 0..80000 {
        advance_points(&mut points);
        let (lo, hi) = find_extrema(&points);
        let extent = (hi.0 - lo.0 + 1) + (hi.1 - lo.1 + 1);
        if extent < best.0 {
            best = (extent, step);
        }
    }
    println!("Min steps: {}", best.1 + 1);
}
|
use inkwell::values::{IntValue, PointerValue};
/// A stack-like mapping from variable names to variables; later bindings
/// shadow earlier ones (lookups scan from the back).
pub struct Environment {
    variables: Vec<(String, Variable)>,
}
impl Environment {
    /// Creates an empty environment with no bindings.
    pub fn new() -> Environment {
        let variables: Vec<(String, Variable)> = Vec::new();
        Environment { variables }
    }
    /// Returns a clone of the most recently bound variable with the given
    /// name, or `None` if the name is unbound.
    // `&str` instead of `&String`: callers passing `&String` still work via
    // deref coercion.
    pub fn get(&self, skey: &str) -> Option<Variable> {
        self.variables
            .iter()
            .rev()
            .find(|(name, _)| name.as_str() == skey)
            .map(|(_, var)| var.clone())
    }
    /// Returns the index (into `variables`) of the most recent binding of `skey`.
    fn find(&self, skey: &str) -> Option<usize> {
        self.variables
            .iter()
            .rev()
            .position(|(name, _)| name.as_str() == skey)
            // `position` counts from the back; translate to a front index.
            .map(|idx| self.variables.len() - idx - 1)
    }
    /// Replaces the most recent binding of `skey`, or pushes a new binding if
    /// none exists.
    pub fn update(&mut self, skey: String, sval: Variable) {
        match self.find(&skey) {
            Some(idx) => self.variables[idx] = (skey, sval),
            None => self.variables.push((skey, sval)),
        }
    }
}
/// A named slot in the environment; wraps the LLVM pointer backing the variable.
#[derive(Debug, PartialEq, Clone)]
pub enum Variable {
    Int(IntVariable),
    Array(ArrayVariable),
    Null,
}
/// An integer variable: its name and the stack/heap pointer it lives behind.
#[derive(Debug, PartialEq, Clone)]
pub struct IntVariable {
    pub name: String,
    pub pointer: PointerValue,
}
/// An array variable: its name and the pointer to its storage.
#[derive(Debug, PartialEq, Clone)]
pub struct ArrayVariable {
    pub name: String,
    pub pointer: PointerValue,
}
/// An SSA value produced during code generation: either an LLVM int, an LLVM
/// pointer, or nothing.
#[derive(Debug, PartialEq, Clone)]
pub enum Value {
    Int(IntValue),
    Pointer(PointerValue),
    Null,
}
impl Value {
    /// Consumes the value and returns the inner `PointerValue`, or an error if
    /// this is not a pointer value.
    pub fn get_pointer(self) -> Result<PointerValue, String> {
        match self {
            Value::Pointer(pointer) => Ok(pointer),
            _ => Err("this is not a pointer value".to_string()),
        }
    }
    /// Consumes the value and returns the inner `IntValue`, or an error if
    /// this is not an int value.
    pub fn get_int(self) -> Result<IntValue, String> {
        match self {
            Value::Int(value) => Ok(value),
            // Fixed message; it previously read "this is not a value value".
            _ => Err("this is not an int value".to_string()),
        }
    }
}
|
pub mod upgrade;
mod sender;
pub use sender::Sender;
|
use std::{mem, string::ToString};
use failure::{format_err, Error};
// Value-type tokens as found in the binary document format; `Value::create`
// dispatches on these to decode the 32-bit payload.
const TOKEN_TYPE_REFERENCE_ID: u8 = 0x01;
const TOKEN_TYPE_ATTRIBUTE_REFERENCE_ID: u8 = 0x02;
const TOKEN_TYPE_STRING: u8 = 0x03;
const TOKEN_TYPE_FLOAT: u8 = 0x04;
const TOKEN_TYPE_DIMENSION: u8 = 0x05;
const TOKEN_TYPE_FRACTION: u8 = 0x06;
const TOKEN_TYPE_DYN_REFERENCE: u8 = 0x07;
const TOKEN_TYPE_DYN_ATTRIBUTE: u8 = 0x08;
const TOKEN_TYPE_INTEGER: u8 = 0x10;
const TOKEN_TYPE_FLAGS: u8 = 0x11;
const TOKEN_TYPE_BOOLEAN: u8 = 0x12;
const TOKEN_TYPE_ARGB8: u8 = 0x1C;
const TOKEN_TYPE_RGB8: u8 = 0x1D;
const TOKEN_TYPE_ARGB4: u8 = 0x1E;
const TOKEN_TYPE_RGB4: u8 = 0x1F;
#[derive(Debug)]
/// Represents a value on the binary documents. It is formed by a type and a 32 bits payload. The
/// payloads are interpreted depending on the type.
pub enum Value {
    /// Represents an index on a `StringTable`
    StringReference(u32),
    /// Represents a dimension. Bits [31..8] represents the numeric value. Bits [7..4] is an
    /// index on a lookup table that modifies the numeric value. Bits [3..0] is an index on a
    /// dimensions lookup table
    Dimension(String),
    /// Represents a fraction. Bits [31..8] represents the numeric value. Bits [7..4] seems to be
    /// unused. Bits [3..0] is an index on a units lookup table
    Fraction(String),
    /// Represents a float value
    Float(f32),
    /// Represents an integer value
    Integer(u32),
    /// Integer value that should be interpreted as a bit flag array
    Flags(u32),
    /// Represents a boolean value
    Boolean(bool),
    /// Represents a ARGB8 color
    ColorARGB8(String),
    /// Represents a RGB8 color
    ColorRGB8(String),
    /// Represents a ARGB4 color
    ColorARGB4(String),
    /// Represents a RGB4 color
    ColorRGB4(String),
    /// Represents a reference to an `Entry`
    ReferenceId(u32),
    /// Represents a reference to an `Entry` on attribute context
    AttributeReferenceId(u32),
    /// Unknown value. It saves the type and the payload in case that needs to be checked
    Unknown(u8, u32),
}
impl ToString for Value {
    /// Renders the value the way it appears in a decoded document.
    fn to_string(&self) -> String {
        match self {
            Self::StringReference(idx) => format!("@string/{}", idx),
            Self::ReferenceId(id) | Self::AttributeReferenceId(id) => format!("@id/0x{:x}", id),
            Self::Float(f) => format!("{:.*}", 1, f),
            Self::Integer(n) | Self::Flags(n) => n.to_string(),
            Self::Boolean(b) => b.to_string(),
            // These variants already carry their formatted representation.
            Self::Dimension(repr)
            | Self::Fraction(repr)
            | Self::ColorARGB8(repr)
            | Self::ColorRGB8(repr)
            | Self::ColorARGB4(repr)
            | Self::ColorRGB4(repr) => repr.clone(),
            Self::Unknown(..) => "Unknown".to_string(),
        }
    }
}
impl Value {
    /// Creates a new `Value`. If the payload can not be interpreted by the given `value_type`, it
    /// will return an error. If the type is not known, it will return `Value::Unknown`
    pub fn create(value_type: u8, data: u32) -> Result<Self, Error> {
        let value = match value_type {
            TOKEN_TYPE_REFERENCE_ID | TOKEN_TYPE_DYN_REFERENCE => Self::ReferenceId(data),
            TOKEN_TYPE_ATTRIBUTE_REFERENCE_ID | TOKEN_TYPE_DYN_ATTRIBUTE => {
                Self::AttributeReferenceId(data)
            }
            TOKEN_TYPE_STRING => Self::StringReference(data),
            TOKEN_TYPE_DIMENSION => {
                let units: [&str; 6] = ["px", "dip", "sp", "pt", "in", "mm"];
                let value = Self::complex(data);
                // The low nibble selects the unit.
                let unit_idx = data & 0xF;
                if let Some(unit) = units.get(unit_idx as usize) {
                    let formatted = format!("{:.*}{}", 1, value, unit);
                    Self::Dimension(formatted)
                } else {
                    return Err(format_err!(
                        "expected a valid unit index, got: {}",
                        unit_idx
                    ));
                }
            }
            TOKEN_TYPE_FRACTION => {
                let units: [&str; 2] = ["%", "%p"];
                let unit_idx = (data & 0xF) as usize;
                let final_value = Self::complex(data) * 100.0;
                if let Some(unit) = units.get(unit_idx) {
                    // Print full precision only when the value is not (close to) an integer.
                    let integer = final_value.round();
                    let diff = final_value - integer;
                    let formatted_fraction = if diff > 0.0000001 {
                        format!("{:.*}{}", 6, final_value, unit)
                    } else {
                        format!("{:.*}{}", 1, final_value, unit)
                    };
                    Self::Fraction(formatted_fraction)
                } else {
                    return Err(format_err!(
                        "expected a valid unit index, got: {}",
                        unit_idx
                    ));
                }
            }
            TOKEN_TYPE_INTEGER => {
                // TODO: Should we transmute to signed integer?
                Self::Integer(data)
            }
            TOKEN_TYPE_FLAGS => Self::Flags(data),
            TOKEN_TYPE_FLOAT => Self::Float(f32::from_bits(data)),
            TOKEN_TYPE_BOOLEAN => Self::Boolean(data > 0),
            TOKEN_TYPE_ARGB8 => {
                let formatted_color = format!("#{:08x}", data);
                Self::ColorARGB8(formatted_color)
            }
            TOKEN_TYPE_RGB8 => {
                let formatted_color = format!("#{:08x}", data);
                Self::ColorRGB8(formatted_color)
            }
            TOKEN_TYPE_ARGB4 => {
                let formatted_color = format!("#{:08x}", data);
                Self::ColorARGB4(formatted_color)
            }
            TOKEN_TYPE_RGB4 => {
                let formatted_color = format!("#{:08x}", data);
                Self::ColorRGB4(formatted_color)
            }
            _ => Self::Unknown(value_type, data),
        };
        Ok(value)
    }
    /// Decodes a "complex" value as used by dimensions and fractions: bits [31..8]
    /// hold a signed mantissa, bits [5..4] select a radix entry that scales it down.
    fn complex(data: u32) -> f32 {
        // Mask keeps the mantissa bits in place; the `as i32` cast reinterprets the
        // sign bit exactly like the former `unsafe { mem::transmute(...) }` did,
        // but without unsafe code.
        let mantissa = 0xffffff << 8;
        let i_value = (data & mantissa) as i32;
        let m = i_value as f32;
        let mm = 1.0 / ((1 << 8) as f32);
        let radix = [
            1.0 * mm,
            1.0 / ((1 << 7) as f32) * mm,
            1.0 / ((1 << 15) as f32) * mm,
            1.0 / ((1 << 23) as f32) * mm,
        ];
        let idx = (data >> 4) & 0x3;
        m * radix[idx as usize]
    }
}
/// Round-trip tests: build a `Value` from a raw (type, payload) pair and check its
/// string rendering.
#[cfg(test)]
mod tests {
    use super::{
        ToString, Value, TOKEN_TYPE_ARGB4, TOKEN_TYPE_ARGB8, TOKEN_TYPE_ATTRIBUTE_REFERENCE_ID,
        TOKEN_TYPE_BOOLEAN, TOKEN_TYPE_DIMENSION, TOKEN_TYPE_DYN_ATTRIBUTE,
        TOKEN_TYPE_DYN_REFERENCE, TOKEN_TYPE_FLAGS, TOKEN_TYPE_FLOAT, TOKEN_TYPE_FRACTION,
        TOKEN_TYPE_INTEGER, TOKEN_TYPE_REFERENCE_ID, TOKEN_TYPE_RGB4, TOKEN_TYPE_RGB8,
        TOKEN_TYPE_STRING,
    };
    #[test]
    fn it_can_generate_a_string_value() {
        let value = Value::create(TOKEN_TYPE_STRING, 33);
        assert_eq!("@string/33", value.unwrap().to_string());
    }
    #[test]
    fn it_can_generate_reference_and_dyn_references() {
        let value = Value::create(TOKEN_TYPE_REFERENCE_ID, 12345).unwrap();
        let value2 = Value::create(TOKEN_TYPE_DYN_REFERENCE, 67890).unwrap();
        assert_eq!("@id/0x3039", value.to_string());
        assert_eq!("@id/0x10932", value2.to_string());
    }
    #[test]
    fn it_can_generate_attribute_and_dyn_references() {
        let value = Value::create(TOKEN_TYPE_ATTRIBUTE_REFERENCE_ID, 12345).unwrap();
        let value2 = Value::create(TOKEN_TYPE_DYN_ATTRIBUTE, 67890).unwrap();
        assert_eq!("@id/0x3039", value.to_string());
        assert_eq!("@id/0x10932", value2.to_string());
    }
    #[test]
    fn it_can_generate_a_positive_dimension() {
        let dim = 1 << 30; // Positive value 2-complement
        let units = 0x5;
        let value = Value::create(TOKEN_TYPE_DIMENSION, dim | units);
        let str_value = value.unwrap().to_string();
        assert_eq!("4194304.0mm", str_value);
    }
    #[test]
    fn it_can_generate_a_negative_dimension() {
        let dim = 1 << 31; // Negative value 2-complement
        let units = 0x0;
        let value = Value::create(TOKEN_TYPE_DIMENSION, dim | units);
        let str_value = value.unwrap().to_string();
        assert_eq!("-8388608.0px", str_value);
    }
    #[test]
    fn it_can_not_generate_a_dimension_if_units_are_out_of_range() {
        let dim = 0;
        let units = 0x6; // Only indexes 0..=5 are valid dimension units
        let value = Value::create(TOKEN_TYPE_DIMENSION, dim | units);
        // TODO: Assert error string!
        assert!(value.is_err());
    }
    #[test]
    fn it_can_generate_a_positive_fraction() {
        let dim = 1 << 25; // Positive value 2-complement
        let units = 0x1;
        let value = Value::create(TOKEN_TYPE_FRACTION, dim | units);
        let str_value = value.unwrap().to_string();
        assert_eq!("13107200.0%p", str_value);
    }
    #[test]
    fn it_can_generate_a_negative_fraction() {
        let dim = 1 << 31 | 1 << 5 | 1 << 10; // Negative value 2-complement
        let units = 0x0;
        let value = Value::create(TOKEN_TYPE_FRACTION, dim | units);
        let str_value = value.unwrap().to_string();
        assert_eq!("-25599.988281%", str_value);
    }
    #[test]
    fn it_can_not_generate_a_fraction_if_units_are_out_of_range() {
        let dim = 1 << 31 | 1 << 5 | 1 << 10;
        let units = 0x2; // Only indexes 0..=1 are valid fraction units
        let value = Value::create(TOKEN_TYPE_FRACTION, dim | units);
        // TODO: Assert error string!
        assert!(value.is_err());
    }
    #[test]
    fn it_can_generate_integer_values() {
        let int = 12345;
        let value = Value::create(TOKEN_TYPE_INTEGER, int);
        assert_eq!("12345", value.unwrap().to_string());
    }
    #[test]
    fn it_can_generate_flag_values() {
        let int = 12345;
        let value = Value::create(TOKEN_TYPE_FLAGS, int);
        assert_eq!("12345", value.unwrap().to_string());
    }
    #[test]
    fn it_can_generate_float_values() {
        // TODO: Improve this test with a IEEE 754 number
        let float = 0;
        let value = Value::create(TOKEN_TYPE_FLOAT, float);
        assert_eq!("0.0", value.unwrap().to_string());
    }
    #[test]
    fn it_can_generate_a_boolean_true_value() {
        let data = 123; // Any non-zero payload is true
        let value = Value::create(TOKEN_TYPE_BOOLEAN, data);
        assert_eq!("true", value.unwrap().to_string());
    }
    #[test]
    fn it_can_generate_a_boolean_false_value() {
        let data = 0;
        let value = Value::create(TOKEN_TYPE_BOOLEAN, data);
        assert_eq!("false", value.unwrap().to_string());
    }
    #[test]
    fn it_can_generate_a_color_value() {
        let data = 0x01AB23FE;
        let value = Value::create(TOKEN_TYPE_ARGB8, data);
        assert_eq!("#01ab23fe", value.unwrap().to_string());
    }
    #[test]
    fn it_can_generate_a_color2_value() {
        let data = 0x01AB23FE;
        let value = Value::create(TOKEN_TYPE_RGB8, data);
        assert_eq!("#01ab23fe", value.unwrap().to_string());
    }
    #[test]
    fn it_can_generate_an_argb4_color_value() {
        let data = 0x01AB23FE;
        let value = Value::create(TOKEN_TYPE_ARGB4, data);
        assert_eq!("#01ab23fe", value.unwrap().to_string());
    }
    #[test]
    fn it_can_generate_a_rgb4_color_value() {
        let data = 0x01AB23FE;
        let value = Value::create(TOKEN_TYPE_RGB4, data);
        assert_eq!("#01ab23fe", value.unwrap().to_string());
    }
    #[test]
    fn it_generated_unknown_values_if_type_is_unkown() {
        let data = 0x12345;
        let value = Value::create(0x20, data); // 0x20 is not a known token type
        assert_eq!("Unknown", value.unwrap().to_string());
    }
}
|
use crate::geometry::*;
use crate::input::cursor::CursorManager;
use crate::output_manager::OutputManager;
use crate::surface::{Surface, SurfaceEventManager, SurfaceExt};
use crate::window::*;
use crate::window_management_policy::WmPolicyManager;
use crate::window_manager::{WindowLayer, WindowManager, WindowManagerExt};
use log::{debug, error, trace};
use std::cell::RefCell;
use std::pin::Pin;
use std::rc::Rc;
use wlroots_sys::*;
/// Thin wrapper around a raw `wlr_layer_surface_v1_state` pointer.
#[derive(Debug, PartialEq, Eq)]
pub struct LayerSurfaceState(*mut wlr_layer_surface_v1_state);
impl LayerSurfaceState {
    /// Returns the edges the surface is anchored to.
    pub fn attached_edges(&self) -> WindowEdge {
        // from_bits_truncate silently drops anchor bits WindowEdge does not model.
        unsafe { WindowEdge::from_bits_truncate((*self.0).anchor) }
    }
    /// Overwrites the anchor edges in the underlying state.
    pub fn set_attached_edges(&self, attached_edges: WindowEdge) {
        unsafe { (*self.0).anchor = attached_edges.bits() }
    }
    /// Maps the raw zwlr layer constant to a `WindowLayer`; `Err(())` for unknown values.
    pub fn layer(&self) -> Result<WindowLayer, ()> {
        unsafe {
            #[allow(non_upper_case_globals)]
            match (*self.0).layer {
                zwlr_layer_shell_v1_layer_ZWLR_LAYER_SHELL_V1_LAYER_BACKGROUND => {
                    Ok(WindowLayer::Background)
                }
                zwlr_layer_shell_v1_layer_ZWLR_LAYER_SHELL_V1_LAYER_BOTTOM => Ok(WindowLayer::Bottom),
                zwlr_layer_shell_v1_layer_ZWLR_LAYER_SHELL_V1_LAYER_TOP => Ok(WindowLayer::Top),
                zwlr_layer_shell_v1_layer_ZWLR_LAYER_SHELL_V1_LAYER_OVERLAY => Ok(WindowLayer::Overlay),
                _ => Err(()),
            }
        }
    }
}
/// Thin wrapper around a raw `wlr_layer_surface_v1` pointer.
#[derive(Debug, PartialEq, Eq)]
pub struct LayerSurface(*mut wlr_layer_surface_v1);
impl LayerSurface {
    /// Wraps the given `wlr_surface` if it actually is a layer surface; `Err(())` otherwise.
    pub(crate) fn from_wlr_surface(wlr_surface: *mut wlr_surface) -> Result<LayerSurface, ()> {
        unsafe {
            if wlr_surface_is_layer_surface(wlr_surface) {
                let layer_surface = wlr_layer_surface_v1_from_wlr_surface(wlr_surface);
                Ok(LayerSurface(layer_surface))
            } else {
                Err(())
            }
        }
    }
    /// The state currently applied by the compositor.
    pub fn current(&self) -> LayerSurfaceState {
        unsafe { LayerSurfaceState(&mut (*self.0).current) }
    }
    /// The state the client has requested but the compositor has not applied yet.
    pub fn client_pending(&self) -> LayerSurfaceState {
        unsafe { LayerSurfaceState(&mut (*self.0).client_pending) }
    }
    /// The state the compositor intends to apply next.
    pub fn server_pending(&self) -> LayerSurfaceState {
        unsafe { LayerSurfaceState(&mut (*self.0).server_pending) }
    }
}
/// `SurfaceExt` adapter for layer surfaces. Layer surfaces are positioned by the
/// compositor from their anchor edges (see `update_anchor_edges`), so most of the
/// toplevel-oriented methods are no-ops or constants here.
impl SurfaceExt for LayerSurface {
    fn wl_resource(&self) -> *mut wl_resource {
        unsafe { (*self.0).resource }
    }
    fn wlr_surface(&self) -> *mut wlr_surface {
        unsafe { (*self.0).surface }
    }
    // Layer surfaces have no parent surface.
    fn parent_wlr_surface(&self) -> Option<*mut wlr_surface> {
        None
    }
    fn buffer_displacement(&self) -> Displacement {
        let surface = unsafe { &*self.wlr_surface() };
        let buffer_position = Point {
            x: surface.current.dx,
            y: surface.current.dy,
        };
        self.extents().top_left() - buffer_position
    }
    fn parent_displacement(&self) -> Displacement {
        Displacement::ZERO
    }
    /// Extents are reported from the size the client desires; the origin is always zero.
    fn extents(&self) -> Rectangle {
        unsafe {
            Rectangle {
                top_left: Point::ZERO,
                size: Size {
                    width: (*self.0).current.desired_width as i32,
                    height: (*self.0).current.desired_height as i32,
                },
            }
        }
    }
    // Positioning is derived from anchors; explicit moves are ignored.
    fn move_to(&self, _top_left: Point) {}
    fn resize(&self, size: Size) -> u32 {
        unsafe {
            wlr_layer_surface_v1_configure(self.0, size.width as u32, size.height as u32);
        }
        // No configure serial is tracked here — TODO confirm callers don't rely on it.
        0
    }
    // Layer surfaces expose no min/max size constraints.
    fn min_height(&self) -> Option<u32> {
        None
    }
    fn max_height(&self) -> Option<u32> {
        None
    }
    fn min_width(&self) -> Option<u32> {
        None
    }
    fn max_width(&self) -> Option<u32> {
        None
    }
    fn can_receive_focus(&self) -> bool {
        unsafe { (*self.current().0).keyboard_interactive }
    }
    // Activation/maximize/fullscreen/resizing states do not apply to layer surfaces.
    fn activated(&self) -> bool {
        false
    }
    fn set_activated(&self, _activated: bool) -> u32 {
        0
    }
    fn maximized(&self) -> bool {
        false
    }
    fn set_maximized(&self, _maximized: bool) -> u32 {
        0
    }
    fn fullscreen(&self) -> bool {
        false
    }
    fn set_fullscreen(&self, _fullscreen: bool) -> u32 {
        0
    }
    fn resizing(&self) -> bool {
        false
    }
    fn set_resizing(&self, _resizing: bool) -> u32 {
        0
    }
    fn is_toplevel(&self) -> bool {
        false
    }
    fn app_id(&self) -> Option<String> {
        None
    }
    fn title(&self) -> Option<String> {
        None
    }
    fn ask_client_to_close(&self) {
        unsafe {
            wlr_layer_surface_v1_close(self.0);
        }
    }
}
// Wires wlroots layer-surface signals (map/unmap/destroy/commit) to the shared
// WindowEventHandler. On commit the anchor-based position is recomputed before the
// configure serial is forwarded.
wayland_listener!(
pub(crate) LayerSurfaceEventManager,
WindowEventHandler,
[
map => map_func: |this: &mut LayerSurfaceEventManager, _data: *mut libc::c_void,| unsafe {
let handler = &mut this.data;
handler.map()
};
unmap => unmap_func: |this: &mut LayerSurfaceEventManager, _data: *mut libc::c_void,| unsafe {
let handler = &mut this.data;
handler.unmap()
};
destroy => destroy_func: |this: &mut LayerSurfaceEventManager, _data: *mut libc::c_void,| unsafe {
let handler = &mut this.data;
handler.destroy();
};
commit => commit_func: |this: &mut LayerSurfaceEventManager, _data: *mut libc::c_void,| unsafe {
let handler = &mut this.data;
if let Some(window) = handler.window.upgrade() {
update_anchor_edges(handler.output_manager.clone(), &window);
if let Surface::Layer(ref layer_surface_v1) = window.surface {
handler.commit(WindowCommitEvent {
serial: (*layer_surface_v1.0).configure_serial,
});
}
}
};
]
);
/// Handles `new_surface` events from the layer shell and wires new layer
/// surfaces into the window manager.
pub struct LayersEventHandler {
    wm_policy_manager: Rc<WmPolicyManager>,
    output_manager: Rc<OutputManager>,
    window_manager: Rc<WindowManager>,
    cursor_manager: Rc<CursorManager>,
}
impl LayersEventHandler {
    /// Handles a newly created layer surface: assigns it an output, validates the
    /// requested layer, registers event listeners and hands the resulting window to
    /// the policy manager. Surfaces with invalid requests are closed.
    fn new_surface(&mut self, layer_surface: *mut wlr_layer_surface_v1) {
        debug!("LayersEventHandler::new_surface");
        // Assign an output if the client did not request one
        unsafe {
            if (*layer_surface).output.is_null() {
                // TODO: Actually find the active output
                match self.output_manager.outputs().first() {
                    Some(active_output) => {
                        trace!(
                            "LayersEventHandler::new_surface: Surface did not specify an output, picked: {0}",
                            active_output.description()
                        );
                        (*layer_surface).output = active_output.output;
                    }
                    None => {
                        debug!("LayersEventHandler::new_surface: Closing surface as there are no outputs");
                        wlr_layer_surface_v1_close(layer_surface);
                        return;
                    }
                }
            } else {
                // Verify the requested output is one we actually manage.
                let output = self
                    .output_manager
                    .outputs()
                    .clone()
                    .into_iter()
                    .find(|output| output.raw_ptr() == (*layer_surface).output);
                if let Some(output) = output {
                    trace!(
                        "LayersEventHandler::new_surface: Surface did specify output: {0}",
                        output.description()
                    );
                } else {
                    debug!(
                        "LayersEventHandler::new_surface: Closing surface as it requested an invalid output"
                    );
                    wlr_layer_surface_v1_close(layer_surface);
                    return;
                }
            }
        }
        let surface = LayerSurface(layer_surface);
        let layer = match surface.client_pending().layer() {
            Ok(layer) => layer,
            Result::Err(_) => {
                debug!("LayersEventHandler::new_surface: Closing surface as it requested an invalid layer");
                unsafe {
                    wlr_layer_surface_v1_close(layer_surface);
                }
                return;
            }
        };
        let window = self
            .window_manager
            .new_window(layer, Surface::Layer(surface));
        let mut event_manager = LayerSurfaceEventManager::new(WindowEventHandler {
            wm_policy_manager: self.wm_policy_manager.clone(),
            output_manager: self.output_manager.clone(),
            window_manager: self.window_manager.clone(),
            cursor_manager: self.cursor_manager.clone(),
            window: Rc::downgrade(&window),
            foreign_toplevel_handle: None,
            foreign_toplevel_event_manager: None,
        });
        unsafe {
            event_manager.map(&mut (*layer_surface).events.map);
            event_manager.unmap(&mut (*layer_surface).events.unmap);
            event_manager.destroy(&mut (*layer_surface).events.destroy);
            event_manager.commit(&mut (*(*layer_surface).surface).events.commit);
        }
        *window.event_manager.borrow_mut() = Some(SurfaceEventManager::Layer(event_manager));
        update_anchor_edges(self.output_manager.clone(), &window);
        // Re-apply anchoring whenever the output layout changes; drop the
        // subscription again when the window goes away.
        let output_manager = &self.output_manager;
        let subscription_id = self.output_manager.on_output_layout_change().subscribe(
            listener!(output_manager, window => move || {
                update_anchor_edges(output_manager.clone(), &window);
            }),
        );
        window
            .on_destroy()
            .then(listener!(output_manager => move || {
                output_manager.on_output_layout_change().unsubscribe(subscription_id);
            }));
        self.wm_policy_manager.advise_new_window(window);
    }
}
/// Computes a layer-surface window's size and position on its output from the
/// client's requested anchor edges and margins, issues a configure if the size
/// changed (or the surface was never configured), and moves the window.
///
/// If the surface's output is no longer known, the surface is closed.
fn update_anchor_edges(output_manager: Rc<OutputManager>, window: &Window) {
    if let Surface::Layer(surface) = window.surface() {
        let attached_edges = surface.client_pending().attached_edges();
        let margins = unsafe { (*surface.client_pending().0).margin };
        let configured = unsafe { (*surface.0).configured };
        let output = output_manager
            .outputs()
            .clone()
            .into_iter()
            .find(|output| output.raw_ptr() == unsafe { (*surface.0).output });
        let output = match output {
            Some(output) => output,
            None => {
                error!("LayerShell::update_anchor_edges: Could not find output for layer surface");
                unsafe {
                    wlr_layer_surface_v1_close(surface.0);
                }
                return;
            }
        };
        let mut extents = window.extents();
        // Anchored to both opposite edges: stretch across the output minus margins.
        if attached_edges.contains(WindowEdge::TOP) && attached_edges.contains(WindowEdge::BOTTOM) {
            extents.size.height = output.size().height() - (margins.top + margins.bottom) as i32;
        }
        if attached_edges.contains(WindowEdge::LEFT) && attached_edges.contains(WindowEdge::RIGHT) {
            extents.size.width = output.size().width() - (margins.left + margins.right) as i32;
        }
        // Vertical placement: pin to the anchored edge, otherwise center.
        if attached_edges.contains(WindowEdge::TOP) {
            extents.top_left.y = output.top_left().y() + margins.top as i32;
        } else if attached_edges.contains(WindowEdge::BOTTOM) {
            extents.top_left.y = output.top_left().y() + output.size().height()
                - extents.size.height
                - margins.bottom as i32;
        } else {
            extents.top_left.y =
                output.top_left().y() + output.size().height() / 2 - extents.size.height / 2;
        }
        // Horizontal placement: same scheme as above.
        if attached_edges.contains(WindowEdge::LEFT) {
            extents.top_left.x = output.top_left().x() + margins.left as i32;
        } else if attached_edges.contains(WindowEdge::RIGHT) {
            extents.top_left.x =
                output.top_left().x() + output.size().width() - extents.size.width - margins.right as i32;
        } else {
            extents.top_left.x =
                output.top_left().x() + output.size().width() / 2 - extents.size.width / 2;
        }
        if !configured || extents.size != window.extents().size {
            unsafe {
                wlr_layer_surface_v1_configure(
                    surface.0,
                    extents.size.width as u32,
                    extents.size.height as u32,
                )
            };
        }
        window.move_to(extents.top_left);
    }
}
// Forwards the layer shell's `new_surface` signal to the shared LayersEventHandler.
wayland_listener!(
LayersEventManager,
Rc<RefCell<LayersEventHandler>>,
[
new_surface => new_surface_func: |this: &mut LayersEventManager, data: *mut libc::c_void,| unsafe {
let handler = &mut this.data;
handler.borrow_mut().new_surface(data as _)
};
]
);
/// Owns the wlroots layer shell global plus the listener wiring created in `init`.
/// The fields are kept alive for their destructors/listener storage, hence
/// `#[allow(unused)]`.
#[allow(unused)]
pub(crate) struct LayerShellManager {
    layer_shell: *mut wlr_layer_shell_v1,
    event_manager: Pin<Box<LayersEventManager>>,
    event_handler: Rc<RefCell<LayersEventHandler>>,
}
impl LayerShellManager {
    /// Creates the wlroots layer-shell global on `display` and hooks its
    /// `new_surface` signal up to a `LayersEventHandler`.
    pub(crate) fn init(
        wm_policy_manager: Rc<WmPolicyManager>,
        output_manager: Rc<OutputManager>,
        window_manager: Rc<WindowManager>,
        cursor_manager: Rc<CursorManager>,
        display: *mut wl_display,
    ) -> LayerShellManager {
        debug!("LayerShellManager::init");
        let layer_shell = unsafe { wlr_layer_shell_v1_create(display) };
        let event_handler = Rc::new(RefCell::new(LayersEventHandler {
            wm_policy_manager,
            output_manager,
            window_manager,
            cursor_manager,
        }));
        let mut event_manager = LayersEventManager::new(event_handler.clone());
        unsafe {
            event_manager.new_surface(&mut (*layer_shell).events.new_surface);
        }
        LayerShellManager {
            layer_shell,
            event_manager,
            event_handler,
        }
    }
}
|
use unsafe_hacspec_examples::ec::{arithmetic, p256, p384, Affine};
use hacspec_dev::prelude::*;
use hacspec_lib::prelude::*;
use rayon::prelude::*;
// Deserializable structs mirroring the Wycheproof ECDH JSON test-vector schema
// (top-level file, per-curve test group, individual test case).
create_test_vectors!(
TestVector,
algorithm: String,
generatorVersion: String,
numberOfTests: usize,
header: Vec<Value>, // not used
notes: Option<Value>, // text notes (might not be present), keys correspond to flags
schema: String, // not used
testGroups: Vec<TestGroup>
);
create_test_vectors!(
TestGroup,
curve: String,
encoding: String,
r#type: String,
tests: Vec<Test>
);
create_test_vectors!(
Test,
tcId: usize,
comment: String,
public: String,
private: String,
shared: String,
result: String,
flags: Vec<String>
);
/// Runs every valid, uncompressed-point ECDH test in `tests` against the hacspec
/// point-multiplication implementation for `curve` ("secp256r1" or "secp384r1").
/// Test cases are executed in parallel via rayon.
#[allow(non_snake_case)]
fn run_test<Scalar: UnsignedIntegerCopy, FieldElement: UnsignedIntegerCopy>(
    tests: TestVector,
    curve: &'static str,
) {
    match tests.algorithm.as_ref() {
        "ECDH" => (),
        _ => panic!("This is not an ECDH test vector."),
    };
    for testGroup in tests.testGroups.iter() {
        assert_eq!(testGroup.r#type, "EcdhEcpointTest");
        assert_eq!(testGroup.curve, curve);
        assert_eq!(testGroup.encoding, "ecpoint");
        // Length in hex characters of one coordinate of an uncompressed point.
        let point_len = match curve {
            "secp256r1" => 64,
            "secp384r1" => 96,
            _ => panic!("I don't know that curve"),
        };
        testGroup.tests.par_iter().for_each(|test| {
            println!("Test {:?}: {:?}", test.tcId, test.comment);
            if !test.result.eq("valid") {
                println!("We're only doing valid tests for now.");
                return;
            }
            if test.comment == "compressed public key" {
                // not implemented
                println!("Compressed public keys are not supported.");
                return;
            }
            // "04" marks an uncompressed SEC1 point; x and y follow.
            assert_eq!(&test.public[0..2], "04");
            let k = Scalar::from_hex_string(&test.private);
            // println!("k: {:?}", k);
            let p = Affine(
                FieldElement::from_hex_string(&test.public[2..point_len + 2].to_string()),
                FieldElement::from_hex_string(&test.public[point_len + 2..].to_string()),
            );
            // println!("p: {:?}", p);
            let expected = FieldElement::from_hex_string(&test.shared);
            // println!("expected: {:?}", expected);
            let shared = arithmetic::point_mul(k, p);
            // println!("computed: {:?}", shared);
            // The shared secret is the x coordinate of k*P.
            assert!(shared.0.equal(expected));
        });
    }
}
#[test]
fn test_wycheproof_384_plain() {
    // Wycheproof ECDH vectors for P-384, uncompressed-point encoding.
    run_test::<p384::Scalar, p384::FieldElement>(
        TestVector::from_file("tests/ecdh_secp384r1_ecpoint_test.json"),
        "secp384r1",
    );
}
#[test]
fn test_wycheproof_256_plain() {
    // Wycheproof ECDH vectors for P-256, uncompressed-point encoding.
    run_test::<p256::Scalar, p256::FieldElement>(
        TestVector::from_file("tests/ecdh_secp256r1_ecpoint_test.json"),
        "secp256r1",
    );
}
|
/*
Creates a batch job and task using the data plane APIs
cargo run --package azure_svc_batch --example create_task
*/
use azure_identity::token_credentials::AzureCliCredential;
use azure_svc_batch::models::{JobAddParameter, PoolInformation, TaskAddParameter};
use azure_svc_batch::operations::{job, task};
#[tokio::main]
// Creates a Batch job on an existing pool, then adds one task to it.
// Usage: create_task <account> <region> <pool_id> <job_id> <task_id>
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Positional CLI arguments; each is required.
    let account_name = std::env::args().nth(1).expect("please specify batch account");
    let region = std::env::args().nth(2).expect("please specify region");
    let pool_id = std::env::args().nth(3).expect("please specify pool");
    let job_id = std::env::args().nth(4).expect("please specify job_id");
    let task_id = std::env::args().nth(5).expect("please specify task_id");
    // Data-plane endpoint for this account/region.
    let base_path = format!("https://{}.{}.batch.azure.com", account_name, region);
    let http_client = azure_core::new_http_client();
    // Authenticate with the Azure CLI's cached credentials.
    let token_credential = Box::new(AzureCliCredential {});
    let config = &azure_svc_batch::config(http_client, token_credential)
        .base_path(base_path)
        .token_credential_resource("https://batch.core.windows.net/")
        .build();
    // Attach the new job to the existing pool (no auto-pool).
    let pool_id = Some(pool_id);
    let pool_info = PoolInformation {
        pool_id,
        auto_pool_specification: None,
    };
    let job_params = JobAddParameter {
        id: job_id.to_string(),
        display_name: None,
        priority: None,
        max_parallel_tasks: None,
        constraints: None,
        job_manager_task: None,
        job_preparation_task: None,
        job_release_task: None,
        common_environment_settings: vec![],
        pool_info,
        on_all_tasks_complete: None,
        on_task_failure: None,
        metadata: vec![],
        uses_task_dependencies: None,
        network_configuration: None,
    };
    println!("creating job");
    job::add(&config, &job_params, None, None, None, None).await?;
    let constraints = None;
    // The task simply echoes a greeting on the compute node.
    let command_line = "echo hello there".to_string();
    let task = TaskAddParameter {
        affinity_info: None,
        application_package_references: vec![],
        authentication_token_settings: None,
        container_settings: None,
        constraints,
        command_line,
        display_name: None,
        environment_settings: vec![],
        depends_on: None,
        exit_conditions: None,
        id: task_id.to_string(),
        multi_instance_settings: None,
        required_slots: None,
        resource_files: vec![],
        output_files: vec![],
        user_identity: None,
    };
    println!("creating task");
    task::add(&config, &job_id, &task, None, None, None, None).await?;
    Ok(())
}
|
use crate::schema::statement_of_accounts;
use chrono::NaiveDateTime;
use serde::{Deserialize, Serialize};
/// A statement-of-account row as stored in the database and exposed
/// over GraphQL. Maps to the `statement_of_accounts` table columns.
#[derive(GraphQLObject, Queryable, Debug, Serialize, Deserialize)]
pub struct StatementOfAccount {
    pub id: i32,
    pub description: Option<String>,
    /// Start of the period the statement covers.
    pub starting: NaiveDateTime,
    /// End of the period the statement covers.
    pub ending: NaiveDateTime,
    /// When the row was created.
    pub added: NaiveDateTime,
    /// When the row was last modified, if ever.
    pub changed: Option<NaiveDateTime>,
}
/// Insertable form of a statement of account: everything except the
/// database-assigned `id`.
#[derive(Insertable)]
#[table_name = "statement_of_accounts"]
pub struct NewStatementOfAccount {
    pub description: Option<String>,
    pub starting: NaiveDateTime,
    pub ending: NaiveDateTime,
    pub added: NaiveDateTime,
    pub changed: Option<NaiveDateTime>,
}
impl NewStatementOfAccount {
pub fn from_input(input: InputStatementOfAccount) -> Self {
Self {
description: input.description,
starting: input.starting,
ending: input.ending,
added: chrono::Utc::now().naive_utc(),
changed: None,
}
}
}
/// GraphQL input for creating a statement of account; timestamps
/// (`added`/`changed`) are filled in server-side.
#[derive(GraphQLInputObject, Deserialize)]
pub struct InputStatementOfAccount {
    pub description: Option<String>,
    pub starting: NaiveDateTime,
    pub ending: NaiveDateTime,
}
/// GraphQL input / diesel changeset for partial updates: `None` fields
/// are left untouched by `AsChangeset`.
#[derive(GraphQLInputObject, AsChangeset, Deserialize)]
#[table_name = "statement_of_accounts"]
pub struct UpdateStatementOfAccount {
    pub description: Option<String>,
    pub starting: Option<NaiveDateTime>,
    pub ending: Option<NaiveDateTime>,
}
|
pub mod proxy_info;
pub use crate::proxy_info::{
Anonymity,
ProxyInfo,
ProxyInfoError,
};
pub use isocountry::CountryCode;
use select::{
document::Document,
predicate::{
Attr,
Name,
},
};
use std::time::Duration;
/// Convenience alias for results of this crate's fallible operations.
pub type ProxyResult<T> = Result<T, ProxyError>;
/// Errors that can occur while fetching or parsing the proxy list.
#[derive(Debug)]
pub enum ProxyError {
    /// HTTP transport failure.
    Reqwest(reqwest::Error),
    /// I/O failure.
    Io(std::io::Error),
    /// The expected proxy table was missing from the scraped page.
    MissingTable,
}
// An error enum should be usable as a real error: implement `Display`
// and `std::error::Error` so callers can `Box<dyn Error>` / chain sources.
impl std::fmt::Display for ProxyError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            Self::Reqwest(e) => write!(f, "http error: {}", e),
            Self::Io(e) => write!(f, "io error: {}", e),
            Self::MissingTable => write!(f, "missing proxy table"),
        }
    }
}
impl std::error::Error for ProxyError {
    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
        match self {
            Self::Reqwest(e) => Some(e),
            Self::Io(e) => Some(e),
            Self::MissingTable => None,
        }
    }
}
// Allow `?` on reqwest calls inside functions returning `ProxyResult`.
impl From<reqwest::Error> for ProxyError {
    fn from(e: reqwest::Error) -> Self {
        Self::Reqwest(e)
    }
}
// Allow `?` on I/O calls inside functions returning `ProxyResult`.
impl From<std::io::Error> for ProxyError {
    fn from(e: std::io::Error) -> Self {
        Self::Io(e)
    }
}
/// Scraper client for free-proxy-list.net.
#[derive(Default)]
pub struct Client {
    // Underlying HTTP client, reused across requests.
    client: reqwest::Client,
}
impl Client {
    /// Creates a client backed by a fresh `reqwest::Client`.
    pub fn new() -> Self {
        Client {
            client: reqwest::Client::new(),
        }
    }
    /// Downloads the free-proxy-list.net front page and parses its proxy table.
    ///
    /// Each table row is parsed independently, so a malformed row yields an
    /// `Err(ProxyInfoError)` element instead of failing the whole call.
    pub async fn get_list(&self) -> ProxyResult<Vec<Result<ProxyInfo, ProxyInfoError>>> {
        let res = self
            .client
            .get("https://free-proxy-list.net/")
            .send()
            .await?;
        let text = res.text().await?;
        let doc = Document::from(text.as_str());
        // Locate the proxy table by element id; `last()` drains the match
        // iterator and keeps the final hit.
        let table = doc
            .find(Attr("id", "proxylisttable"))
            .last()
            .ok_or(ProxyError::MissingTable)?;
        // Every <tr> inside <tbody> is one proxy entry.
        let table_body = table
            .find(Name("tbody"))
            .last()
            .ok_or(ProxyError::MissingTable)?
            .find(Name("tr"))
            .map(ProxyInfo::from_node)
            .collect::<Vec<_>>();
        Ok(table_body)
    }
}
/// Probes each proxy by routing a request to http://whatsmyip.me/ through it.
///
/// Returns one bool per input proxy, in input order: `true` only if the proxy
/// could be built into a client, the request succeeded with a success status,
/// and the body was readable — all within `timeout`. All probes run
/// concurrently via `join_all`.
pub async fn probe<'a, T: Iterator<Item = &'a ProxyInfo>>(iter: T, timeout: Duration) -> Vec<bool> {
    use futures::future::join_all;
    let iter = iter.map(|info| async move {
        let url = info.get_url();
        // Any failure along the way marks this proxy as dead (false).
        let proxy = match reqwest::Proxy::all(&url) {
            Ok(p) => p,
            Err(_) => return false,
        };
        let client = match reqwest::Client::builder()
            .timeout(timeout)
            .proxy(proxy)
            .build()
        {
            Ok(c) => c,
            Err(_) => return false,
        };
        let res = match client.get("http://whatsmyip.me/").send().await {
            Ok(r) => r,
            Err(_) => {
                return false;
            }
        };
        if !res.status().is_success() {
            return false;
        }
        res.text().await.is_ok()
    });
    join_all(iter).await
}
|
pub mod labeling;
pub mod translation;
pub mod axiom_translation;
pub mod class_translation;
pub mod property_translation;
|
use super::super::context::*;
use super::super::error::*;
use super::super::traits::WorkType;
use super::super::utils::*;
use super::super::work::{WorkBox, WorkOutput};
use conveyor::ConveyorError;
use conveyor::*;
use conveyor_http::{Http as WHttp, HttpResponse, HttpResponseReader, Url};
use conveyor_work::http::{HttpOptions, Method};
use conveyor_work::prelude::*;
use slog::Logger;
use std::pin::Pin;
use std::sync::Arc;
/// Station that converts an `HttpResponse` into its byte stream.
#[derive(Clone, Debug)]
pub struct HttpResponseStream;
impl Station for HttpResponseStream {
    type Input = HttpResponse;
    // NOTE(review): bare trait object without `dyn` — pre-2018-idiom syntax;
    // kept as-is since this crate targets an older (nightly) compiler.
    type Output =
        Pin<Box<conveyor::futures::stream::Stream<Item = Result<Vec<u8>>> + Send + 'static>>;
    type Future = conveyor::futures::future::Ready<Result<Self::Output>>;
    // Resolves immediately with the response's body stream.
    fn execute(&self, mut input: Self::Input) -> Self::Future {
        conveyor::futures::future::ready(Ok(Box::pin(input.stream())))
    }
}
/// Serializable work-type configuration for an HTTP fetch step.
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct Http {
    /// HTTP method to use; defaults to GET when absent (see `request_station`).
    #[serde(skip_serializing_if = "Option::is_none")]
    pub method: Option<Method>,
}
#[typetag::serde]
impl WorkType for Http {
    /// Builds the work station performing the HTTP request: the package
    /// content is read as a JSON string, parsed as a URL, fetched through
    /// the `WHttp` conveyor, and the streamed body stored back into the
    /// package.
    fn request_station(&self, ctx: &mut Context) -> CrawlResult<WorkBox<Package>> {
        // Default to GET when no method was configured.
        let method = self.method.as_ref().unwrap_or(&Method::GET).clone();
        let http = WHttp::new().pipe(HttpResponseStream);
        let log = ctx
            .log()
            .new(o!("worktype" => "http", "method" => format!("{:?}", method)));
        info!(log, "request http station");
        // NOTE(review): `async move` closures and the `await!` macro are
        // pre-stabilization (2018 nightly) syntax; kept as-is.
        Ok(into_box(station_fn_ctx2(
            async move |mut package: Package,
                        ctx: Arc<(Conveyor<WHttp, HttpResponseStream>, Method, Logger)>| {
                let body = await!(package.read_content())?;
                // The package content must be a JSON-encoded URL string.
                let json: String =
                    serde_json::from_slice(&body).map_err(|e| ConveyorError::new(e))?;
                let url = Url::parse(&json).map_err(|e| ConveyorError::new(e))?;
                info!(ctx.2, "making request"; "url" => url.as_str());
                let options = HttpOptions::new(ctx.1.clone(), url);
                let body = await!(ctx.0.execute(options.to_request()))?;
                info!(ctx.2, "request done"; "url" => &json);
                Ok(vec![WorkOutput::Result(Ok(package.set_value(body)))])
            },
            Arc::new((http, method, log)),
        )))
    }
    /// Clones this work type behind a trait object.
    fn box_clone(&self) -> Box<WorkType> {
        Box::new(self.clone())
    }
}
#[cfg(test)]
mod tests {
    use super::super::super::prelude::*;
    use super::super::super::utils::WorkBoxWrapper;
    use super::super::super::work;
    use super::*;
    use conveyor_work::http::Method;
    use conveyor_work::prelude::*;
    use serde_json::Value;
    use slog::Logger;
    use tokio;
    #[test]
    fn test_http() {
        // NOTE(review): the entire test body is commented out, so this only
        // asserts that `tokio::run_async` can drive an empty async block.
        // Re-enable or delete the dead code below.
        tokio::run_async(
            async {
                // let http = Http {
                //     method: Some(Method::GET),
                // };
                // let mut ctx = MockContext;
                // let station = http.request_station(&mut ctx).unwrap();
                // let worker = work::Worker::new();
                // // worker.run(vec![Work::new(
                // //     Package::new("test", "https://distrowatch.com/"),
                // //     WorkBoxWrapper::new(station),
                // // )]);
                // // let pack =
                // //     await!(station.execute(Package::new("test", "https://distrowatch.com/")))
                // //         .unwrap();
                // let pack = await!(worker.run(vec![work::Work::new(
                //     Package::new("test", Value::String("https://distrowatch.com/".to_owned())),
                //     WorkBoxWrapper::new(station),
                // )]));
                // for p in pack {
                //     if let Ok(ret) = p {
                //         println!("name {}", ret.name());
                //     } else {
                //         println!("Err {:?}", p.err());
                //     }
                // }
            },
        );
    }
}
|
//! Implements mesh generation for sectors.
//!
//! Each sector is a small renderable chunk of the voxel world,
//! and is assigned a VAO in the form of a ``Tesselation``.
//! This module provides the logic that generates a list of vertex
//! attributes from a list of voxels.
//!
//! In other words, it makes models for the sectors.
use super::data::{SectorCoords, SectorData, SECTOR_MAX, SECTOR_MIN};
use crate::{
block::Block,
side::Side,
vertexattrib::{PosAttrib, UvAttrib, VoxelVertex},
};
use png::OutputInfo;
use std::ops::Add;
/// Stores vertex attributes and indices in memory.
///
/// This structure provides a way to store vertices
/// until they are uploaded to graphics memory by
/// constructing a ``Tess``.
pub struct PreGeometry {
    /// One entry per generated vertex (position + UV).
    pub vertices: Vec<VoxelVertex>,
    /// Triangle indices into ``vertices`` (six per face, two triangles).
    pub indices: Vec<u32>,
}
// Visual length of the cube sides in
// OpenGL model units.
// const EDGE_LEN: f32 = 1.;
// Square edge length of an individual
// texture on the texture atlas in pixels.
const TILE_SIZE: u32 = 16;
// Floating-point representation of the
// ``TILE_SIZE`` constant.
const TILE_SIZE_F32: f32 = TILE_SIZE as f32;
// Stores all information needed to represent
// a single face of a cube block.
#[rustfmt::skip]
#[derive(Clone, Debug)]
// Combined with a block's texture id, one `Face` yields the four vertices
// of a cube face (see `gen_terrain` / `tex_coord`).
struct Face {
    side: Side,            // Which side is the face on, or which block is adjacent to this face?
    positions: [usize; 4], // Indices into the POSITIONS constant.
    flip_u: bool,          // Whether to flip the U coords.
    flip_v: bool,          // Whether to flip the V coords.
    u_idx: usize,          // Does the U coord correspond to X, Y, or Z?
    v_idx: usize,          // Does the V coord correspond to X, Y, or Z?
}
impl Face {
    /// Const constructor, so the ``FACES`` table can be built in a
    /// constant context.
    const fn new(
        side: Side,
        positions: [usize; 4],
        flip_u: bool,
        flip_v: bool,
        u_idx: usize,
        v_idx: usize,
    ) -> Face {
        Face {
            side,
            positions,
            flip_u,
            flip_v,
            u_idx,
            v_idx,
        }
    }
}
#[rustfmt::skip]
// The six faces of a unit cube. The order of each ``positions`` quadruple
// determines the winding of the two triangles emitted per face.
const FACES: [Face; 6] = [
    Face::new(Side::Front,     [4, 5, 6, 7], false, false, 0, 1), // front
    Face::new(Side::Back,      [3, 2, 1, 0], true,  false, 0, 1), // back
    Face::new(Side::RightSide, [2, 6, 5, 1], true,  false, 2, 1), // right side
    Face::new(Side::LeftSide,  [7, 3, 0, 4], false, false, 2, 1), // left side
    Face::new(Side::Top,       [7, 6, 2, 3], false, true,  0, 2), // top
    Face::new(Side::Bottom,    [0, 1, 5, 4], false, false, 0, 2), // bottom
];
/// Relative positions of the vertices on
/// a cube. There are eight *unique* positions,
/// even though each of the six faces will eventually
/// have four of its own vertices.
///
/// ``Face::positions`` holds indices into this array.
const POSITIONS: [[f32; 3]; 8] = [
    [0., 0., 0.],
    [1., 0., 0.],
    [1., 1., 0.],
    [0., 1., 0.],
    [0., 0., 1.],
    [1., 0., 1.],
    [1., 1., 1.],
    [0., 1., 1.],
];
/// Generate the mesh for the given ``SectorData``.
///
/// If there are visible voxels in the data, their
/// vertices are added to the pre-geometry, which
/// is returned in a ``Some<PreGeometry>``.
///
/// If, on the other hand, there are no visible voxels
/// in the sector data, ``None`` is returned.
///
/// * ``tex_info`` - dimensions of the texture atlas image, used to
///   normalize the generated texture coordinates.
/// * ``voxels`` - the sector's voxel data to turn into a mesh.
pub fn gen_terrain(tex_info: &OutputInfo, voxels: &SectorData) -> Option<PreGeometry> {
    // Initialize empty vectors to hold the vertex
    // attribute data that will be generated.
    // Also, keep track of the last index, as the
    // voxels are drawn with Indexed Rendering.
    let mut vertices = Vec::new();
    let mut indices: Vec<u32> = Vec::new();
    let mut current_index = 0;
    // For every ``Block``, or voxel, in the sector, we
    // will need to draw between zero and six faces.
    for (coords, blk) in voxels {
        // Pull the x, y, z components out of the iterator's
        // Item for the sake of readability.
        let SectorCoords(x, y, z) = coords;
        // If a block lies in the padding range of a sector
        // it should only be rendered by the neighboring sector.
        // Skip it.
        if x == SECTOR_MIN
            || x == SECTOR_MAX
            || y == SECTOR_MIN
            || y == SECTOR_MAX
            || z == SECTOR_MIN
            || z == SECTOR_MAX
        {
            continue;
        }
        // If a block is air, it doesn't have any geometry,
        // and is skipped.
        if *blk == Block::Air {
            continue;
        }
        // The coordinates of the block will be needed as
        // floating-point quantities as well.
        // Cast them here.
        let factors = (x as f32, y as f32, z as f32);
        // Now, each cube has six faces.
        // For each face, we check if the face is occluded,
        // or blocked by another voxel. If it is, we skip it
        // for performance. Otherwise, we generate the four
        // vertices for that square cube face.
        //
        // The face attributes are hardcoded in the FACES
        // constant above.
        for f in &FACES {
            // Check if the neighboring block occludes the face
            // we are drawing.
            if let Some(adj_coords) = coords.neighbor(f.side) {
                // Look up the adjacent block.
                let adj_block = voxels.block(adj_coords);
                // If it does, skip drawing this face of block.
                if !adj_block.is_transparent() {
                    continue;
                }
            }
            // If we are here, we are drawing one of the faces
            // of the cube.
            //
            // Each face has four vertices, so the loop below
            // will run four times, once for each vertex in the
            // quadrilateral face.
            //
            // pos_idx is (a reference to) an index into the hardcoded
            // array of relative ``POSITIONS`` above.
            for pos_idx in &f.positions {
                let pos_idx = *pos_idx;
                // Add the vertex to the list of vertices that will be
                // stored in the vertex buffer.
                //
                // The position must be converted from the relative cube
                // position into the sector space. This is done by adding
                // a different offset to each component, so that the origin
                // of the cube in the correct "slot" in the sector grid.
                //
                // As for the texture coordinate, it is calculated dynamically
                // from the relative positions by the tex_coord function below.
                vertices.push(VoxelVertex {
                    pos: PosAttrib::new(translate3(POSITIONS[pos_idx], factors)),
                    uv: UvAttrib::new(tex_coord(tex_info, blk, POSITIONS[pos_idx], f)),
                });
            }
            // Each face uses the same relative set of indices
            // for indexed rendering. Push the first triangle...
            indices.push(current_index);
            indices.push(current_index + 1);
            indices.push(current_index + 2);
            // ... and the second.
            indices.push(current_index);
            indices.push(current_index + 2);
            indices.push(current_index + 3);
            // Each face has four vertices, so increment our
            // counter by that fixed step.
            current_index += 4;
        }
    }
    if current_index == 0 {
        // In this case, there were no visible blocks
        // in the sector, so None is returned for the
        // model.
        return None;
    }
    Some(PreGeometry { vertices, indices })
}
/// Returns ``orig`` translated component-wise by the offsets in
/// ``factors`` (the block's position in the sector grid).
///
/// The offset is deliberately the left-hand operand, preserving the
/// original operand order for any non-commutative ``Add`` impl.
fn translate3<T>(orig: [T; 3], factors: (T, T, T)) -> [T; 3]
where
    T: Add<Output = T> + Copy,
{
    let (dx, dy, dz) = factors;
    [dx + orig[0], dy + orig[1], dz + orig[2]]
}
/// Calculate the texture coordinate for a vertex, given the relative
/// cube position of the vertex and necessary metadata.
///
/// The textures for the world are stored on a texture atlas.
/// An individual texture on the atlas is called a "tile".
///
/// The texture coordinates are derived directly from the relative
/// cube positions, passed as ``orig`` (for "original").
///
/// However, there is a complication. Depending on whether the face
/// is on the side, top, or bottom of the cube, the 2D texture coodinates
/// must be pulled from a different two components of the 3D vertex
/// positions. For the front, the texture coords are derived from the
/// X and Y positions, while for the top, they are derived from the X and
/// Z coordinates. The ``Face`` struct contains this information in the
/// form of two fields, ``u_idx`` and ``v_idx``, that indicate which
/// element of ``orig`` is representative of the texture coordinate
/// component in question.
///
/// Another problem remains: for any given face on the cube, the opposing
/// face uses the same ``u_idx`` and ``v_idx``, but the texture coordinates
/// are flipped over either the U or V axis. To address this problem, a
/// ``Face`` also stores boolean ``flip_u`` and ``flip_v`` fields that
/// indicate whether the respective component of the texture coordinate
/// should be inverted.
///
/// The two remaining arguments are ``tex_info`` and ``blk``.
///
/// ``tex_info`` is simply used to query the size of the texture atlas
/// as a whole. This is necessary because OpenGL uses texture coordinate
/// components in the relative range [0, 1], but the algorithm initially
/// determines the texture coordinate in absolute pixel coordinates.
/// Dividing by the width or height of the atlas yields the needed relative
/// position.
///
/// ``blk`` is the block that we are creating the texture coordinate for.
/// It is used to select the correct tile from the atlas.
#[rustfmt::skip]
fn tex_coord(tex_info: &OutputInfo, blk: &Block, orig: [f32; 3], face: &Face) -> [f32; 2] {
    // Alias some common values.
    let flip_u = face.flip_u;
    let flip_v = face.flip_v;
    let u_idx = face.u_idx;
    let v_idx = face.v_idx;
    let blk_side = face.side;
    // Query the size of the entire texture atlas.
    let (width, height) = (tex_info.width, tex_info.height);
    // Determine the number of tiles there are in a single row
    // of the texture atlas. (A per-column count was previously
    // computed here as well, but it was never used — removed.)
    let tiles_per_row = width / TILE_SIZE;
    // Determine the texture coordinate with respect to the *tile*.
    // These values will be in the open range [0, 1].
    //
    // V is reversed since textures have an inverted y-axis.
    let tile_u = if flip_u { -orig[u_idx] + 1. } else { orig[u_idx] };
    let tile_v = if flip_v { orig[v_idx] } else { -orig[v_idx] + 1. };
    // A small (half-pixel) adjustment needs to be added or subtracted to or from
    // the ``tile_u`` and ``tile_v`` values.
    //
    // The offset is equal to 1 / 256 for a tile size of 16, which allows the
    // texture coordinate to lie just within the bounds of the target pixel,
    // rather than exactly the edge.
    //
    // Without this offset, fragments from the neighboring tile may be erroneously
    // rendered.
    let offset = 1. / (16. * TILE_SIZE_F32);
    let tile_u_adj = if tile_u < 0.5 { tile_u + offset } else { tile_u - offset };
    let tile_v_adj = if tile_v < 0.5 { tile_v + offset } else { tile_v - offset };
    // Determine the block's texture id, and convert it to a f32.
    // For some blocks, the texture depends on which side of the
    // block is in consideration, so the ``texture_id`` method
    // also takes the ``side`` field from our ``Face``.
    let blk_id = blk.texture_id(blk_side);
    let atlas_u = (blk_id % tiles_per_row) as f32;
    let atlas_v = (blk_id / tiles_per_row) as f32;
    // Select the correct corner of the tile in question.
    [(tile_u_adj + atlas_u) * TILE_SIZE_F32 / width as f32,
     (tile_v_adj + atlas_v) * TILE_SIZE_F32 / height as f32]
}
|
use std::{env, path::Path, process::Command};
/// Fetches, configures and builds the vendored `cfltk`/FLTK C++ sources
/// for the given target triple, emitting the appropriate cargo
/// `rerun-if-*` directives.
///
/// * `manifest_dir` - the crate's manifest directory (git submodule root).
/// * `target_triple` - the cargo target triple being built for.
/// * `out_dir` - cargo's OUT_DIR; used by the Android build path.
pub fn build(manifest_dir: &Path, target_triple: &str, out_dir: &Path) {
    // Rebuild when the chosen C/C++ compilers change.
    println!("cargo:rerun-if-env-changed=CC");
    println!("cargo:rerun-if-env-changed=CXX");
    // Rebuild when any of the vendored cfltk sources change.
    // (Table-driven: this replaces one println! per file.)
    const TRACKED_FILES: &[&str] = &[
        "cfltk/CMakeLists.txt",
        "cfltk/include/cfl.h",
        "cfltk/include/cfl_widget.h",
        "cfltk/include/cfl_group.h",
        "cfltk/include/cfl_input.h",
        "cfltk/include/cfl_output.h",
        "cfltk/include/cfl_window.h",
        "cfltk/include/cfl_button.h",
        "cfltk/include/cfl_box.h",
        "cfltk/include/cfl_menu.h",
        "cfltk/include/cfl_dialog.h",
        "cfltk/include/cfl_valuator.h",
        "cfltk/include/cfl_browser.h",
        "cfltk/include/cfl_misc.h",
        "cfltk/include/cfl_text.h",
        "cfltk/include/cfl_image.h",
        "cfltk/include/cfl_draw.h",
        "cfltk/include/cfl_table.h",
        "cfltk/include/cfl_surface.h",
        "cfltk/include/cfl_printer.h",
        "cfltk/src/cfl_global.hpp",
        "cfltk/src/cfl_new.cpp",
        "cfltk/src/cfl.cpp",
        "cfltk/src/cfl_widget.cpp",
        "cfltk/src/cfl_group.cpp",
        "cfltk/src/cfl_window.cpp",
        "cfltk/src/cfl_button.cpp",
        "cfltk/src/cfl_box.cpp",
        "cfltk/src/cfl_menu.cpp",
        "cfltk/src/cfl_dialog.cpp",
        "cfltk/src/cfl_valuator.cpp",
        "cfltk/src/cfl_browser.cpp",
        "cfltk/src/cfl_misc.cpp",
        "cfltk/src/cfl_text.cpp",
        "cfltk/src/cfl_image.cpp",
        "cfltk/src/cfl_input.cpp",
        "cfltk/src/cfl_output.cpp",
        "cfltk/src/cfl_draw.cpp",
        "cfltk/src/cfl_table.cpp",
        "cfltk/src/cfl_tree.cpp",
        "cfltk/src/cfl_surface.cpp",
        "cfltk/src/cfl_printer.cpp",
    ];
    for file in TRACKED_FILES {
        println!("cargo:rerun-if-changed={}", file);
    }
    // Fetch the fltk sources via the git submodule.
    Command::new("git")
        .args(&["submodule", "update", "--init", "--recursive"])
        .current_dir(manifest_dir)
        .status()
        .expect("Git is needed to retrieve the fltk source files!");
    // Android and Windows need a patched fltk tree; the patch is applied
    // here and reverted again at the end of this function.
    let needs_patch = target_triple.contains("android") || target_triple.contains("windows");
    if needs_patch {
        Command::new("git")
            .args(&["apply", "../fltk.patch"])
            .current_dir(manifest_dir.join("cfltk").join("fltk"))
            .status()
            .expect("Git is needed to retrieve the fltk source files!");
    }
    if !target_triple.contains("android") {
        let mut dst = cmake::Config::new("cfltk");
        if cfg!(feature = "fltk-shared") {
            dst.define("CFLTK_BUILD_SHARED", "ON");
        }
        if cfg!(feature = "use-ninja") || crate::utils::has_program("ninja") {
            dst.generator("Ninja");
        }
        if cfg!(feature = "system-fltk") {
            dst.define("USE_SYSTEM_FLTK", "ON");
        }
        // System libpng is the default on platforms that reliably ship it.
        let use_system_png = cfg!(feature = "system-libpng")
            || (!target_triple.contains("apple")
                && !target_triple.contains("windows")
                && !target_triple.contains("android"));
        dst.define(
            "OPTION_USE_SYSTEM_LIBPNG",
            if use_system_png { "ON" } else { "OFF" },
        );
        dst.define(
            "OPTION_USE_SYSTEM_LIBJPEG",
            if cfg!(feature = "system-libjpeg") { "ON" } else { "OFF" },
        );
        dst.define(
            "OPTION_USE_SYSTEM_ZLIB",
            if cfg!(feature = "system-zlib") { "ON" } else { "OFF" },
        );
        dst.define(
            "CFLTK_LINK_IMAGES",
            if cfg!(feature = "no-images") { "OFF" } else { "ON" },
        );
        dst.define(
            "OpenGL_GL_PREFERENCE",
            if cfg!(feature = "legacy-opengl") { "LEGACY" } else { "GLVND" },
        );
        // OPTION_USE_GL and CFLTK_USE_OPENGL are always toggled together.
        let use_gl = if cfg!(feature = "enable-glwindow") { "ON" } else { "OFF" };
        dst.define("OPTION_USE_GL", use_gl);
        dst.define("CFLTK_USE_OPENGL", use_gl);
        if let Ok(toolchain) = env::var("CFLTK_TOOLCHAIN") {
            dst.define("CMAKE_TOOLCHAIN_FILE", &toolchain);
        }
        if target_triple.contains("linux") && !target_triple.contains("android") {
            dst.define(
                "OPTION_USE_PANGO",
                if cfg!(feature = "no-pango") { "OFF" } else { "ON" },
            );
        }
        if target_triple.contains("unknown-linux-musl") {
            dst.define("CMAKE_C_COMPILER", "musl-gcc");
            dst.define("CMAKE_CXX_COMPILER", "musl-gcc");
            dst.define("HAVE_STRLCPY", "False");
            dst.define("HAVE_STRLCAT", "False");
        }
        let _dst = dst
            .profile("Release")
            .define("CMAKE_EXPORT_COMPILE_COMMANDS", "ON")
            .define("FLTK_BUILD_EXAMPLES", "OFF")
            .define("FLTK_BUILD_TEST", "OFF")
            .define("OPTION_USE_THREADS", "ON")
            .define("OPTION_LARGE_FILE", "ON")
            .define("OPTION_BUILD_HTML_DOCUMENTATION", "OFF")
            .define("OPTION_BUILD_PDF_DOCUMENTATION", "OFF")
            .build();
    } else {
        // `target_triple` is already a `&str`; no extra borrow needed.
        crate::android::build(out_dir, target_triple);
    }
    // Undo the fltk patch so the submodule tree stays clean.
    if needs_patch {
        Command::new("git")
            .args(&["reset", "--hard"])
            .current_dir(manifest_dir.join("cfltk").join("fltk"))
            .status()
            .expect("Git is needed to retrieve the fltk source files!");
    }
}
|
pub mod get_report_entries;
pub mod types;
|
use std::io::{Write};
use super::Generator;
use super::ast::{Code};
use super::ast::Statement::{Var, Expr};
use super::ast::Expression::{Call, Name, Str, Attr};
impl<'a, W:Write+'a> Generator<'a, W> {
    /// Prepends to `code` the JS statements that inject `css` into the
    /// page: create a `<style>` element, fill it with a text node holding
    /// the CSS, and append it to `document.head`.
    pub fn add_css(&self, code: Code, css: &str) -> Code {
        // var _style = document.createElement('style')
        let create_style = Var(
            String::from("_style"),
            Call(
                Box::new(Attr(
                    Box::new(Name(String::from("document"))),
                    String::from("createElement"),
                )),
                vec![Str(String::from("style"))],
            ),
        );
        // _style.appendChild(document.createTextNode(css_text))
        let fill_style = Expr(Call(
            Box::new(Attr(
                Box::new(Name(String::from("_style"))),
                String::from("appendChild"),
            )),
            vec![Call(
                Box::new(Attr(
                    Box::new(Name(String::from("document"))),
                    String::from("createTextNode"),
                )),
                vec![Str(css.to_string())],
            )],
        ));
        // document.head.appendChild(_style)
        let attach_style = Expr(Call(
            Box::new(Attr(
                Box::new(Attr(
                    Box::new(Name(String::from("document"))),
                    String::from("head"),
                )),
                String::from("appendChild"),
            )),
            vec![Name(String::from("_style"))],
        ));
        // Tail expression instead of `return`; `chain` accepts any
        // IntoIterator, so the explicit `.into_iter()` on the vec is gone.
        Code {
            statements: vec![create_style, fill_style, attach_style]
                .into_iter()
                .chain(code.statements)
                .collect(),
        }
    }
}
|
use super::super::data::DataReader;
use super::super::entry::Entry;
use super::super::env::SeriesEnv;
use super::super::error::Error;
use super::super::file_system::{FileKind, OpenMode};
use std::collections::VecDeque;
use std::sync::Arc;
/// Read handle over a time series; cheap to create, holds only the
/// shared series environment.
pub struct SeriesReader {
    env: Arc<SeriesEnv>,
}
impl SeriesReader {
    /// Creates a reader over the series described by `env`.
    pub fn create(env: Arc<SeriesEnv>) -> Result<SeriesReader, Error> {
        // `env` is moved in by value, so it can be stored directly; the
        // original `env.clone()` bumped the Arc refcount only to drop it.
        Ok(SeriesReader { env })
    }
    /// Builds an iterator over entries with timestamp >= `from_ts`.
    ///
    /// The starting data offset is resolved through the index against the
    /// current commit point (falling back to 0 when the index has no
    /// entry — presumably "start of file"; verify against the index impl),
    /// and iteration never reads past the committed data size captured here.
    pub fn iterator(&self, from_ts: i64) -> Result<SeriesIterator, Error> {
        let commit = self.env.commit_log().current();
        let start_offset = self
            .env
            .index()
            .ceiling_offset(from_ts, commit.index_offset)?
            .unwrap_or(0);
        Ok(SeriesIterator {
            data_reader: DataReader::create(
                self.env.dir().open(FileKind::Data, OpenMode::Read)?,
                start_offset,
            )?,
            offset: start_offset,
            size: commit.data_offset,
            from_ts,
            buffer: VecDeque::new(),
        })
    }
}
/// Iterator over series entries, reading data blocks lazily.
pub struct SeriesIterator {
    data_reader: DataReader,
    // Current read offset; advanced by each block read.
    offset: u32,
    // Committed data size captured at creation; iteration stops here.
    size: u32,
    // Entries with ts below this are skipped.
    from_ts: i64,
    // Entries of the most recently read block, not yet yielded.
    buffer: VecDeque<Entry>,
}
impl SeriesIterator {
    /// Reads the next data block (if any remains before `size`) into
    /// `buffer`, then discards leading entries older than `from_ts`.
    fn fetch_block(&mut self) -> Result<(), Error> {
        // Nothing left to read once the committed size is reached.
        if self.offset >= self.size {
            return Ok(());
        }
        let (entries, next_offset) = self.data_reader.read_block()?;
        self.offset = next_offset;
        self.buffer = entries.into();
        // Drop entries preceding the requested start timestamp from the
        // front of the freshly loaded block.
        while let Some(entry) = self.buffer.front() {
            if entry.ts >= self.from_ts {
                break;
            }
            self.buffer.pop_front();
        }
        Ok(())
    }
}
impl Iterator for SeriesIterator {
    type Item = Result<Entry, Error>;
    /// Yields the next entry, fetching the next data block when the
    /// in-memory buffer runs dry. I/O failures surface as `Some(Err(_))`.
    fn next(&mut self) -> Option<Self::Item> {
        if self.buffer.is_empty() {
            if let Err(error) = self.fetch_block() {
                return Some(Err(error));
            }
        }
        // `match { Some(e) => Some(Ok(e)), _ => None }` is exactly `map(Ok)`.
        self.buffer.pop_front().map(Ok)
    }
}
|
use crate::ray::Ray;
use crate::vec3::Vec3;
/// Simple axis-aligned pinhole camera described by its origin and the
/// viewport plane (lower-left corner plus horizontal/vertical extents).
pub struct Camera {
    origin: Vec3,
    lower_left_corner: Vec3,
    horizontal: Vec3,
    vertical: Vec3,
}
impl Camera {
    /// Creates the fixed default camera: origin at (0, 0, 0), looking at
    /// a 4x2 viewport plane whose lower-left corner is (-2, -1, -1).
    pub fn new() -> Self {
        Camera {
            origin: Vec3::new(0., 0., 0.),
            lower_left_corner: Vec3::new(-2., -1., -1.),
            horizontal: Vec3::new(4., 0., 0.),
            vertical: Vec3::new(0., 2., 0.),
        }
    }
    /// Returns the ray from the origin through viewport coordinates
    /// `(u, v)`, expressed as fractions of the horizontal and vertical
    /// viewport extents.
    pub fn get_ray(&self, u: f32, v: f32) -> Ray {
        Ray::new(
            self.origin,
            self.lower_left_corner + self.horizontal * u + self.vertical * v - self.origin,
        )
    }
}

// `Camera::new` takes no arguments, so also provide the canonical
// `Default` implementation (clippy::new_without_default).
impl Default for Camera {
    fn default() -> Self {
        Self::new()
    }
}
|
#[doc = "Reader of register IC_RX_TL"]
pub type R = crate::R<u32, super::IC_RX_TL>;
#[doc = "Writer for register IC_RX_TL"]
pub type W = crate::W<u32, super::IC_RX_TL>;
#[doc = "Register IC_RX_TL `reset()`'s with value 0"]
// svd2rust-generated: hardware reset value of IC_RX_TL is 0
// (per the field doc below, a threshold of 0 means 1 entry).
impl crate::ResetValue for super::IC_RX_TL {
    type Type = u32;
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0
    }
}
#[doc = "Reader of field `RX_TL`"]
pub type RX_TL_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `RX_TL`"]
// Borrows the register writer so field writes can be chained.
pub struct RX_TL_W<'a> {
    w: &'a mut W,
}
impl<'a> RX_TL_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    // Masks `value` into bits 0:7 of the register word, leaving the
    // remaining bits untouched. Unsafe by svd2rust convention: the caller
    // is responsible for writing a value the hardware accepts.
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        self.w.bits = (self.w.bits & !0xff) | ((value as u32) & 0xff);
        self.w
    }
}
impl R {
    #[doc = "Bits 0:7 - Receive FIFO Threshold Level.\\n\\n Controls the level of entries (or above) that triggers the RX_FULL interrupt (bit 2 in IC_RAW_INTR_STAT register). The valid range is 0-255, with the additional restriction that hardware does not allow this value to be set to a value larger than the depth of the buffer. If an attempt is made to do that, the actual value set will be the maximum depth of the buffer. A value of 0 sets the threshold for 1 entry, and a value of 255 sets the threshold for 256 entries."]
    // Reads bits 0:7 of the register value.
    #[inline(always)]
    pub fn rx_tl(&self) -> RX_TL_R {
        RX_TL_R::new((self.bits & 0xff) as u8)
    }
}
impl W {
    #[doc = "Bits 0:7 - Receive FIFO Threshold Level.\\n\\n Controls the level of entries (or above) that triggers the RX_FULL interrupt (bit 2 in IC_RAW_INTR_STAT register). The valid range is 0-255, with the additional restriction that hardware does not allow this value to be set to a value larger than the depth of the buffer. If an attempt is made to do that, the actual value set will be the maximum depth of the buffer. A value of 0 sets the threshold for 1 entry, and a value of 255 sets the threshold for 256 entries."]
    // Returns the write proxy for bits 0:7.
    #[inline(always)]
    pub fn rx_tl(&mut self) -> RX_TL_W {
        RX_TL_W { w: self }
    }
}
|
#![no_std]
#![feature(alloc)]
extern crate alloc;
use alloc::collections::BTreeMap;
use alloc::string::String;
use alloc::vec::Vec;
extern crate common;
use common::contract_api::{get_arg, ret, store_function};
/// Builds the greeting `"Hello, <name>"` for the given name.
fn hello_name(name: &str) -> String {
    // Reserve the final length up front to avoid reallocation.
    let mut greeting = String::with_capacity("Hello, ".len() + name.len());
    greeting.push_str("Hello, ");
    greeting.push_str(name);
    greeting
}
#[no_mangle]
// Contract entry point: reads the name from argument 0, computes the
// greeting and returns it to the caller via `ret`.
pub extern "C" fn hello_name_ext() {
    let name: String = get_arg(0);
    let y = hello_name(&name);
    ret(&y, &Vec::new());
}
#[no_mangle]
// Deployment entry point: stores `hello_name_ext` as a callable
// on-chain function (the returned hash is intentionally discarded).
pub extern "C" fn call() {
    let _hash = store_function("hello_name_ext", BTreeMap::new());
}
|
use std::{
collections::HashMap,
sync::{Mutex, RwLock},
};
use apllodb_shared_components::{ApllodbError, ApllodbResult};
use crate::correlation::correlation_name::CorrelationName;
use super::{
node_id::{QueryPlanNodeId, QueryPlanNodeIdGenerator},
node_kind::QueryPlanNodeKind,
QueryPlanNode,
};
/// Thread-safe in-memory store of query-plan nodes, keyed by node id.
#[derive(Debug, Default)]
pub(crate) struct QueryPlanNodeRepository {
    // Node storage; RwLock allows concurrent readers.
    hmap: RwLock<HashMap<QueryPlanNodeId, QueryPlanNode>>,
    // Monotonic id source, serialized behind a Mutex.
    id_gen: Mutex<QueryPlanNodeIdGenerator>,
}
impl QueryPlanNodeRepository {
    /// Creates a node of the given kind, registers it, and returns its id.
    pub(crate) fn create(&self, kind: QueryPlanNodeKind) -> QueryPlanNodeId {
        let new_node = QueryPlanNode::new(self.id_gen.lock().unwrap().gen(), kind);
        let new_id = new_node.id;
        self.hmap.write().unwrap().insert(new_id, new_node);
        new_id
    }
    /// Returns the largest (most recently generated) node id.
    ///
    /// # Panics
    ///
    /// - no node has been created.
    pub(crate) fn latest_node_id(&self) -> QueryPlanNodeId {
        let nodes = self.hmap.read().unwrap();
        nodes
            .iter()
            .max_by(|(a_id, _), (b_id, _)| a_id.cmp(b_id))
            .map(|(id, _)| *id)
            .expect("no QueryPlanNode exists (already removed?)")
    }
    /// Finds the node whose source correlation matches `correlation_name`.
    ///
    /// # Failures
    ///
    /// - [NameErrorNotFound](apllodb-shared-components::SqlState::NameErrorNotFound) when:
    ///   - no matching node is found.
    pub(crate) fn find_correlation_node(
        &self,
        correlation_name: &CorrelationName,
    ) -> ApllodbResult<QueryPlanNodeId> {
        let nodes = self.hmap.read().unwrap();
        nodes
            .iter()
            .find_map(|(id, node)| {
                // `?` short-circuits nodes without a source correlation.
                let corr_name = node.source_correlation_name()?;
                (correlation_name == &corr_name).then(|| *id)
            })
            .ok_or_else(|| {
                ApllodbError::name_error_not_found("no QueryPlanNode exists (already removed?)")
            })
    }
    /// Removes and returns the node with the given id.
    ///
    /// # Panics
    ///
    /// when node with id does not exist.
    pub(crate) fn remove(&self, id: QueryPlanNodeId) -> QueryPlanNode {
        self.hmap
            .write()
            .unwrap()
            .remove(&id)
            .unwrap_or_else(|| panic!("QueryPlanNode:{:?} does not exist (already removed?)", id))
    }
}
|
//!
//! # `Data Section`
//!
//! +------------+----------------+-----------+
//! | | | |
//! | #Layouts | Layout #1 | ... |
//! | (2 bytes) | (see `Layout`) | |
//! | | | |
//! +------------+----------------+-----------+
//!
//!
//! ## `Layout`
//!
//! +--------------+-----------------------------------------+
//! | | |
//! | Layout Kind | Layout Specific Encoding |
//! | (2 bytes) | (see `Fixed Layout`) |
//! | | |
//! +--------------+-----------------------------------------+
//!
//!
//! ### `Fixed Layout`
//!
//! Right now, there is only the `Fixed Layout`
//!
//! When `#Vars > 0`
//! +--------------+-----------+----------------+-------------------+-------------------+
//! |              |           |                |                   |                   |
//! | 0x00_01      | #Vars     | First Var Id   | Var #0 Byte-Size  | ...               |
//! | (2 bytes)    | (2 bytes) | (4 bytes)      | (2 bytes)         |                   |
//! |              |           |                |                   |                   |
//! +--------------+-----------+----------------+-------------------+-------------------+
//!
//!
//! When `#Vars = 0`
//! +--------------+-----------+
//! |              |           |
//! | 0x00_01      | 0         |
//! | (2 bytes)    | (2 bytes) |
//! |              |           |
//! +--------------+-----------+
//!
//!
//!
use std::io::Cursor;
use svm_layout::{FixedLayoutBuilder, Id, Layout, LayoutKind, RawVar};
use svm_types::DataSection;
use crate::section::{SectionDecoder, SectionEncoder};
use crate::{Field, ParseError, ReadExt, WriteExt};
pub const FIXED: u16 = 0x00_01;
impl SectionEncoder for DataSection {
    /// Encodes the section as a `#Layouts` count followed by each `Layout`
    /// (see the module-level format docs).
    fn encode(&self, w: &mut Vec<u8>) {
        // `#Layouts`
        encode_layout_count(self.len(), w);
        // Encoding each `Layout`
        for layout in self.layouts() {
            encode_layout(layout, w);
        }
    }
}
impl SectionDecoder for DataSection {
    /// Decodes a `#Layouts` count and then that many `Layout` entries.
    fn decode(cursor: &mut Cursor<&[u8]>) -> Result<Self, ParseError> {
        // `#Layouts`
        let n = decode_layout_count(cursor)? as usize;
        let mut section = DataSection::with_capacity(n);
        // Decode each `Layout` in order.
        for _ in 0..n {
            section.add_layout(decode_layout(cursor)?);
        }
        Ok(section)
    }
}
/// Encodes one `Layout` as its `Layout Kind` tag followed by the
/// kind-specific payload (see the module-level format docs).
fn encode_layout(layout: &Layout, w: &mut Vec<u8>) {
    // `Layout Kind`
    let kind = layout.kind();
    encode_layout_kind(kind, w);
    match kind {
        LayoutKind::Fixed => {
            let layout = layout.as_fixed();
            // `#Vars` — written as a big-endian u16, so the count must fit.
            // Uses `u16::MAX` (not the deprecated `std::u16::MAX`) for
            // consistency with `encode_layout_count`.
            let var_count = layout.len();
            assert!(var_count < u16::MAX as usize);
            w.write_u16_be(var_count as u16);
            if var_count > 0 {
                // `First Var Id`
                let first = layout.first();
                encode_var_id(first, w);
                // Encoding each `Var Byte-Size`
                for var in layout.iter() {
                    encode_var_byte_size(&var, w);
                }
            }
        }
    }
}
/// Decodes a single `Layout`: the kind tag, then the kind-specific payload.
fn decode_layout(cursor: &mut Cursor<&[u8]>) -> Result<Layout, ParseError> {
    // `Layout Kind`
    let kind = decode_layout_kind(cursor)?;
    match kind {
        LayoutKind::Fixed => {
            // `#Vars` (big-endian u16 on the wire)
            let var_count = cursor
                .read_u16_be()
                .map_err(|_| ParseError::NotEnoughBytes(Field::RawVarCount))?
                as usize;
            let mut builder = FixedLayoutBuilder::with_capacity(var_count);
            if var_count > 0 {
                // `First Var Id`
                builder.set_first(decode_var_id(cursor)?);
                // Each remaining entry is a `Var Byte-Size`.
                for _ in 0..var_count {
                    builder.push(decode_var_byte_size(cursor)?);
                }
            }
            Ok(Layout::Fixed(builder.build()))
        }
    }
}
/// Writes the on-wire tag for a layout kind.
fn encode_layout_kind(kind: LayoutKind, w: &mut Vec<u8>) {
    match kind {
        LayoutKind::Fixed => w.write_u16_be(FIXED),
    }
}
/// Reads the layout-kind tag from the cursor.
fn decode_layout_kind(cursor: &mut Cursor<&[u8]>) -> Result<LayoutKind, ParseError> {
    match cursor.read_u16_be() {
        Ok(FIXED) => Ok(LayoutKind::Fixed),
        // NOTE(review): an unknown tag panics; if the input can be
        // attacker-controlled this should be a ParseError — confirm.
        Ok(_) => unreachable!(),
        Err(_) => Err(ParseError::NotEnoughBytes(Field::LayoutKind)),
    }
}
// Writes `#Layouts` as a big-endian u16; the count must fit in 16 bits.
fn encode_layout_count(layout_count: usize, w: &mut Vec<u8>) {
    assert!(layout_count < u16::MAX as usize);
    w.write_u16_be(layout_count as u16);
}
// Reads `#Layouts` (big-endian u16); short input maps to NotEnoughBytes.
fn decode_layout_count(cursor: &mut Cursor<&[u8]>) -> Result<u16, ParseError> {
    let value = cursor.read_u16_be();
    value.map_err(|_| ParseError::NotEnoughBytes(Field::LayoutCount))
}
// Writes a variable id as a big-endian u32 (the `First Var Id` field).
fn encode_var_id(id: Id, w: &mut Vec<u8>) {
    w.write_u32_be(id.0)
}
fn decode_var_id(cursor: &mut Cursor<&[u8]>) -> Result<Id, ParseError> {
match cursor.read_u32_be() {
Ok(id) => Ok(Id(id)),
Err(..) => Err(ParseError::NotEnoughBytes(Field::LayoutFirstVarId)),
}
}
// Writes one `Var Byte-Size` as a big-endian u16.
// NOTE(review): `as u16` silently truncates if byte_size() exceeds 65535 —
// confirm upstream guarantees the bound.
fn encode_var_byte_size(var: &RawVar, w: &mut Vec<u8>) {
    w.write_u16_be(var.byte_size() as u16);
}
/// Reads one `Var Byte-Size` (big-endian u16) and widens it to u32.
fn decode_var_byte_size(cursor: &mut Cursor<&[u8]>) -> Result<u32, ParseError> {
    cursor
        .read_u16_be()
        .map(u32::from)
        .map_err(|_| ParseError::NotEnoughBytes(Field::RawVarSize))
}
|
use rltk::{Rltk, RGB};
use specs::prelude::*;
use super::{RunState, gamelog::GameLog, GameClock, WantsToSowSeed, Seed,
Position, Renderable, InPlayerInventory, Name, IsSown};
pub struct SeedSowingSystem {}
impl<'a> System<'a> for SeedSowingSystem {
#[allow(clippy::type_complexity)]
type SystemData = (
Entities<'a>,
WriteStorage<'a, Position>,
ReadStorage<'a, Name>,
WriteStorage<'a, Renderable>,
WriteStorage<'a, WantsToSowSeed>,
WriteStorage<'a, Seed>,
WriteStorage<'a, InPlayerInventory>,
WriteStorage<'a, IsSown>,
WriteExpect<'a, GameLog>
);
fn run(&mut self, data : Self::SystemData) {
let (entities, mut positions, names, mut renderables, mut wants_sow, mut seed, mut inventory, mut is_sown, mut log) = data;
for (entity, sow) in (&entities, &wants_sow).join() {
// Remove seed from player inventory
inventory.remove(sow.seed);
is_sown.insert(sow.seed, IsSown{});
// Add seed to sown seeds
let newx = sow.x1;
let newy = sow.y1;
positions.insert(sow.seed, Position{ x: newx, y: newy});
let seed_name = names.get(sow.seed).unwrap();
match &seed_name.name.to_owned()[..] {
"Apple seed" => renderables.insert(sow.seed, Renderable{
glyph: rltk::to_cp437('a'),
fg: RGB::named(rltk::GREEN),
bg: RGB::named(rltk::BLACK),
}),
"Pear seed" => renderables.insert(sow.seed, Renderable{
glyph: rltk::to_cp437('p'),
fg: RGB::from_f32(0.82, 0.886, 0.192),
bg: RGB::named(rltk::BLACK),
}),
_ => {
log.entries.push(seed_name.name.to_string());
renderables.insert(sow.seed, Renderable{
glyph: rltk::to_cp437('_'),
fg: RGB::named(rltk::BLACK),
bg: RGB::named(rltk::BLACK),
})}
};
let mut log_entry = String::from("Seed sown!");
log.entries.push(log_entry);
}
wants_sow.clear();
}
}
|
use crate::commands::WholeStreamCommand;
use crate::context::CommandRegistry;
use crate::data::base::select_fields;
use crate::errors::ShellError;
use crate::prelude::*;
/// Deserialized arguments for `pick`: the trailing column names.
#[derive(Deserialize)]
struct PickArgs {
    // Positional rest-arguments, each tagged with its source span.
    rest: Vec<Tagged<String>>,
}
pub struct Pick;
impl WholeStreamCommand for Pick {
    fn name(&self) -> &str {
        "pick"
    }
    // Accepts any number of trailing arguments (the column names).
    fn signature(&self) -> Signature {
        Signature::build("pick").rest(SyntaxShape::Any)
    }
    fn usage(&self) -> &str {
        "Down-select table to only these columns."
    }
    // Deserializes CommandArgs into PickArgs and delegates to `pick`.
    fn run(
        &self,
        args: CommandArgs,
        registry: &CommandRegistry,
    ) -> Result<OutputStream, ShellError> {
        args.process(registry, pick)?.run()
    }
}
/// Projects every incoming row down to the requested columns.
///
/// Errors with a labeled "needs parameter" message when no columns were
/// given, since a column-less pick would silently drop the whole table.
fn pick(
    PickArgs { rest: fields }: PickArgs,
    RunnableContext { input, name, .. }: RunnableContext,
) -> Result<OutputStream, ShellError> {
    // `is_empty()` is the idiomatic (and clippy-clean) form of `len() == 0`.
    if fields.is_empty() {
        return Err(ShellError::labeled_error(
            "Pick requires columns to pick",
            "needs parameter",
            name,
        ));
    }
    // Strip the tags; only the raw column names are needed below.
    let fields: Vec<_> = fields.iter().map(|f| f.item.clone()).collect();
    let objects = input
        .values
        .map(move |value| select_fields(&value.item, &fields, value.tag()));
    Ok(objects.from_input_stream())
}
|
use std::io::println;
use triangle_routines::triangle_print;
mod triangle_routines;
// Board geometry: 15 holes in the triangle, 36 legal jumps.
// NOTE(review): legacy pre-1.0 Rust dialect (`int`, `[T, ..N]` arrays) —
// this file will not compile on modern rustc.
static NUM_PEGS:int = 15;
static NUM_MOVES:int = 36;
// Each move is [from, over, to]: a peg at `from` jumps the peg at `over`
// (removed) and lands in the empty hole `to` — see valid_move/make_move.
static moves:[[int, ..3], ..NUM_MOVES] = [
    [0, 1, 3],
    [0, 2, 5],
    [1, 3, 6],
    [1, 4, 8],
    [2, 4, 7],
    [2, 5, 9],
    [3, 1, 0],
    [3, 4, 5],
    [3, 6, 10],
    [3, 7, 12],
    [4, 7, 11],
    [4, 8, 13],
    [5, 2, 0],
    [5, 4, 3],
    [5, 8, 12],
    [5, 9, 14],
    [6, 3, 1],
    [6, 7, 8],
    [7, 4, 2],
    [7, 8, 9],
    [8, 4, 1],
    [8, 7, 6],
    [9, 5, 2],
    [9, 8, 7],
    [10, 6, 3],
    [10, 11, 12],
    [11, 7, 4],
    [11, 12, 13],
    [12, 7, 3],
    [12, 8, 5],
    [12, 11, 10],
    [12, 13, 14],
    [13, 8, 4],
    [13, 12, 11],
    [14, 9, 5],
    [14, 13, 12]
];
/* Entry point: read the starting position, search for a solution
 * (solve() prints the winning boards itself), and report failure. */
fn main() {
    let mut board = [false, ..NUM_PEGS];
    /* Parse the input, assuming valid input */
    triangle_routines::triangle_input(board);
    /* Solve the board */
    let result = solve(board);
    /* If no solution, say so */
    if result == false {
        println("There are no solutions to the initial position given.");
    }
}
/* Return the number of pegs on the board. */
fn num_pegs(board:&[bool]) -> int {
    let mut sum = 0;
    // Each occupied hole contributes 1 (true casts to 1, false to 0).
    for peg in board.iter() { sum += *peg as int; }
    return sum;
}
/* Return 1 if the move is valid on this board, otherwise return 0. */
// Valid means: pegs at `from` and `over`, and the landing hole empty.
fn valid_move(board:&[bool], move:&[int]) -> bool {
    return board[move[0]] && board[move[1]] && (!board[move[2]]);
}
/* Make this move on this board. */
// Vacates `from` and the jumped-over hole, fills the landing hole.
fn make_move(board:&mut [bool], move:&[int]) {
    board[move[0]] = false;
    board[move[1]] = false;
    board[move[2]] = true;
}
/* Unmake this move on this board. */
// Exact inverse of make_move, used for backtracking in solve().
fn unmake_move(board:&mut [bool], move:&[int]) {
    board[move[0]] = true;
    board[move[1]] = true;
    board[move[2]] = false;
}
/*
* Solve the game starting from this board. Return 1 if the game can
* be solved; otherwise return 0. Do not permanently alter the board passed
* in. Once a solution is found, print the boards making up the solution in
* reverse order.
*/
fn solve(board:&mut [bool]) -> bool {
    /* Check if the board is already solved. If it is, print it. */
    if num_pegs(board) == 1 {
        triangle_print(board);
        return true;
    }
    /* For each move we could make, make it,
     * and then try to solve the resulting board.
     */
    for i in range(0, NUM_MOVES) {
        if !valid_move(board, moves[i]) { continue; }
        make_move(board, moves[i]);
        /* If we find a solution, print the board and return true. */
        if solve(board) {
            // Undo before printing so the board passed in is unchanged,
            // yielding the solution boards in reverse order as documented.
            unmake_move(board, moves[i]);
            triangle_print(board);
            return true;
        }
        unmake_move(board, moves[i]);
    }
    /* Return false */
    return false;
}
|
// Copyright 2019 Parity Technologies
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::iter;
use proc_macro2::Ident;
use syn::{
spanned::Spanned,
visit::{self, Visit},
Generics, Result, Type, TypePath,
};
use crate::utils::{self, CustomTraitBound};
/// Visits the ast and checks if one of the given idents is found.
struct ContainIdents<'a> {
    // Set to true once any of `idents` is seen during the visit.
    result: bool,
    // The identifiers to search for.
    idents: &'a [Ident],
}
impl<'a, 'ast> Visit<'ast> for ContainIdents<'a> {
    // Flags a hit on any ident that equals one of the searched idents;
    // the visitor keeps walking but `result` never resets to false.
    fn visit_ident(&mut self, i: &'ast Ident) {
        if self.idents.iter().any(|id| id == i) {
            self.result = true;
        }
    }
}
/// Checks if the given type contains one of the given idents.
/// Returns true when the given type mentions any of the given idents.
fn type_contain_idents(ty: &Type, idents: &[Ident]) -> bool {
    let mut finder = ContainIdents { result: false, idents };
    finder.visit_type(ty);
    finder.result
}
/// Visits the ast and checks if the a type path starts with the given ident.
struct TypePathStartsWithIdent<'a> {
    // Set to true once a type path starting with `ident` is found.
    result: bool,
    ident: &'a Ident,
}
impl<'a, 'ast> Visit<'ast> for TypePathStartsWithIdent<'a> {
    fn visit_type_path(&mut self, i: &'ast TypePath) {
        if let Some(segment) = i.path.segments.first() {
            if &segment.ident == self.ident {
                self.result = true;
                // Match found; no need to descend into this path.
                return
            }
        }
        // Otherwise recurse into nested type paths (e.g. generic args).
        visit::visit_type_path(self, i);
    }
}
/// Checks if the given type path or any containing type path starts with the given ident.
/// Returns true when the type path, or any type path nested inside it,
/// starts with the given ident.
fn type_path_or_sub_starts_with_ident(ty: &TypePath, ident: &Ident) -> bool {
    let mut finder = TypePathStartsWithIdent { result: false, ident };
    finder.visit_type_path(ty);
    finder.result
}
/// Checks if the given type or any containing type path starts with the given ident.
/// Returns true when the type, or any type path it contains, starts with
/// the given ident.
fn type_or_sub_type_path_starts_with_ident(ty: &Type, ident: &Ident) -> bool {
    let mut finder = TypePathStartsWithIdent { result: false, ident };
    finder.visit_type(ty);
    finder.result
}
/// Visits the ast and collects all type paths that do not start or contain the given ident.
///
/// Returns `T`, `N`, `A` for `Vec<(Recursive<T, N>, A)>` with `Recursive` as ident.
struct FindTypePathsNotStartOrContainIdent<'a> {
    // Collected type paths that neither start with nor contain `ident`.
    result: Vec<TypePath>,
    ident: &'a Ident,
}
impl<'a, 'ast> Visit<'ast> for FindTypePathsNotStartOrContainIdent<'a> {
    fn visit_type_path(&mut self, i: &'ast TypePath) {
        if type_path_or_sub_starts_with_ident(i, self.ident) {
            // Path involves the ident somewhere: descend to pick out the
            // inner paths that do not.
            visit::visit_type_path(self, i);
        } else {
            // Clean path: collect it whole, without recursing further.
            self.result.push(i.clone());
        }
    }
}
/// Collects all type paths that do not start or contain the given ident in the given type.
///
/// Returns `T`, `N`, `A` for `Vec<(Recursive<T, N>, A)>` with `Recursive` as ident.
/// Collects all type paths in `ty` that neither start with nor contain
/// `ident` (e.g. `T`, `N`, `A` for `Vec<(Recursive<T, N>, A)>` with
/// `Recursive` as ident).
fn find_type_paths_not_start_or_contain_ident(ty: &Type, ident: &Ident) -> Vec<TypePath> {
    let mut collector = FindTypePathsNotStartOrContainIdent { result: Vec::new(), ident };
    collector.visit_type(ty);
    collector.result
}
#[allow(clippy::too_many_arguments)]
/// Add required trait bounds to all generic types.
pub fn add<N>(
    input_ident: &Ident,
    generics: &mut Generics,
    data: &syn::Data,
    custom_trait_bound: Option<CustomTraitBound<N>>,
    codec_bound: syn::Path,
    codec_skip_bound: Option<syn::Path>,
    dumb_trait_bounds: bool,
    crate_path: &syn::Path,
) -> Result<()> {
    let skip_type_params = match custom_trait_bound {
        // User supplied explicit bounds: use them verbatim and stop.
        Some(CustomTraitBound::SpecifiedBounds { bounds, .. }) => {
            generics.make_where_clause().predicates.extend(bounds);
            return Ok(())
        },
        // User opted some type params out of automatic bounds.
        Some(CustomTraitBound::SkipTypeParams { type_names, .. }) =>
            type_names.into_iter().collect::<Vec<_>>(),
        None => Vec::new(),
    };
    // Type parameters that still need bounds (i.e. not skipped).
    let ty_params = generics
        .type_params()
        .filter_map(|tp| {
            skip_type_params.iter().all(|skip| skip != &tp.ident).then(|| tp.ident.clone())
        })
        .collect::<Vec<_>>();
    if ty_params.is_empty() {
        return Ok(())
    }
    let codec_types =
        get_types_to_add_trait_bound(input_ident, data, &ty_params, dumb_trait_bounds)?;
    let compact_types = collect_types(data, utils::is_compact)?
        .into_iter()
        // Only add a bound if the type uses a generic
        .filter(|ty| type_contain_idents(ty, &ty_params))
        .collect::<Vec<_>>();
    let skip_types = if codec_skip_bound.is_some() {
        // Skipped fields still need Default to be reconstructed on decode.
        let needs_default_bound = |f: &syn::Field| utils::should_skip(&f.attrs);
        collect_types(data, needs_default_bound)?
            .into_iter()
            // Only add a bound if the type uses a generic
            .filter(|ty| type_contain_idents(ty, &ty_params))
            .collect::<Vec<_>>()
    } else {
        Vec::new()
    };
    if !codec_types.is_empty() || !compact_types.is_empty() || !skip_types.is_empty() {
        let where_clause = generics.make_where_clause();
        codec_types
            .into_iter()
            .for_each(|ty| where_clause.predicates.push(parse_quote!(#ty : #codec_bound)));
        let has_compact_bound: syn::Path = parse_quote!(#crate_path::HasCompact);
        compact_types
            .into_iter()
            .for_each(|ty| where_clause.predicates.push(parse_quote!(#ty : #has_compact_bound)));
        skip_types.into_iter().for_each(|ty| {
            let codec_skip_bound = codec_skip_bound.as_ref();
            where_clause.predicates.push(parse_quote!(#ty : #codec_skip_bound))
        });
    }
    Ok(())
}
/// Returns all types that must be added to the where clause with the respective trait bound.
fn get_types_to_add_trait_bound(
    input_ident: &Ident,
    data: &syn::Data,
    ty_params: &[Ident],
    dumb_trait_bound: bool,
) -> Result<Vec<Type>> {
    if dumb_trait_bound {
        // "Dumb" mode: bound every type parameter directly.
        Ok(ty_params.iter().map(|t| parse_quote!( #t )).collect())
    } else {
        // A field needs the codec bound unless it is compact, encoded-as
        // another type, or skipped entirely.
        let needs_codec_bound = |f: &syn::Field| {
            !utils::is_compact(f) &&
            utils::get_encoded_as_type(f).is_none() &&
            !utils::should_skip(&f.attrs)
        };
        let res = collect_types(data, needs_codec_bound)?
            .into_iter()
            // Only add a bound if the type uses a generic
            .filter(|ty| type_contain_idents(ty, ty_params))
            // If a struct contains itself as field type, we can not add this type into the where
            // clause. This is required to work a round the following compiler bug: https://github.com/rust-lang/rust/issues/47032
            .flat_map(|ty| {
                find_type_paths_not_start_or_contain_ident(&ty, input_ident)
                    .into_iter()
                    .map(Type::Path)
                    // Remove again types that do not contain any of our generic parameters
                    .filter(|ty| type_contain_idents(ty, ty_params))
                    // Add back the original type, as we don't want to loose it.
                    .chain(iter::once(ty))
            })
            // Remove all remaining types that start/contain the input ident to not have them in the
            // where clause.
            .filter(|ty| !type_or_sub_type_path_starts_with_ident(ty, input_ident))
            .collect();
        Ok(res)
    }
}
// Gathers the types of all fields (struct or enum) matching `type_filter`.
// Skipped enum variants are excluded; unions are rejected with an error.
fn collect_types(data: &syn::Data, type_filter: fn(&syn::Field) -> bool) -> Result<Vec<syn::Type>> {
    use syn::*;
    let types = match *data {
        Data::Struct(ref data) => match &data.fields {
            | Fields::Named(FieldsNamed { named: fields, .. }) |
            Fields::Unnamed(FieldsUnnamed { unnamed: fields, .. }) =>
                fields.iter().filter(|f| type_filter(f)).map(|f| f.ty.clone()).collect(),
            Fields::Unit => Vec::new(),
        },
        Data::Enum(ref data) => data
            .variants
            .iter()
            .filter(|variant| !utils::should_skip(&variant.attrs))
            .flat_map(|variant| match &variant.fields {
                | Fields::Named(FieldsNamed { named: fields, .. }) |
                Fields::Unnamed(FieldsUnnamed { unnamed: fields, .. }) =>
                    fields.iter().filter(|f| type_filter(f)).map(|f| f.ty.clone()).collect(),
                Fields::Unit => Vec::new(),
            })
            .collect(),
        Data::Union(ref data) =>
            return Err(Error::new(data.union_token.span(), "Union types are not supported.")),
    };
    Ok(types)
}
|
use nalgebra;
use std::marker::PhantomData;
use std::mem;
use std::ops::{Deref, DerefMut};
use std::thread::{self, ThreadId};
use hibitset::BitSetLike;
use specs::prelude::*;
use specs::storage::{
DenseVecStorage, MaskedStorage, TryDefault, UnprotectedStorage,
};
use specs::world::Index as SpecsIndex;
use specs_derive::Component;
pub use nalgebra::Rotation2;
pub use nalgebra::Rotation3;
pub use nalgebra::Unit;
// f32 specializations of the nalgebra/nphysics/ncollide generic types used
// throughout this crate.
pub type Perspective3 = nalgebra::Perspective3<f32>;
pub type Point2 = nalgebra::Point2<f32>;
pub type Point3 = nalgebra::Point3<f32>;
pub type Point4 = nalgebra::Point4<f32>;
pub type Matrix4 = nalgebra::Matrix4<f32>;
pub type Vector2 = nalgebra::Vector2<f32>;
pub type Vector3 = nalgebra::Vector3<f32>;
pub type _Vector4 = nalgebra::Vector4<f32>;
pub type Isometry3 = nalgebra::Isometry3<f32>;
pub type Isometry2 = nalgebra::Isometry2<f32>;
pub type Velocity2 = nphysics2d::algebra::Velocity2<f32>;
pub use ncollide2d::query::Ray;
pub type Segment = ncollide2d::shape::Segment<f32>;
pub use ncollide2d::query::ray_internal::ray::RayCast;
// SAFETY(review): ThreadPin may cross threads, but every access path
// (the Deref/DerefMut impls below) asserts the current thread is the
// owning thread and panics otherwise. Confirm no code reaches `inner`
// without going through those impls before trusting these.
unsafe impl<T> Send for ThreadPin<T> {}
unsafe impl<T> Sync for ThreadPin<T> {}
/// Embeds a 2D isometry into 3D: the translation goes into the XY plane
/// (Z = 0) and the planar rotation becomes a rotation about the Z axis.
pub fn iso2_iso3(iso2: &Isometry2) -> Isometry3 {
    let translation = Vector3::new(
        iso2.translation.vector.x,
        iso2.translation.vector.y,
        0f32,
    );
    let axis_angle = Vector3::new(0f32, 0f32, iso2.rotation.angle());
    Isometry3::new(translation, axis_angle)
}
/// Allows safely implement Sync and Send for type T
/// panics if called from another thread
#[derive(Component)]
pub struct ThreadPin<T>
where
    T: 'static,
{
    // Thread that created the pin; all access is asserted against it.
    owner: ThreadId,
    // The wrapped value, only reachable from the owner thread.
    inner: T,
}
// impl<T> Component for ThreadPin<T> where T: 'static {
// type Storage = DenseVecStorage<Self>;
// }
impl<T> ThreadPin<T> {
    /// Pins `value` to the calling thread; later access from any other
    /// thread panics (see Deref/DerefMut).
    pub fn new(value: T) -> Self {
        ThreadPin {
            owner: thread::current().id(),
            inner: value,
        }
    }
}
impl<T> Deref for ThreadPin<T> {
    type Target = T;
    // Panics when dereferenced from a thread other than the owner.
    fn deref(&self) -> &Self::Target {
        assert!(thread::current().id() == self.owner);
        &self.inner
    }
}
impl<T> DerefMut for ThreadPin<T> {
    // Panics when dereferenced from a thread other than the owner.
    fn deref_mut(&mut self) -> &mut Self::Target {
        assert!(thread::current().id() == self.owner);
        &mut self.inner
    }
}
/// Option ThreadPin with deref(panics if None)
/// Allows to implement Default on ThreadPin
#[derive(Default)]
pub struct ThreadPinResource<T>
where
    T: 'static,
{
    // None until initialized; Deref/DerefMut panic on None.
    inner: Option<ThreadPin<T>>,
}
impl<T> Deref for ThreadPinResource<T> {
    type Target = T;
    // Panics if the resource is uninitialized (None), as documented above.
    fn deref(&self) -> &Self::Target {
        self.inner.as_ref().map(|x| x.deref()).unwrap()
    }
}
impl<T> DerefMut for ThreadPinResource<T> {
    // Panics if the resource is uninitialized (None), as documented above.
    fn deref_mut(&mut self) -> &mut Self::Target {
        self.inner.as_mut().map(|x| x.deref_mut()).unwrap()
    }
}
// Retained storage
pub trait Retained<C> {
    /// Takes and returns the components retained since the last call.
    fn retained(&mut self) -> Vec<C>;
}
impl<'e, T, D> Retained<T> for Storage<'e, T, D>
where
    T: Component,
    T::Storage: Retained<T>,
    D: DerefMut<Target = MaskedStorage<T>>,
{
    /// Forwards to the underlying component storage's `retained`.
    fn retained(&mut self) -> Vec<T> {
        // NOTE(review): `open()` exposes the unprotected storage; only
        // `retained()` is called on it here, which does not touch the mask —
        // confirm this invariant holds for all Retained implementors.
        unsafe { self.open().1.retained() }
    }
}
// Storage wrapper that records a clone of every removed component so it
// can be collected later via `Retained::retained`.
pub struct RetainedStorage<C, T = dyn UnprotectedStorage<C>> {
    // Clones of components removed since the last `retained()` call.
    retained: Vec<C>,
    // The wrapped backing storage.
    storage: T,
    phantom: PhantomData<C>,
}
impl<C, T> Default for RetainedStorage<C, T>
where
    T: TryDefault,
{
    /// Builds an empty wrapper around `T`'s default storage.
    fn default() -> Self {
        Self {
            retained: Vec::new(),
            storage: T::try_default().unwrap(),
            phantom: PhantomData,
        }
    }
}
impl<C, T> Retained<C> for RetainedStorage<C, T> {
    /// Drains and returns the components captured by `remove` since the
    /// last call, leaving the internal buffer empty for reuse.
    fn retained(&mut self) -> Vec<C> {
        // `mem::take` swaps in the default (empty) Vec — same effect as
        // the previous `mem::replace(&mut self.retained, vec![])` without
        // spelling out the empty-vec literal.
        mem::take(&mut self.retained)
    }
}
impl<C: Clone, T: UnprotectedStorage<C>> UnprotectedStorage<C>
    for RetainedStorage<C, T>
{
    // All methods delegate to the wrapped storage; only `remove`
    // additionally captures a clone of the removed component.
    unsafe fn clean<B>(&mut self, has: B)
    where
        B: BitSetLike,
    {
        self.storage.clean(has)
    }
    unsafe fn get(&self, id: SpecsIndex) -> &C {
        self.storage.get(id)
    }
    unsafe fn get_mut(&mut self, id: SpecsIndex) -> &mut C {
        self.storage.get_mut(id)
    }
    unsafe fn insert(&mut self, id: SpecsIndex, comp: C) {
        self.storage.insert(id, comp);
    }
    unsafe fn remove(&mut self, id: SpecsIndex) -> C {
        let comp = self.storage.remove(id);
        // Record a clone so callers of retained() can observe removals.
        self.retained.push(comp.clone());
        comp
    }
}
|
/// Wraps the input string in `'1'` sentinels: `"A"` becomes `"1A1"`.
pub fn encode(s : &'static str) -> String {
    let mut out = String::with_capacity(s.len() + 2);
    out.push('1');
    out.push_str(s);
    out.push('1');
    out
}
#[cfg(test)]
mod tests {
    use super::encode;
    // A single character is wrapped in the '1' sentinels.
    #[test]
    fn should_encode_a_simple_char() {
        assert_eq!("1A1", encode("A"));
        assert_eq!("1B1", encode("B"));
    }
    // A sequence is wrapped once as a whole, not per character.
    #[test]
    fn should_encode_a_simple_sequence() {
        assert_eq!("1ABC1",encode("ABC"));
    }
}
|
// I'm going to make this non-standard GCode by adding in custom commands not in the standard.
// Non-standard commands
// Q1 -> Change quadrants
/// A single GCode word: a command letter plus its numeric value (e.g. G1).
#[derive(Debug, PartialEq, Clone)]
pub struct Word {
    pub letter: char,
    pub value: u16
}
/// One GCode line: a command word and its X/Y/Z coordinates.
#[derive(Debug, PartialEq, Clone)]
pub struct GCode {
    pub command: Word,
    pub x: f32,
    pub y: f32,
    pub z: f32 // Including a Z because some slicers will map a lift as a raising of the Z axis rather than using the lift command. - Austin Haskell
}
impl Word {
    /// Renders the word as `<letter><value>`, e.g. `G1`.
    ///
    /// Name kept in UpperCamelCase for existing callers, although Rust
    /// convention would be `to_string`.
    pub fn ToString(self: &Self) -> String {
        // Single allocation via format!, replacing the previous redundant
        // String::from(letter.to_string() + &value.to_string()) round-trip.
        format!("{}{}", self.letter, self.value)
    }
}
impl GCode {
pub fn Write(self: &Self) -> String {
let mut line: String = self.command.ToString();
line += " ";
line += &self.x.to_string();
line += " ";
line += &self.y.to_string();
if self.command.letter != 'Q' {
line += " ";
line += &self.z.to_string();
}
line
}
} |
/*
* Copyright 2020 Cargill Incorporated
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ------------------------------------------------------------------------------
*/
use crate::protos::identity::Policy_EntryType;
use crate::state::identity_view::IdentityView;
use super::{IdentityError, IdentitySource, Permission, Policy, Role};
impl IdentitySource for IdentityView {
    /// Reads a role by name from state, mapping read failures to
    /// `IdentityError::ReadError`.
    fn get_role(&self, name: &str) -> Result<Option<Role>, IdentityError> {
        let role = IdentityView::get_role(self, name).map_err(|err| {
            IdentityError::ReadError(format!("unable to read role from state: {:?}", err))
        })?;
        Ok(role.map(|mut role| Role::new(role.take_name(), role.take_policy_name())))
    }
    /// Reads a policy by name and converts its entries into `Permission`s;
    /// an unset entry type is reported as a read error.
    fn get_policy_by_name(&self, name: &str) -> Result<Option<Policy>, IdentityError> {
        let policy = IdentityView::get_policy(self, name).map_err(|err| {
            IdentityError::ReadError(format!("unable to read policy from state: {:?}", err))
        })?;
        if let Some(mut policy) = policy {
            let permissions: Result<Vec<Permission>, IdentityError> = policy
                .take_entries()
                .into_iter()
                .map(|mut entry| match entry.get_field_type() {
                    Policy_EntryType::PERMIT_KEY => Ok(Permission::PermitKey(entry.take_key())),
                    Policy_EntryType::DENY_KEY => Ok(Permission::DenyKey(entry.take_key())),
                    // Message fixed: previously read "is contains invalid type".
                    Policy_EntryType::ENTRY_TYPE_UNSET => Err(IdentityError::ReadError(format!(
                        "policy {} contains invalid type",
                        entry.get_key()
                    ))),
                })
                .collect();
            Ok(Some(Policy::new(policy.take_name(), permissions?)))
        } else {
            Ok(None)
        }
    }
}
|
mod cpu;
#[cfg(feature = "cuda")]
mod cuda;
|
//! Doctor Syn a computer algebra system for rust macros.
pub mod error;
pub mod expression;
pub mod name;
pub mod polynomial;
pub mod transformation;
pub mod variablelist;
pub mod visitor;
#[cfg(test)]
mod tests;
pub use expression::{Expression, Parity};
pub use name::Name;
pub use std::convert::{TryFrom, TryInto};
pub use variablelist::VariableList;
pub use error::*;
use num_traits::Float;
/// Marker trait for float types that can round-trip through `Expression`
/// (both conversions use this crate's `Error`); implemented below for
/// `f32` and `f64`.
pub trait Evaluateable:
    TryFrom<Expression, Error = error::Error> + TryInto<Expression, Error = error::Error> + Float
{
}
impl Evaluateable for f64 {}
impl Evaluateable for f32 {}
// #[cfg(test)]
// mod test;
// use syn::{parse_macro_input, ExprClosure};
// use quote::quote;
// /// Example:
// /// ```
// /// use doctor_syn::approx;
// /// fn sin4(x: f64) {
// /// approx!(|#[min="0"] #[max="2*PI"] #[terms="4"] x| x.sin());
// /// }
// /// ```
// #[proc_macro]
// pub fn approx(item: TokenStream) -> TokenStream {
// let clos : ExprClosure = parse_macro_input!(item as ExprClosure);
// match approx::do_approx(clos) {
// Ok(res) => quote!(#res).into(),
// Err(e) => { let e = format!("{:?}", e); quote!(#e).into() }
// }
// }
|
extern crate iron;
extern crate persistent;
extern crate router;
extern crate r2d2;
extern crate r2d2_sqlite;
extern crate rusqlite;
extern crate uuid;
use iron::prelude::*;
use iron::status;
use std::io::Read;
/// typemap key for the shared SQLite connection pool stored in Iron's
/// persistent state.
pub struct ConnectionPool;
impl iron::typemap::Key for ConnectionPool {
    type Value = r2d2::Pool<r2d2_sqlite::SqliteConnectionManager>;
}
// Thin wrapper around one pooled SQLite connection.
struct DatabaseConnection {
    conn: r2d2::PooledConnection<r2d2_sqlite::SqliteConnectionManager>,
}
impl DatabaseConnection {
    /// Checks out a connection from the pool.
    /// NOTE(review): `pool.get().unwrap()` panics if the pool is exhausted
    /// or the database is unreachable — consider propagating the error.
    fn new(pool: &r2d2::Pool<r2d2_sqlite::SqliteConnectionManager>) -> DatabaseConnection {
        DatabaseConnection { conn: pool.get().unwrap() }
    }
    /// Creates the `pastes` table if missing; returns the affected-row count.
    fn setup_database(&self) -> Result<i32, rusqlite::Error> {
        let query = "CREATE TABLE IF NOT EXISTS pastes (id TEXT, body BLOB)";
        self.conn.execute(query, &[])
    }
    /// Stores `body` under a fresh UUIDv4 id and returns that id.
    fn insert_paste(&self, body: &str) -> Result<String, rusqlite::Error> {
        let id = uuid::Uuid::new_v4().to_string();
        let query = "INSERT INTO pastes VALUES ($1, $2)";
        self.conn.execute(query, &[&id, &body]).and(Ok(id))
    }
    /// Fetches a paste body by id; None on a missing id or query error.
    fn get_paste_body_by_id(&self, id: &str) -> Option<String> {
        let query = "SELECT body FROM pastes WHERE id = $1";
        self.conn.query_row(query, &[&id], |row| row.get(0)).ok()
    }
}
// POST /: stores the request body as a new paste and replies 201 with its
// URL, or 503 on a database error.
// NOTE(review): `read_to_string(..).unwrap()` panics on non-UTF-8 bodies —
// consider returning 400 instead.
fn post_pastebin(req: &mut Request) -> IronResult<Response> {
    let pool = req.get::<persistent::Read<ConnectionPool>>().unwrap();
    let conn = DatabaseConnection::new(&pool);
    let mut req_body = String::new();
    req.body.read_to_string(&mut req_body).unwrap();
    match conn.insert_paste(&req_body) {
        Ok(id) => Ok(Response::with((status::Created, format!("{}{}\n", req.url, id)))),
        Err(_) => Ok(Response::with((status::ServiceUnavailable, ""))),
    }
}
// GET /:id — returns the paste body, or 404 when the id is unknown.
fn get_pastebin(req: &mut Request) -> IronResult<Response> {
    let pool = req.get::<persistent::Read<ConnectionPool>>().unwrap();
    let conn = DatabaseConnection::new(&pool);
    // The :id route parameter is guaranteed by the router registration.
    let id = req.extensions.get::<router::Router>().unwrap().find("id").unwrap();
    match conn.get_paste_body_by_id(&id) {
        Some(body) => Ok(Response::with((status::Ok, body))),
        None => Ok(Response::with((status::NotFound, ""))),
    }
}
// Wires routes, creates the SQLite pool, ensures the schema exists, and
// serves on localhost:3000.
fn main() {
    let mut router = router::Router::new();
    router.post("/", post_pastebin, "post_pastebin");
    router.get("/:id", get_pastebin, "get_pastebin");
    let config = r2d2::Config::default();
    let manager = r2d2_sqlite::SqliteConnectionManager::new("./db.sqlite3");
    let pool = r2d2::Pool::new(config, manager).unwrap();
    DatabaseConnection::new(&pool).setup_database().unwrap();
    // Make the pool available to handlers via persistent::Read.
    let mut middleware = Chain::new(router);
    middleware.link_before(persistent::Read::<ConnectionPool>::one(pool));
    Iron::new(middleware).http("localhost:3000").unwrap();
}
|
use once_cell::sync::OnceCell;
use serde::{Deserialize, Deserializer, Serialize, Serializer};
use std::error::Error;
/// An abstraction for regex patterns.
///
/// * Allows swapping out the regex implementation because it's only in this module.
/// * Makes regexes serializable and deserializable using just the pattern string.
/// * Lazily compiles regexes on first use to improve initialization time.
#[derive(Debug)]
pub struct Regex {
    // The pattern; the sole input to (de)serialization and equality.
    regex_str: String,
    // Compiled lazily on first use (see `regex()` below).
    regex: OnceCell<regex_impl::Regex>,
}
/// A region contains text positions for capture groups in a match result.
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct Region {
    // Backing region of whichever regex implementation is compiled in.
    region: regex_impl::Region,
}
impl Regex {
    /// Create a new regex from the pattern string.
    ///
    /// Note that the regex compilation happens on first use, which is why this method does not
    /// return a result.
    pub fn new(regex_str: String) -> Self {
        Self {
            regex_str,
            regex: OnceCell::new(),
        }
    }
    /// Check whether the pattern compiles as a valid regex or not.
    pub fn try_compile(regex_str: &str) -> Option<Box<dyn Error + Send + Sync + 'static>> {
        regex_impl::Regex::new(regex_str).err()
    }
    /// Return the regex pattern.
    pub fn regex_str(&self) -> &str {
        &self.regex_str
    }
    /// Check if the regex matches the given text.
    pub fn is_match(&self, text: &str) -> bool {
        self.regex().is_match(text)
    }
    /// Search for the pattern in the given text from begin/end positions.
    ///
    /// If a region is passed, it is used for storing match group positions. The argument allows
    /// the [`Region`] to be reused between searches, which makes a significant performance
    /// difference.
    ///
    /// [`Region`]: struct.Region.html
    pub fn search(
        &self,
        text: &str,
        begin: usize,
        end: usize,
        region: Option<&mut Region>,
    ) -> bool {
        self.regex()
            .search(text, begin, end, region.map(|r| &mut r.region))
    }
    // Compiles the pattern on first call and caches it; callers are
    // expected to have validated the pattern via `try_compile` first.
    fn regex(&self) -> &regex_impl::Regex {
        self.regex.get_or_init(|| {
            regex_impl::Regex::new(&self.regex_str).expect("regex string should be pre-tested")
        })
    }
}
impl Clone for Regex {
fn clone(&self) -> Self {
Regex {
regex_str: self.regex_str.clone(),
regex: OnceCell::new(),
}
}
}
impl PartialEq for Regex {
    // Equality is defined purely on the pattern string; the lazily
    // compiled regex is deliberately ignored.
    fn eq(&self, other: &Regex) -> bool {
        self.regex_str == other.regex_str
    }
}
impl Eq for Regex {}
impl Serialize for Regex {
    // Serializes as just the pattern string (mirrors Deserialize below).
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        serializer.serialize_str(&self.regex_str)
    }
}
impl<'de> Deserialize<'de> for Regex {
    // Reads the pattern string; compilation is deferred to first use.
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        let regex_str = String::deserialize(deserializer)?;
        Ok(Regex::new(regex_str))
    }
}
impl Region {
    /// Creates an empty region backed by the active regex implementation.
    pub fn new() -> Self {
        Self {
            region: regex_impl::new_region(),
        }
    }
    /// Get the start/end positions of the capture group with given index.
    ///
    /// If there is no match for that group or the index does not correspond to a group, `None` is
    /// returned. The index 0 returns the whole match.
    pub fn pos(&self, index: usize) -> Option<(usize, usize)> {
        self.region.pos(index)
    }
}
impl Default for Region {
    // Delegates to `new()` so the two constructors cannot diverge.
    fn default() -> Self {
        Self::new()
    }
}
#[cfg(feature = "regex-onig")]
// Oniguruma-backed implementation, selected by the `regex-onig` feature.
mod regex_impl {
    pub use onig::Region;
    use onig::{MatchParam, RegexOptions, SearchOptions, Syntax};
    use std::error::Error;
    #[derive(Debug)]
    pub struct Regex {
        regex: onig::Regex,
    }
    // Eight slots covers typical capture-group counts without reallocating.
    pub fn new_region() -> Region {
        Region::with_capacity(8)
    }
    impl Regex {
        pub fn new(regex_str: &str) -> Result<Regex, Box<dyn Error + Send + Sync + 'static>> {
            let result = onig::Regex::with_options(
                regex_str,
                RegexOptions::REGEX_OPTION_CAPTURE_GROUP,
                Syntax::default(),
            );
            match result {
                Ok(regex) => Ok(Regex { regex }),
                Err(error) => Err(Box::new(error)),
            }
        }
        // Anchored at position 0, no capture region requested.
        pub fn is_match(&self, text: &str) -> bool {
            self.regex
                .match_with_options(text, 0, SearchOptions::SEARCH_OPTION_NONE, None)
                .is_some()
        }
        pub fn search(
            &self,
            text: &str,
            begin: usize,
            end: usize,
            region: Option<&mut Region>,
        ) -> bool {
            let matched = self.regex.search_with_param(
                text,
                begin,
                end,
                SearchOptions::SEARCH_OPTION_NONE,
                region,
                MatchParam::default(),
            );
            // If there's an error during search, treat it as non-matching.
            // For example, in case of catastrophic backtracking, onig should
            // fail with a "retry-limit-in-match over" error eventually.
            matches!(matched, Ok(Some(_)))
        }
    }
}
// If both regex-fancy and regex-onig are requested, this condition makes regex-onig win.
#[cfg(all(feature = "regex-fancy", not(feature = "regex-onig")))]
mod regex_impl {
    use std::error::Error;

    /// A compiled `fancy_regex` pattern.
    #[derive(Debug)]
    pub struct Regex {
        regex: fancy_regex::Regex,
    }

    /// Capture-group positions recorded by the most recent successful search.
    #[derive(Clone, Debug, Eq, PartialEq)]
    pub struct Region {
        positions: Vec<Option<(usize, usize)>>,
    }

    /// Creates a region pre-sized for a typical number of capture groups.
    pub fn new_region() -> Region {
        Region {
            positions: Vec::with_capacity(8),
        }
    }

    impl Regex {
        /// Compiles `regex_str`, boxing any compile error.
        pub fn new(regex_str: &str) -> Result<Regex, Box<dyn Error + Send + Sync + 'static>> {
            fancy_regex::Regex::new(regex_str)
                .map(|regex| Regex { regex })
                .map_err(|error| error.into())
        }

        /// Returns true when the pattern matches anywhere in `text`.
        pub fn is_match(&self, text: &str) -> bool {
            // Errors are treated as non-matches
            self.regex.is_match(text).unwrap_or(false)
        }

        /// Searches `text[..end]` starting at `begin`, optionally recording
        /// capture positions into `region`.
        pub fn search(
            &self,
            text: &str,
            begin: usize,
            end: usize,
            region: Option<&mut Region>,
        ) -> bool {
            // If there's an error during search, treat it as non-matching.
            // For example, in case of catastrophic backtracking, fancy-regex should
            // fail with an error eventually.
            match self.regex.captures_from_pos(&text[..end], begin) {
                Ok(Some(captures)) => {
                    if let Some(region) = region {
                        region.init_from_captures(&captures);
                    }
                    true
                }
                _ => false,
            }
        }
    }

    impl Region {
        /// Replaces the stored positions with those of `captures`
        /// (index 0 is the whole match).
        fn init_from_captures(&mut self, captures: &fancy_regex::Captures) {
            self.positions.clear();
            self.positions.extend(
                (0..captures.len()).map(|i| captures.get(i).map(|m| (m.start(), m.end()))),
            );
        }

        /// Start/end of capture group `i`, or `None` if absent or out of range.
        pub fn pos(&self, i: usize) -> Option<(usize, usize)> {
            self.positions.get(i).copied().flatten()
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    /// Compilation is lazy: nothing is compiled at construction, and the
    /// compiled program is cached after the first use.
    #[test]
    fn caches_compiled_regex() {
        let regex = Regex::new(String::from(r"\w+"));
        // Construction alone must not compile anything.
        assert!(regex.regex.get().is_none());
        assert!(regex.is_match("test"));
        // The first match populated the cache.
        assert!(regex.regex.get().is_some());
    }
    /// Regexes round-trip through serde as their bare pattern string.
    #[test]
    fn serde_as_string() {
        let pattern: Regex = serde_json::from_str("\"just a string\"").unwrap();
        assert_eq!(pattern.regex_str(), "just a string");
        let back_to_str = serde_json::to_string(&pattern).unwrap();
        assert_eq!(back_to_str, "\"just a string\"");
    }
}
|
use aoc20::days::day8;
#[test]
fn day8_parse() {
    // Each case pairs a raw program line with the opcode/argument pair it
    // should parse into.
    let cases = [("nop +0", "nop", 0), ("acc -99", "acc", -99), ("jmp +4", "jmp", 4)];
    for &(line, op, arg) in cases.iter() {
        assert_eq!(
            day8::Instruction::parse(line),
            day8::Instruction::new(String::from(op), arg)
        );
    }
}
|
use std::iter::Iterator;
use std::vec::Vec;
/// An iterator adaptor with push-back support: items can be returned to the
/// stream with [`push`](BufferedIterator::push) and are yielded again (in
/// LIFO order) before the underlying iterator is advanced.
pub struct BufferedIterator<T, TIter: Iterator<Item = T>> {
    // The wrapped source of items.
    itr: TIter,
    // Pushed-back items; consulted (LIFO) before `itr`.
    buf: Vec<T>,
}
impl<T, TIter: Iterator<Item = T>> BufferedIterator<T, TIter> {
    /// Wraps `itr` with an initially empty push-back buffer.
    pub fn new(itr: TIter) -> BufferedIterator<T, TIter> {
        BufferedIterator {
            itr,
            buf: Vec::new(),
        }
    }
    /// Returns the next item: the most recently pushed-back one if any,
    /// otherwise the next item of the underlying iterator.
    pub fn pop(&mut self) -> Option<T> {
        self.buf.pop().or_else(|| self.itr.next())
    }
    /// Pushes `item` back so that the next `pop` (or `next`) returns it.
    pub fn push(&mut self, item: T) {
        self.buf.push(item);
    }
}
/// Standard `Iterator` support so the adaptor composes with iterator
/// combinators; `next` behaves exactly like [`pop`](BufferedIterator::pop).
impl<T, TIter: Iterator<Item = T>> Iterator for BufferedIterator<T, TIter> {
    type Item = T;
    fn next(&mut self) -> Option<T> {
        self.pop()
    }
}
|
use bonuses;
/// An object which tracks dodge bonus values.
pub struct DodgeBonus {
    // Underlying tracker that stacks the individual bonus amounts.
    tracker: bonuses::StackingTracker,
}
impl DodgeBonus {
    /// Create an instance of DodgeBonus with an empty tracker.
    pub fn new() -> DodgeBonus {
        DodgeBonus {
            tracker: bonuses::StackingTracker::new(),
        }
    }
}
/// `Default` mirrors `new()` so `DodgeBonus` works with `Default`-based
/// construction (e.g. `..Default::default()`).
impl Default for DodgeBonus {
    fn default() -> Self {
        DodgeBonus::new()
    }
}
impl bonuses::BonusTracker for DodgeBonus {
/// Returns the total bonus.
fn total(&self) -> u8 {
return self.tracker.total();
}
/// Adds a value.
fn add(&mut self, amt: u8) -> &mut DodgeBonus {
self.tracker.add(amt);
return self;
}
/// Removes a value.
fn remove(&mut self, amt: u8) -> &mut DodgeBonus {
self.tracker.remove(amt);
return self;
}
} |
extern crate atty;
extern crate rayon;
extern crate ring;
extern crate serde_json;
extern crate solana;
use atty::{is, Stream};
use solana::mint::{Mint, MintDemo};
use std::io;
use std::process::exit;
fn main() {
let mut input_text = String::new();
if is(Stream::Stdin) {
eprintln!("nothing found on stdin, expected a token number");
exit(1);
}
io::stdin().read_line(&mut input_text).unwrap();
let trimmed = input_text.trim();
let tokens = trimmed.parse::<i64>().unwrap();
let mint = Mint::new(tokens);
let tokens_per_user = 1_000;
let num_accounts = tokens / tokens_per_user;
let demo = MintDemo { mint, num_accounts };
println!("{}", serde_json::to_string(&demo).unwrap());
}
|
use crate::DATABASE;
// use rusqlite::NO_PARAMS;
use rusqlite::{Connection, Result};
#[derive(Serialize, Deserialize, Debug)]
pub struct User {
    /// Randomly generated device id; `None` until `generate_id` succeeds.
    pub id: Option<String>,
    /// Public key; also the source alphabet for generated device ids.
    pub public_key: String,
    // Stored verbatim in the `users` table (see `insert`); semantics of the
    // token are defined by the caller — presumably a push token. TODO confirm.
    pub token: String,
    // Stored verbatim in the `users` table; presumably the client platform.
    pub platform: String,
}
// Implements user
impl User {
// checks if the user exists
// true: if the user does exist
// false: if the user doesn't exist
pub fn exists(&self) -> Result<bool> {
let mut count = 0;
let conn = Connection::open(DATABASE.to_owned())?;
let mut stmt = conn.prepare("SELECT COUNT(*) FROM users WHERE public_key = ?1")?;
stmt.query_row(&[&self.public_key], |row| Ok(count = row.get(0)?))?;
if count > 0 {
return Ok(true);
}
return Ok(false);
}
// get the users public key
pub fn get_key(id: &String) -> Result<(String)> {
let conn = Connection::open(DATABASE.to_owned())?;
let mut stmt = conn.prepare(
"SELECT public_key FROM users WHERE device_id = ?1 ORDER BY id DESC LIMIT 1",
)?;
Ok(stmt.query_row(&[&id], |row| row.get(0))?)
}
// insert a new user into the database
pub fn insert(&self) -> Result<()> {
let conn = Connection::open(DATABASE.to_owned())?;
conn.execute(
"INSERT INTO users (device_id, public_key, token, platform) values (?1, ?2, ?3, ?4)",
&[
self.id.as_ref().unwrap(),
&self.public_key,
&self.token,
&self.platform,
],
)?;
Ok(())
}
// Generate a random ID for a user based on their public key
pub fn generate_id(&mut self) -> std::result::Result<i32, &str> {
let pk_length = self.public_key.chars().count();
if pk_length < 6 {
return Err("invalid public key");
}
use rand::Rng;
let charset: &[u8] = self.public_key.as_bytes();
let mut rng = rand::thread_rng();
// BY default we assume that this key
// already exists in the database until we're proven wrong
let mut exists = true;
// We check every iteration of the device_id
// to see if it exists in the database already,
// this ensures uniqueness
while exists == true {
let device_id: String = (0..crate::USER_ID_LENGTH)
.map(|_| {
let idx = rng.gen_range(0, charset.len());
// This is safe because `idx` is in range of `charset`
char::from(unsafe { *charset.get_unchecked(idx) })
})
.collect();
// check if it exists in the database
match device_id_exists(&device_id) {
Ok(result) => {
exists = result;
if result == false {
self.id = std::prelude::v1::Option::Some(device_id);
}
}
Err(_) => {
return Err("cannot connect to sqlite");
}
}
}
// If not, set and then return OK
return Ok(0);
}
}
//// Checks if both the from and the to device exist or not
//// true: Both exist
//// false: At least one doesn't exist
//pub fn check_from_to_exists(from: &String, to: &String) -> Result<(), String> {
// // Check if the from device ID exists
// match device_id_exists(from) {
// Ok(result) => {
// if result == false {
// return Err("from: ".to_owned() + from + " doesn\'t exist");
// }
// }
// Err(err) => {
// return Err(err.to_string());
// }
// };
//
// // Check if the to device ID exists
// match device_id_exists(to) {
// Ok(result) => {
// if result == false {
// return Err( "to: ".to_owned() + to + " doesn\'t exist");
// }
// }
// Err(err) => {
// return Err(err.to_string());
// }
// };
//
// Ok(())
//}
// checks if the device_id exists
// true: if the device_id does exist
// false: if the device_id doesn't exist
// checks if the device_id exists
// true: if the device_id does exist
// false: if the device_id doesn't exist
pub fn device_id_exists(id: &String) -> Result<bool> {
    let conn = Connection::open(DATABASE.to_owned())?;
    let mut stmt = conn.prepare("SELECT COUNT(*) FROM users WHERE device_id = ?1")?;
    // COUNT(*) always yields exactly one row; read the count directly instead
    // of assigning through a captured variable.
    let count: i64 = stmt.query_row(&[id], |row| row.get(0))?;
    Ok(count > 0)
}
|
#[cfg(test)]
mod test {
    use nom::IResult;
    use parser::program;
    use std::collections::HashMap;
    use ast::{Datatype, TypeInfo, VariableStore};
    use test::Bencher;
    use std::rc::Rc;

    /// Parses `$input` with the `program` parser and evaluates to the AST,
    /// panicking (failing the enclosing test) on a parse error or any
    /// incomplete parser state.
    ///
    /// Replaces the identical five-line `match` boilerplate previously
    /// repeated in every test and bench below.
    macro_rules! parse_or_panic {
        ($input:expr) => {
            match program($input.as_bytes()) {
                IResult::Done(_, v) => v,
                IResult::Error(e) => panic!("{}", e),
                _ => panic!(),
            }
        };
    }

    #[test]
    fn program_parse_and_execute_integration_test_1() {
        let mut map: VariableStore = VariableStore::new();
        let input_string = "
let x := 7
fn test_function ( a : Number ) -> Number { a + 8 }
test_function(x)";
        let ast = parse_or_panic!(input_string);
        assert_eq!(Datatype::Number(15), *ast.evaluate(&mut map).unwrap());
    }

    #[test]
    fn program_parse_and_execute_integration_test_2() {
        let mut map: VariableStore = VariableStore::new();
        let input_string = "
fn test_function ( a : Number ) -> Number { a + 8 }
test_function(8)";
        let ast = parse_or_panic!(input_string);
        assert_eq!(Datatype::Number(16), *ast.evaluate(&mut map).unwrap());
    }

    #[test]
    fn program_parse_and_execute_integration_test_3() {
        let mut map: VariableStore = VariableStore::new();
        let input_string = "
fn test_function ( a : Number ) -> Number { a + 8 }
test_function( 6 + 2 )";
        let ast = parse_or_panic!(input_string);
        assert_eq!(Datatype::Number(16), *ast.evaluate(&mut map).unwrap());
    }

    /// Test multiple line functions
    #[test]
    fn program_parse_and_execute_integration_test_4() {
        let mut map: VariableStore = VariableStore::new();
        let input_string = "
fn test_function ( a : Number ) -> Number {
a + 8
}
test_function(8)";
        let ast = parse_or_panic!(input_string);
        assert_eq!(Datatype::Number(16), *ast.evaluate(&mut map).unwrap());
    }

    #[test]
    fn program_multiple_parameter_function_integration_test() {
        let mut map: VariableStore = VariableStore::new();
        let input_string = "
fn add_two_numbers ( a : Number, b : Number) -> Number {
a + b
}
add_two_numbers(8, 3)";
        let ast = parse_or_panic!(input_string);
        assert_eq!(Datatype::Number(11), *ast.evaluate(&mut map).unwrap());
    }

    #[test]
    fn program_function_internals_does_not_clobber_outer_stack_integration_test() {
        let mut map: VariableStore = VariableStore::new();
        let input_string = "
let a := 2
fn add_two_numbers ( a : Number, b : Number) -> Number {
let a := a + b
a
}
add_two_numbers(8, 3)
a
";
        let ast = parse_or_panic!(input_string);
        assert_eq!(Datatype::Number(2), *ast.evaluate(&mut map).unwrap());
    }

    /// Test the assignment of a string, then passing it into a function that takes a string.
    /// The function should then add a number to the string, creating a new string.
    #[test]
    fn program_string_coercion_integration_test() {
        let mut map: VariableStore = VariableStore::new();
        let input_string = r##"
let x := "Hi "
fn test_function ( a : String ) -> String { a + 5 }
test_function(x)"##;
        let ast = parse_or_panic!(input_string);
        assert_eq!(
            Datatype::String("Hi 5".to_string()),
            *ast.evaluate(&mut map).unwrap()
        );
    }

    #[test]
    fn program_if_test() {
        let mut map: VariableStore = VariableStore::new();
        // the if body should reassign x
        let input_string = r##"
let x := 3
if x == 3 {
let x := 40
}
x"##;
        let ast = parse_or_panic!(input_string);
        assert_eq!(Datatype::Number(40), *ast.evaluate(&mut map).unwrap());
    }

    #[test]
    fn program_while_loop_test() {
        let mut map: VariableStore = VariableStore::new();
        // the while loop should reassign x to be something else;
        let input_string = r##"
let x := 3
while x == 3 {
let x := 40
}
x"##;
        let ast = parse_or_panic!(input_string);
        assert_eq!(Datatype::Number(40), *ast.evaluate(&mut map).unwrap());
    }

    #[test]
    fn program_while_loop_false_test() {
        let mut map: VariableStore = VariableStore::new();
        // the while body should not execute
        let input_string = r##"
let x := 42
while x == 3 {
let x := x + 1
}
x"##;
        let ast = parse_or_panic!(input_string);
        assert_eq!(Datatype::Number(42), *ast.evaluate(&mut map).unwrap());
    }

    #[test]
    fn program_parse_literal_test() {
        let mut map: VariableStore = VariableStore::new();
        // a bare literal evaluates to itself
        let input_string = r##"
32
"##;
        let ast = parse_or_panic!(input_string);
        assert_eq!(Datatype::Number(32), *ast.evaluate(&mut map).unwrap());
    }

    #[test]
    fn program_parse_and_verify_array_test() {
        let mut map: VariableStore = VariableStore::new();
        let input_string = r##"
[23, 43, 11]
"##;
        let ast = parse_or_panic!(input_string);
        assert_eq!(
            Datatype::Array {
                value: vec![
                    Rc::new(Datatype::Number(23)),
                    Rc::new(Datatype::Number(43)),
                    Rc::new(Datatype::Number(11)),
                ],
                type_: TypeInfo::Number,
            },
            *ast.evaluate(&mut map).unwrap()
        );
    }

    #[test]
    fn program_parse_struct_and_something_after_it() {
        let mut map: VariableStore = VariableStore::new();
        let input_string = r##"
struct MyStruct {
a : Number
}
3 + 3
"##;
        let ast = parse_or_panic!(input_string);
        assert_eq!(Datatype::Number(6), *ast.evaluate(&mut map).unwrap())
    }

    #[test]
    fn program_parse_struct_and_access_field() {
        let mut map: VariableStore = VariableStore::new();
        let input_string = r##"
struct MyStruct {
a : Number
}
let instance := new MyStruct {
a: 8
}
instance.a
"##;
        let ast = parse_or_panic!(input_string);
        assert_eq!(Datatype::Number(8), *ast.evaluate(&mut map).unwrap())
    }

    #[test]
    fn program_parse_struct_and_access_field_via_assignment() {
        let mut map: VariableStore = VariableStore::new();
        let input_string = r##"
struct MyStruct {
a : Number
}
let instance := new MyStruct {
a: 8
}
let value_from_struct := instance.a
value_from_struct
"##;
        let ast = parse_or_panic!(input_string);
        assert_eq!(Datatype::Number(8), *ast.evaluate(&mut map).unwrap())
    }

    #[test]
    fn program_parse_struct_with_multiple_fields_and_access_fields_in_expression() {
        let mut map: VariableStore = VariableStore::new();
        let input_string = r##"
struct MyStruct {
a : Number
b : Number
}
let instance := new MyStruct {
a: 8
b: 10
}
instance.a + instance.b * instance.b
"##;
        let ast = parse_or_panic!(input_string);
        assert_eq!(Datatype::Number(180), *ast.evaluate(&mut map).unwrap())
    }

    #[test]
    fn program_parse_struct_with_multiple_fields_and_access_fields_in_function() {
        let mut map: VariableStore = VariableStore::new();
        let input_string = r##"
struct MyStruct {
a : Number
b : Number
}
let instance := new MyStruct {
a: 8
b: 10
}
fn addContents( s: MyStruct ) -> Number {
s.a + s.b
}
addContents( instance )
"##;
        let ast = parse_or_panic!(input_string);
        assert_eq!(Datatype::Number(18), *ast.evaluate(&mut map).unwrap())
    }

    #[test]
    fn program_parse_struct_with_multiple_fields_and_return_struct_from_function_with_internal_assignment(){
        let mut map: VariableStore = VariableStore::new();
        let input_string = r##"
struct MyStruct {
a : Number
b : Number
}
fn create_new_MyStruct( value: Number ) -> MyStruct {
let c := new MyStruct {
a: 8
b: value
}
c
}
create_new_MyStruct( 3 )
"##;
        let ast = parse_or_panic!(input_string);
        // Expected: MyStruct { a: 8, b: 3 }
        let mut struct_map: HashMap<String, Datatype> = HashMap::new();
        struct_map.insert("a".to_string(), Datatype::Number(8));
        struct_map.insert("b".to_string(), Datatype::Number(3));
        assert_eq!(
            Datatype::Struct { map: struct_map },
            *ast.evaluate(&mut map).unwrap()
        )
    }

    #[test]
    fn program_parse_struct_with_multiple_fields_and_return_struct_from_function() {
        let mut map: VariableStore = VariableStore::new();
        let input_string = r##"
struct MyStruct {
a : Number
b : Number
}
fn create_new_MyStruct( value: Number ) -> MyStruct {
new MyStruct {
a: 8
b: value
}
}
create_new_MyStruct( 3 )
"##;
        let ast = parse_or_panic!(input_string);
        // Expected: MyStruct { a: 8, b: 3 }
        let mut struct_map: HashMap<String, Datatype> = HashMap::new();
        struct_map.insert("a".to_string(), Datatype::Number(8));
        struct_map.insert("b".to_string(), Datatype::Number(3));
        assert_eq!(
            Datatype::Struct { map: struct_map },
            *ast.evaluate(&mut map).unwrap()
        )
    }

    #[test]
    fn program_verify_that_struct_maps_dont_interfere_with_global_stack_map() {
        let mut map: VariableStore = VariableStore::new();
        let input_string = r##"
struct MyStruct {
a : Number
b : Number
}
let a := 3
fn create_new_MyStruct( value: Number ) -> MyStruct {
new MyStruct {
a: 8
b: value
}
}
create_new_MyStruct( a )
"##;
        let ast = parse_or_panic!(input_string);
        // The global `a := 3` must survive as the `b` argument while the
        // struct's own `a` field stays 8.
        let mut struct_map: HashMap<String, Datatype> = HashMap::new();
        struct_map.insert("a".to_string(), Datatype::Number(8));
        struct_map.insert("b".to_string(), Datatype::Number(3));
        assert_eq!(
            Datatype::Struct { map: struct_map },
            *ast.evaluate(&mut map).unwrap()
        )
    }

    #[test]
    fn program_verify_that_struct_maps_dont_interfere_with_function_maps() {
        let mut map: VariableStore = VariableStore::new();
        let input_string = r##"
struct MyStruct {
a : Number
b : Number
}
let a := 3
fn create_new_MyStruct( a: Number ) -> MyStruct {
new MyStruct {
a: 8
b: a
}
}
create_new_MyStruct( a )
"##;
        let ast = parse_or_panic!(input_string);
        // The parameter `a` shadows the struct field name without clobbering it.
        let mut struct_map: HashMap<String, Datatype> = HashMap::new();
        struct_map.insert("a".to_string(), Datatype::Number(8));
        struct_map.insert("b".to_string(), Datatype::Number(3));
        assert_eq!(
            Datatype::Struct { map: struct_map },
            *ast.evaluate(&mut map).unwrap()
        )
    }

    #[test]
    fn program_with_struct_functions_integration_test() {
        let mut map: VariableStore = VariableStore::new();
        let input_string = r##"
struct MyStruct {
a : Number
b : Number
}
let a := 3
fn create_new_MyStruct( value: Number ) -> MyStruct {
new MyStruct {
a: 8
b: value
}
}
fn addContents( s: MyStruct ) -> Number {
s.a + s.b
}
let instance := create_new_MyStruct( a )
addContents( instance )
"##;
        let ast = parse_or_panic!(input_string);
        assert_eq!(Datatype::Number(11), *ast.evaluate(&mut map).unwrap())
    }

    #[test]
    fn program_with_a_conditional_in_a_function_integration_test() {
        let mut map: VariableStore = VariableStore::new();
        let input_string = r##"
let a := 3
fn check_if_three( x: Number ) -> Number {
if x == 3 {
3
} else {
0
}
}
check_if_three( a )
"##;
        let ast = parse_or_panic!(input_string);
        assert_eq!(Datatype::Number(3), *ast.evaluate(&mut map).unwrap())
    }

    #[test]
    fn program_with_a_conditional_in_a_function_2_integration_test() {
        let mut map: VariableStore = VariableStore::new();
        let input_string = r##"
let a := 2
fn check_if_three( x: Number ) -> Number {
if x == 3 {
3
} else {
0
}
}
check_if_three( a )
"##;
        let ast = parse_or_panic!(input_string);
        assert_eq!(Datatype::Number(0), *ast.evaluate(&mut map).unwrap())
    }

    #[test]
    fn for_loop_eval() {
        let mut map: VariableStore = VariableStore::new();
        let input_string = r#"
let b := 0
for i in [1,2,3] {
let b := b + i
}
b
"#;
        use std_functions::add_std_functions;
        add_std_functions(&mut map);
        let ast = parse_or_panic!(input_string);
        assert_eq!(Datatype::Number(6), *ast.evaluate(&mut map).unwrap());
    }

    mod benches {
        use super::*;

        #[bench]
        fn simple_program_execute_bench(b: &mut Bencher) {
            use super::super::super::test_constants::SIMPLE_PROGRAM_INPUT_1;
            let mut map: VariableStore = VariableStore::new();
            // Parse once; measure evaluation only.
            let ast = parse_or_panic!(SIMPLE_PROGRAM_INPUT_1);
            b.iter(|| {
                assert_eq!(Datatype::Number(15), *ast.evaluate(&mut map).unwrap())
            })
        }

        #[bench]
        fn simple_program_parse_and_execute_bench(b: &mut Bencher) {
            use super::super::super::test_constants::SIMPLE_PROGRAM_INPUT_1;
            // Measure parse + evaluation together.
            b.iter(|| {
                let mut map: VariableStore = VariableStore::new();
                let ast = parse_or_panic!(SIMPLE_PROGRAM_INPUT_1);
                assert_eq!(Datatype::Number(15), *ast.evaluate(&mut map).unwrap())
            })
        }

        #[bench]
        fn while_loop_parse_and_execute_program_bench(b: &mut Bencher) {
            fn loop_1000_times_program() {
                let mut map: VariableStore = VariableStore::new();
                let input_string = r##"
let x := 0
while x < 1000 {
let x := x + 1
}
x
"##;
                let ast = parse_or_panic!(input_string);
                assert_eq!(Datatype::Number(1000), *ast.evaluate(&mut map).unwrap());
            }
            b.iter(|| loop_1000_times_program());
        }

        #[bench]
        fn while_loop_with_useless_conditionals_parse_and_execute_program_bench(b: &mut Bencher) {
            fn loop_1000_times_program() {
                let mut map: VariableStore = VariableStore::new();
                let input_string = r##"
let x := 0
while x < 1000 {
1 * 3
1 * 40000
34234 % 7
let x := x + 1
}
x
"##;
                let ast = parse_or_panic!(input_string);
                assert_eq!(Datatype::Number(1000), *ast.evaluate(&mut map).unwrap());
            }
            b.iter(|| loop_1000_times_program());
        }

        #[bench]
        fn for_loop_parse_and_execute_program_bench(b: &mut Bencher) {
            fn loop_1000_times_program() {
                let mut map: VariableStore = VariableStore::new();
                let input_string = r##"
let c := 0
let x := [1..1001]
for a in x {
let c := c + a
}
c
"##;
                let ast = parse_or_panic!(input_string);
                assert_eq!(Datatype::Number(500500), *ast.evaluate(&mut map).unwrap());
            }
            b.iter(|| loop_1000_times_program());
        }

        #[bench]
        fn for_loop_alt_parse_and_execute_program_bench(b: &mut Bencher) {
            fn loop_1000_times_program() {
                let mut map: VariableStore = VariableStore::new();
                let input_string = r##"
let c := 0
for a in [1..1001] {
let c := c + a
}
c
"##;
                let ast = parse_or_panic!(input_string);
                assert_eq!(Datatype::Number(500500), *ast.evaluate(&mut map).unwrap());
            }
            b.iter(|| loop_1000_times_program());
        }

        #[bench]
        fn while_loop_similar_to_for_parse_and_execute_program_bench(b: &mut Bencher) {
            fn loop_1000_times_program() {
                let mut map: VariableStore = VariableStore::new();
                let input_string = r##"
let c := 0
let index := 0
let array := [1..1001]
while index < 1000 {
let part := array[index]
let c := c + part
let index := index + 1
}
c
"##;
                let ast = parse_or_panic!(input_string);
                assert_eq!(Datatype::Number(500500), *ast.evaluate(&mut map).unwrap());
            }
            b.iter(|| loop_1000_times_program());
        }

        #[bench]
        fn while_loop_similar_to_for_no_array_access_parse_and_execute_program_bench(b: &mut Bencher) {
            fn loop_1000_times_program() {
                let mut map: VariableStore = VariableStore::new();
                let input_string = r##"
let c := 0
let index := 0
let array := [1..1001]
while index < 1000 {
let c := c + 1
let index := index + 1
}
c
"##;
                let ast = parse_or_panic!(input_string);
                assert_eq!(Datatype::Number(1000), *ast.evaluate(&mut map).unwrap());
            }
            b.iter(|| loop_1000_times_program());
        }

        #[bench]
        fn while_loop_similar_to_for_no_array_access_with_id_conditional_parse_and_execute_program_bench(b: &mut Bencher) {
            fn loop_1000_times_program() {
                let mut map: VariableStore = VariableStore::new();
                let input_string = r##"
let c := 0
let index := 0
let array := [1..1001]
let length := 1000
while index < length {
let c := c + 1
let index := index + 1
}
c
"##;
                let ast = parse_or_panic!(input_string);
                assert_eq!(Datatype::Number(1000), *ast.evaluate(&mut map).unwrap());
            }
            b.iter(|| loop_1000_times_program());
        }

        #[bench]
        fn similar_to_for_no_array_access_without_while_loop_parse_and_execute_program_bench(b: &mut Bencher) {
            fn loop_1000_times_program() {
                let mut map: VariableStore = VariableStore::new();
                let input_string = r##"
let c := 0
let index := 0
let array := [1..1001]
let length := 1000
c
"##;
                let ast = parse_or_panic!(input_string);
                assert_eq!(Datatype::Number(0), *ast.evaluate(&mut map).unwrap());
            }
            b.iter(|| loop_1000_times_program());
        }

        #[bench]
        fn array_range_then_access_parse_and_execute_program_bench(b: &mut Bencher) {
            fn loop_1000_times_program() {
                let mut map: VariableStore = VariableStore::new();
                let input_string = r##"
let a := [1..1001]
a[0]
5
"##;
                let ast = parse_or_panic!(input_string);
                assert_eq!(Datatype::Number(5), *ast.evaluate(&mut map).unwrap());
            }
            b.iter(|| loop_1000_times_program());
        }

        #[bench]
        fn array_range_parse_and_execute_program_bench(b: &mut Bencher) {
            fn loop_1000_times_program() {
                let mut map: VariableStore = VariableStore::new();
                let input_string = r##"
let a := [1..1001]
5
"##;
                let ast = parse_or_panic!(input_string);
                // Only evaluation cost is of interest here; the result is ignored.
                ast.evaluate(&mut map).unwrap();
            }
            b.iter(|| loop_1000_times_program());
        }
    }
}
|
/*
eval.rs: holds the functionality of the evaluator of the Interpreter!
The module "eval" is a submodule of "main" and contains the two functions eval and apply which are
the core of the Interpreter. "eval" is the Interface to the Evaluator and is everything that needs
to be called to evaluate an expression.
*/
// load important functionality of other sibling-modules
use super::env::{new_env, search, set, RlEnv};
use super::types::{error, RlReturn, RlType};
use crate::env::new_env_bound;
// load needed Rust modules
use std::rc::Rc;
/**
Is the core function of the Interpreter, it takes an AST and tries to evaluate it.
1. Check if given AST is a List
1.1. If it's a List, look at what the first element of the List is
1.1.1 If it's a special form, act accordingly(special forms have individual, non
standard evaluation behaviour)
1.1.2. If It's no special form treat the first argument as function, treat the rest of
the elements as arguments of this function -> first evaluate arguments and then
apply to function(that's normal evaluation behaviour)
1.2 If it's an empty list, just return it unchanged (nothing to evaluate)
2. If the given AST is not a List, then it's atomic
2.1 Symbols will be looked up in the environment
2.2 Integers, Bool, Nil and Strings are self-evaluating
Arguments: expression - Abstract Syntax Tree(AST) that represents the expression to evaluate
environment - the environment the expression is evaluated in
Returns: a value of type RlReturn - an RLError in case of an error, otherwise the resulting AST (the result of the whole evaluation)
*/
pub fn eval(expression: RlType, environment: RlEnv) -> RlReturn {
match expression.clone() {
// If given expression is a List
RlType::List(content) => {
// if list is empty return empty list back unchanged
return if content.len() == 0 {
Ok(expression)
} else {
// if list is not empty first check first element if it is Symbol triggering special form
let switcher = &content[0];
match switcher {
// quote special form: takes exactly one argument and don't evaluates it
RlType::Symbol(s) if s == "quote" => Ok(content[1].clone()),
// eval special form: takes exactly one argument and evaluates is (needed for Homoiconicity)
RlType::Symbol(s) if s == "eval" => {
eval(content[1].clone(), environment.clone())
}
// cond special form: takes a list of pairs. Each pair has a predicate and an according
// expression. Predicates are evaluated in order and the expression for
// the first predicate to be true is evaluated.
RlType::Symbol(s) if s == "cond" => {
let pairs = content[1..].to_vec().clone();
// iterate over pairs
for pair in pairs.iter() {
match pair {
// if we have valid pair evaluate predicate
RlType::List(l) if l.len() == 2 => {
match eval(l[0].clone(), environment.clone())? {
RlType::Bool(true) => {
return eval(l[1].clone(), environment.clone());
}
_ => {
continue;
}
}
},
// else pattern is invalid
_ => return Err(error("Error: Wrong pattern for cond")),
}
}
// if no predicate is evaluated to be true, return nil
return Ok(RlType::Nil);
}
// define special form: takes a symbol-name and a target expression and maps the
// symbol-name to the (evaluated)expression in current
// environment. if symbol-name already defined, target is overwritten.
RlType::Symbol(s) if s == "define" => {
return if content[1..].len() != 2 {
Err(error("Error: define takes exactly 2 ars"))
} else {
let key = match &content[1] {
RlType::Symbol(s) => s.to_string(),
_ => return Err(error("first arg of define must be a symbol")),
};
let target = eval(content[2].clone(), environment.clone())?;
set(&environment, key.clone(), target.clone());
Ok(target)
}
}
// let special form: takes a list of pairs and an expression. The list of pairs
// contains (symbol, value) pairs that will be defined in the
// expression that was given as second environment.
// Creates a new environment with the bindings and evaluates expression.
RlType::Symbol(s) if s == "let" => {
return if content[1..].len() != 2 {
Err(error("Error: let takes exactly 2 args!"))
} else {
// check if first argument is a list
let bindings_list = match &content[1] {
RlType::List(l) => Ok(l),
_ => Err(error("Error: Arguments of binding lists must be pairs!")),
}?;
// create new sub-environment with current environment as outer environment
let new_env = new_env(Some(environment));
// iterate over present binding pairs in the given List
for binding in bindings_list.iter() {
// check if element is a pair
let b = match &binding {
RlType::List(l) if l.len() == 2 => Ok(l),
_ => Err(error(
"Error: bindings in let needs to be lists of len 2",
)),
}?;
// check if first element of pair is a symbol-name
let key = match &b[0] {
RlType::Symbol(s) => s.to_string(),
_ => return Err(error("first arg of define must be a symbol")),
};
// map symbol to evaluated value in the new environment
set(&new_env, key, eval(b[1].clone(), new_env.clone())?);
}
// Evaluate body with new environment
eval(content[2].clone(), new_env.clone())
};
}
// load special form: takes exactly one argument which is a string. This string
// will be treated as filename. Try to load and evaluate content
// of the file using the load() function in main.rs
RlType::Symbol(s) if s == "load" => {
// check if we have exactly one argument
if content[1..].len() != 1 {
return Err(error("load needs exactly one argument which is a string"));
}
// check if filename is a string
let filename = match &content[1] {
RlType::String(s) => s,
_ => return Err(error("load a string as argument!")),
};
// use load() in main.rs to process file
super::load(filename, environment.clone());
// return nil since something needs to be returned
Ok(RlType::Nil)
}
// do special form: takes a list of expressions, evaluates them in-order and
// returns the value of the last expression evaluated.
RlType::Symbol(s) if s == "do" => {
// evaluate every expression except the last one
for expression in content[1..content.len() - 1].iter() {
let _ = eval(expression.clone(), environment.clone());
}
// evaluate last expression and return its value
return eval(
content.last().unwrap_or(&RlType::Nil).clone(),
environment.clone(),
);
}
// lambda special form: takes two arguments, a list of formal arguments and an expression
// lambda then creates a function in which the arguments are bound
// to the according symbols in the given expression(=body of function)
RlType::Symbol(s) if s == "lambda" => {
// check if we have a list and an expression
match (content[1].clone(), content[2].clone()) {
(RlType::List(l1), body) => {
// return function object. Stores environment at time of creation
Ok(RlType::SelfDefinedFunc {
env: environment,
params: Rc::new(l1),
body: Rc::new(body)
})
},
_ => Err(error("Error: lambda takes a list of parameters and an s-expression as body!"))
}
}
_ => {
// Else evaluate every subexpression of the list and apply
let mut evaluated = Vec::new();
for element in content.iter() {
evaluated.push(eval(element.clone(), environment.clone())?);
}
apply(evaluated)
}
}
};
}
// If given expression is no List but a Symbol, look up symbol in environment
RlType::Symbol(s) => Ok(search(&environment, s)?),
// Else given expression is self-evaluating
_ => Ok(expression.clone()),
}
}
/**
This function is a helper for the eval function. It takes a list of expressions (that are already
evaluated), treats the first expression as a function, and applies the remaining expressions to it.
The empty-list case can be ignored here because it is caught in eval.
Arguments: args - the list of expressions
Returns: The value produced by the function application, or an Error.
*/
pub fn apply(args: Vec<RlType>) -> RlReturn {
    // Split the callee from its (already evaluated) call arguments.
    let callee = args[0].clone();
    let call_args = args[1..].to_vec();
    match callee {
        // Built-in function (implemented in Rust as part of the StdLib):
        // delegate to it directly.
        RlType::Func(builtin) => builtin(call_args),
        // User-defined (RLisp) function: bind the call arguments to the
        // formal parameters in a child of the environment captured at
        // definition time, then evaluate the body there.
        RlType::SelfDefinedFunc { env, params, body } => {
            let call_env = new_env_bound(Some(env.clone()), (*params).clone(), call_args)?;
            eval((*body).clone(), call_env.clone())
        }
        // Anything else is not callable.
        _ => Err(error("Expected Function to apply!")),
    }
}
|
mod rand_no_trades;
mod real_player_cli;
extern crate lazy_static;
use crate::game::GameState;
use crate::types::*;
use lazy_static::lazy_static;
use serde::{Deserialize, Serialize};
use serde_json;
use std::collections::HashMap;
use std::sync::Mutex;
// Factory signature: each strategy registers a zero-argument constructor
// that boxes a fresh PlayerStrategy instance.
type StrategyConstructor = fn() -> Box<dyn PlayerStrategy>;
lazy_static! {
    // Global name -> constructor table, populated via register_strategy().
    static ref REGISTRY: Mutex<HashMap<String, StrategyConstructor>> = Mutex::new(HashMap::new());
}
// Strategy used for players that have no explicit config entry.
const _DEFAULT_PLAYER_TYPE: &str = "PlayerNoTrades";
/// One entry of the player-configuration input: which strategy to
/// instantiate, plus an opaque strategy-specific JSON config blob.
#[derive(Serialize, Deserialize)]
pub struct PlayerConfig {
    // Key into REGISTRY; must match a name passed to register_strategy().
    player_type: String,
    // Defaults to JSON null when absent; interpreted by the strategy's init().
    #[serde(default)]
    config: serde_json::Value,
}
/// Behavior contract for a game player's trading decisions.
pub trait PlayerStrategy {
    // Initialize the player from the given config.
    fn init(&mut self, player_id: PlayerId, value: &serde_json::Value);
    // Reset the player to the most recent init() state.
    fn reset(&mut self);
    // Trades this player offers when acting as the round lead
    // (presumably one trade per counterparty — confirm with the game loop).
    fn propose_trades_as_lead(&mut self, game_state: &GameState) -> HashMap<PlayerId, Trade>;
    // Trade offered when someone else is the lead; None means no offer.
    fn propose_trade_as_non_lead(&mut self, game_state: &GameState) -> Option<Trade>;
    // Accept/reject decisions as lead (NOTE(review): looks index-aligned
    // with the incoming proposals — confirm ordering against the caller).
    fn accept_trades_as_lead(&mut self, game_state: &GameState) -> Vec<bool>;
    // Accept or reject the lead's proposed trade.
    fn accept_trades_as_non_lead(&mut self, game_state: &GameState, trade: &Trade) -> bool;
}
/// Make `constructor` available under the name `player_type` so that
/// load_strategies() can build players of this type from config.
pub fn register_strategy(player_type: &str, constructor: StrategyConstructor) {
    let mut registry = REGISTRY.lock().unwrap();
    registry.insert(player_type.to_string(), constructor);
}
/// Build one strategy per player.
///
/// The first `configs.len()` players get the strategy named in their config
/// (initialized with that config's JSON blob); the remaining players fall
/// back to `_DEFAULT_PLAYER_TYPE`.
///
/// # Panics
///
/// Panics if there are more configs than players, or if a config names a
/// player_type that was never registered.
pub fn load_strategies(
    configs: &Vec<PlayerConfig>,
    num_players: usize,
) -> Vec<Box<dyn PlayerStrategy>> {
    assert!(
        configs.len() <= num_players,
        "more player configs ({}) than players ({})",
        configs.len(),
        num_players
    );
    let mut strategies: Vec<Box<dyn PlayerStrategy>> = Vec::with_capacity(num_players);
    for i in 0..num_players {
        strategies.push(if let Some(config) = configs.get(i) {
            // Copy the fn pointer out so the registry lock is released
            // before init() runs.
            // unwrap_or_else avoids building the panic message on the
            // happy path (clippy::expect_fun_call).
            let constructor = *REGISTRY
                .lock()
                .unwrap()
                .get(&config.player_type)
                .unwrap_or_else(|| panic!("unknown player_type \"{}\"", config.player_type));
            let mut strategy = constructor();
            strategy.init(i, &config.config);
            strategy
        } else {
            // No config for this slot: use the default strategy.
            REGISTRY.lock().unwrap()[_DEFAULT_PLAYER_TYPE]()
        })
    }
    strategies
}
|
use super::ecdsa::*;
use super::eddsa::*;
use super::error::*;
use super::handles::*;
use super::rsa::*;
use super::signature_op::*;
use super::signature_publickey::*;
use super::WASI_CRYPTO_CTX;
/// Serialization formats a key pair can be exported in or imported from.
/// Discriminants are explicit (presumably fixed by the wasi-crypto ABI —
/// confirm against the witx definition).
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
#[repr(u16)]
pub enum KeyPairEncoding {
    Raw = 1,
    PKCS8 = 2,
    DER = 3,
    PEM = 4,
}
/// A signature key pair, tagged by the algorithm family it belongs to.
#[derive(Clone, Debug)]
pub enum SignatureKeyPair {
    ECDSA(ECDSASignatureKeyPair),
    EdDSA(EdDSASignatureKeyPair),
    RSA(RSASignatureKeyPair),
}
impl SignatureKeyPair {
    /// Serialize the key pair in the requested `encoding`.
    /// Only PKCS8 export is implemented; every other encoding fails with
    /// `CryptoError::NotAvailable`.
    fn export(&self, encoding: KeyPairEncoding) -> Result<Vec<u8>, Error> {
        let encoded = match encoding {
            KeyPairEncoding::PKCS8 => match self {
                SignatureKeyPair::ECDSA(kp) => kp.as_pkcs8()?.to_vec(),
                SignatureKeyPair::EdDSA(kp) => kp.as_pkcs8()?.to_vec(),
                SignatureKeyPair::RSA(kp) => kp.as_pkcs8()?.to_vec(),
            },
            _ => bail!(CryptoError::NotAvailable),
        };
        Ok(encoded)
    }
    /// Generate a fresh key pair using the builder behind
    /// `kp_builder_handle` and return a handle to the result.
    fn generate(kp_builder_handle: Handle) -> Result<Handle, Error> {
        let kp_builder = WASI_CRYPTO_CTX
            .signature_keypair_builder_manager
            .get(kp_builder_handle)?;
        // Dispatch to the algorithm-specific builder.
        let handle = match kp_builder {
            SignatureKeyPairBuilder::ECDSA(kp_builder) => kp_builder.generate()?,
            SignatureKeyPairBuilder::EdDSA(kp_builder) => kp_builder.generate()?,
            SignatureKeyPairBuilder::RSA(kp_builder) => kp_builder.generate()?,
        };
        Ok(handle)
    }
    /// Import a key pair from `encoded` bytes in the given `encoding`,
    /// using the algorithm-specific builder behind `kp_builder_handle`;
    /// returns a handle to the imported key pair.
    fn import(
        kp_builder_handle: Handle,
        encoded: &[u8],
        encoding: KeyPairEncoding,
    ) -> Result<Handle, Error> {
        let kp_builder = WASI_CRYPTO_CTX
            .signature_keypair_builder_manager
            .get(kp_builder_handle)?;
        let handle = match kp_builder {
            SignatureKeyPairBuilder::ECDSA(kp_builder) => kp_builder.import(encoded, encoding)?,
            SignatureKeyPairBuilder::EdDSA(kp_builder) => kp_builder.import(encoded, encoding)?,
            SignatureKeyPairBuilder::RSA(kp_builder) => kp_builder.import(encoded, encoding)?,
        };
        Ok(handle)
    }
    /// Derive this key pair's public key, register it with the global
    /// public-key manager, and return the new handle.
    fn public_key(&self) -> Result<Handle, Error> {
        let pk = match self {
            SignatureKeyPair::ECDSA(kp) => {
                let raw_pk = kp.raw_public_key();
                SignaturePublicKey::ECDSA(ECDSASignaturePublicKey::from_raw(kp.alg, raw_pk)?)
            }
            SignatureKeyPair::EdDSA(kp) => {
                let raw_pk = kp.raw_public_key();
                SignaturePublicKey::EdDSA(EdDSASignaturePublicKey::from_raw(kp.alg, raw_pk)?)
            }
            SignatureKeyPair::RSA(kp) => {
                let raw_pk = kp.raw_public_key();
                SignaturePublicKey::RSA(RSASignaturePublicKey::from_raw(kp.alg, raw_pk)?)
            }
        };
        let handle = WASI_CRYPTO_CTX.signature_publickey_manager.register(pk)?;
        Ok(handle)
    }
}
/// Algorithm-specific factory for signature key pairs.
#[derive(Clone, Copy, Debug)]
pub enum SignatureKeyPairBuilder {
    ECDSA(ECDSASignatureKeyPairBuilder),
    EdDSA(EdDSASignatureKeyPairBuilder),
    RSA(RSASignatureKeyPairBuilder),
}
impl SignatureKeyPairBuilder {
    /// Create a key-pair builder matching the algorithm family of the
    /// signature operation behind `op_handle`, register it, and return
    /// the new builder handle.
    fn open(op_handle: Handle) -> Result<Handle, Error> {
        let signature_op = WASI_CRYPTO_CTX.signature_op_manager.get(op_handle)?;
        let kp_builder = match signature_op {
            SignatureOp::ECDSA(_) => SignatureKeyPairBuilder::ECDSA(
                ECDSASignatureKeyPairBuilder::new(signature_op.alg()),
            ),
            SignatureOp::EdDSA(_) => SignatureKeyPairBuilder::EdDSA(
                EdDSASignatureKeyPairBuilder::new(signature_op.alg()),
            ),
            SignatureOp::RSA(_) => {
                SignatureKeyPairBuilder::RSA(RSASignatureKeyPairBuilder::new(signature_op.alg()))
            }
        };
        let handle = WASI_CRYPTO_CTX
            .signature_keypair_builder_manager
            .register(kp_builder)?;
        Ok(handle)
    }
}
/// Public entry point: open a key-pair builder for the given signature op.
pub fn signature_keypair_builder_open(op_handle: Handle) -> Result<Handle, Error> {
    SignatureKeyPairBuilder::open(op_handle)
}
/// Release the key-pair builder behind `handle`.
pub fn signature_keypair_builder_close(handle: Handle) -> Result<(), Error> {
    WASI_CRYPTO_CTX
        .signature_keypair_builder_manager
        .close(handle)
}
/// Generate a new key pair from the builder behind `kp_builder_handle`.
pub fn signature_keypair_generate(kp_builder_handle: Handle) -> Result<Handle, Error> {
    SignatureKeyPair::generate(kp_builder_handle)
}
/// Import a key pair from `encoded` bytes (in `encoding`) using the
/// builder behind `kp_builder_handle`.
pub fn signature_keypair_import(
    kp_builder_handle: Handle,
    encoded: &[u8],
    encoding: KeyPairEncoding,
) -> Result<Handle, Error> {
    SignatureKeyPair::import(kp_builder_handle, encoded, encoding)
}
/// Look up a key pair by persistent id — not supported by this
/// implementation; always fails with `CryptoError::NotAvailable`.
pub fn signature_keypair_from_id(
    _kp_builder_handle: Handle,
    _kp_id: &[u8],
) -> Result<Handle, Error> {
    bail!(CryptoError::NotAvailable)
}
/// Return the persistent id of a key pair — unsupported; the handle is
/// still resolved first so an invalid handle reports its own error.
pub fn signature_keypair_id(kp_handle: Handle) -> Result<Vec<u8>, Error> {
    let _kp = WASI_CRYPTO_CTX.signature_keypair_manager.get(kp_handle)?;
    bail!(CryptoError::NotAvailable)
}
/// Export the key pair behind `kp_handle` in the requested encoding.
pub fn signature_keypair_export(
    kp_handle: Handle,
    encoding: KeyPairEncoding,
) -> Result<Vec<u8>, Error> {
    WASI_CRYPTO_CTX
        .signature_keypair_manager
        .get(kp_handle)?
        .export(encoding)
}
/// Derive and register the public key of the key pair behind `kp_handle`.
pub fn signature_keypair_publickey(kp_handle: Handle) -> Result<Handle, Error> {
    WASI_CRYPTO_CTX
        .signature_keypair_manager
        .get(kp_handle)?
        .public_key()
}
/// Release the key pair behind `handle`.
pub fn signature_keypair_close(handle: Handle) -> Result<(), Error> {
    WASI_CRYPTO_CTX.signature_keypair_manager.close(handle)
}
|
use crate::set1::aes;
use crate::utils::random::{coin_flip, random_bytes, random_in_range};
/// Cryptopals set 2 challenge 11 oracle: encrypts `input` under a fresh
/// random key, surrounded by random padding, choosing CBC or ECB by a
/// coin flip. Returns `(used_cbc, ciphertext)` so a test can check a
/// mode-detector's guess against the truth.
pub fn encryption_oracle<T: AsRef<[u8]>>(input: T) -> (bool, Vec<u8>) {
    let key = random_bytes(16);
    // The challenge calls for *independent* 5-10 byte paddings before and
    // after the plaintext; the previous version drew one size for both.
    let prefix_size: usize = random_in_range(5, 10);
    let suffix_size: usize = random_in_range(5, 10);
    let mut padded_input = random_bytes(prefix_size);
    padded_input.extend(input.as_ref());
    padded_input.extend(random_bytes(suffix_size));
    let use_cbc = coin_flip();
    if use_cbc {
        // CBC additionally needs a random IV.
        let iv = random_bytes(16);
        (use_cbc, aes::cbc::encrypt(key, padded_input, iv))
    } else {
        (use_cbc, aes::ecb::encrypt(key, padded_input))
    }
}
#[cfg(test)]
mod test {
    use crate::set1::aes::ecb::has_repeated_blocks;
    use crate::set1::aes::oracle::encryption_oracle;
    // 43 'A' bytes guarantee at least two identical aligned 16-byte blocks
    // regardless of the 5-10 byte random prefix, so ECB output always shows
    // repeated blocks while CBC (with a random IV) should not.
    #[test]
    fn detect_oracle_choice() {
        let input: Vec<u8> = "A".repeat(43).into_bytes();
        for _ in 1..1000 {
            let (using_cbc, output) = encryption_oracle(&input);
            // The detector's verdict must match the oracle's actual choice.
            assert_ne!(using_cbc, has_repeated_blocks(output))
        }
    }
}
|
pub mod boundaries;
pub mod layout;
pub mod pane_resizer;
pub mod panes;
pub mod tab;
// Placeholder client entry point; intentionally a no-op (the underscore
// prefix silences the unused warning until it is implemented).
pub fn _start_client() {}
|
use std::fs;
use std::collections::HashMap;
use seven::*;
/// Count how many bag colors (other than `bagname` itself) can eventually
/// contain a `bagname` bag.
fn part1(bags: &HashMap<String, Bag>, bagname: &String) -> usize {
    bags.iter()
        .filter(|(name, bag)| *name != bagname && bag.can_contain(bags, bagname))
        .count()
}
/// Total number of bags required inside a single `bagname` bag.
fn part2(bags: &HashMap<String, Bag>, bagname: &String) -> usize {
    let target = bags.get(bagname).unwrap();
    target.count_content(bags, 0)
}
fn main() {
    // Read the puzzle input and parse it into the bag-rule map.
    let input = fs::read_to_string("input.txt").expect("Could not read input file");
    let bags = bagnames(&input);
    let target = String::from("shiny gold");
    println!("Part 1: {}", part1(&bags, &target));
    println!("Part 2: {}", part2(&bags, &target));
}
|
#[doc = "Reader of register ADV_ACCADDR_L"]
pub type R = crate::R<u32, super::ADV_ACCADDR_L>;
#[doc = "Writer for register ADV_ACCADDR_L"]
pub type W = crate::W<u32, super::ADV_ACCADDR_L>;
#[doc = "Register ADV_ACCADDR_L `reset()`'s with value 0xbed6"]
impl crate::ResetValue for super::ADV_ACCADDR_L {
    type Type = u32;
    // Hardware reset value (presumably the low 16 bits of the standard
    // BLE advertising access address 0x8E89BED6 — confirm in the datasheet).
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0xbed6
    }
}
#[doc = "Reader of field `ADV_ACCADDR_L`"]
pub type ADV_ACCADDR_L_R = crate::R<u16, u16>;
#[doc = "Write proxy for field `ADV_ACCADDR_L`"]
pub struct ADV_ACCADDR_L_W<'a> {
    // Borrowed register writer this proxy updates.
    w: &'a mut W,
}
impl<'a> ADV_ACCADDR_L_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    // Unsafe because the caller must guarantee `value` is acceptable to
    // the hardware; only a 16-bit mask is applied here, no validation.
    #[inline(always)]
    pub unsafe fn bits(self, value: u16) -> &'a mut W {
        // Clear bits 0..16 of the register image, then OR in the new value.
        self.w.bits = (self.w.bits & !0xffff) | ((value as u32) & 0xffff);
        self.w
    }
}
impl R {
    #[doc = "Bits 0:15 - Lower 16 bit of ADV packet access code"]
    #[inline(always)]
    pub fn adv_accaddr_l(&self) -> ADV_ACCADDR_L_R {
        // Extract the low 16 bits and wrap them in a field reader.
        ADV_ACCADDR_L_R::new((self.bits & 0xffff) as u16)
    }
}
impl W {
    #[doc = "Bits 0:15 - Lower 16 bit of ADV packet access code"]
    #[inline(always)]
    pub fn adv_accaddr_l(&mut self) -> ADV_ACCADDR_L_W {
        // Hand out a field-scoped write proxy borrowing this writer.
        ADV_ACCADDR_L_W { w: self }
    }
}
|
// ===============================================================================
// Authors: AFRL/RQQA
// Organization: Air Force Research Laboratory, Aerospace Systems Directorate, Power and Control Division
//
// Copyright (c) 2017 Government of the United State of America, as represented by
// the Secretary of the Air Force. No copyright is claimed in the United States under
// Title 17, U.S. Code. All Other Rights Reserved.
// ===============================================================================
// This file was auto-created by LmcpGen. Modifications will be overwritten.
use avtas::lmcp::{Error, ErrorType, Lmcp, LmcpSubscription, SrcLoc, Struct, StructInfo};
use std::fmt::Debug;
/// CMASI OperatingRegion message: a region id plus lists of area ids
/// (presumably referencing keep-in / keep-out geometry objects — see the
/// CMASI MDM). Auto-generated by LmcpGen.
#[derive(Clone, Debug, Default)]
#[repr(C)]
pub struct OperatingRegion {
    pub id: i64,
    pub keep_in_areas: Vec<i64>,
    pub keep_out_areas: Vec<i64>,
}
/// Field-by-field structural equality.
impl PartialEq for OperatingRegion {
    fn eq(&self, other: &OperatingRegion) -> bool {
        self.id == other.id
            && self.keep_in_areas == other.keep_in_areas
            && self.keep_out_areas == other.keep_out_areas
    }
}
impl LmcpSubscription for OperatingRegion {
    // Fully-qualified message name used as the pub/sub topic key.
    fn subscription() -> &'static str { "afrl.cmasi.OperatingRegion" }
}
impl Struct for OperatingRegion {
    /// LMCP type identity (series/version/struct ids) written into and
    /// validated against the wire header; values are generated
    /// (presumably from the CMASI message definition — do not edit).
    fn struct_info() -> StructInfo {
        StructInfo {
            exist: 1,
            series: 4849604199710720000u64,
            version: 3,
            struct_ty: 39,
        }
    }
}
impl Lmcp for OperatingRegion {
    /// Serialize: write the StructInfo header, then each field in
    /// declaration order, advancing `pos` by the bytes each step wrote.
    fn ser(&self, buf: &mut[u8]) -> Result<usize, Error> {
        let mut pos = 0;
        {
            let x = Self::struct_info().ser(buf)?;
            pos += x;
        }
        {
            // get! presumably errors out when the buffer is too short —
            // see the macro in avtas::lmcp.
            let r = get!(buf.get_mut(pos ..));
            let writeb: usize = self.id.ser(r)?;
            pos += writeb;
        }
        {
            let r = get!(buf.get_mut(pos ..));
            let writeb: usize = self.keep_in_areas.ser(r)?;
            pos += writeb;
        }
        {
            let r = get!(buf.get_mut(pos ..));
            let writeb: usize = self.keep_out_areas.ser(r)?;
            pos += writeb;
        }
        Ok(pos)
    }
    /// Deserialize: validate the header against this type's StructInfo,
    /// then read the fields in the same order ser() wrote them.
    fn deser(buf: &[u8]) -> Result<(OperatingRegion, usize), Error> {
        let mut pos = 0;
        let (si, u) = StructInfo::deser(buf)?;
        pos += u;
        if si == OperatingRegion::struct_info() {
            let mut out: OperatingRegion = Default::default();
            {
                let r = get!(buf.get(pos ..));
                let (x, readb): (i64, usize) = Lmcp::deser(r)?;
                out.id = x;
                pos += readb;
            }
            {
                let r = get!(buf.get(pos ..));
                let (x, readb): (Vec<i64>, usize) = Lmcp::deser(r)?;
                out.keep_in_areas = x;
                pos += readb;
            }
            {
                let r = get!(buf.get(pos ..));
                let (x, readb): (Vec<i64>, usize) = Lmcp::deser(r)?;
                out.keep_out_areas = x;
                pos += readb;
            }
            Ok((out, pos))
        } else {
            Err(error!(ErrorType::InvalidStructInfo))
        }
    }
    /// Total serialized size in bytes. 15 is a fixed overhead (presumably
    /// the StructInfo header encoding — confirm in avtas::lmcp).
    fn size(&self) -> usize {
        let mut size = 15;
        size += self.id.size();
        size += self.keep_in_areas.size();
        size += self.keep_out_areas.size();
        size
    }
}
/// Object-safe accessor interface for OperatingRegion messages, enabling
/// boxed trait-object storage. The `as_…` hooks are downcast points that
/// default to None and are overridden by the concrete type.
pub trait OperatingRegionT: Debug + Send {
    fn as_afrl_cmasi_operating_region(&self) -> Option<&OperatingRegion> { None }
    fn as_mut_afrl_cmasi_operating_region(&mut self) -> Option<&mut OperatingRegion> { None }
    fn id(&self) -> i64;
    fn id_mut(&mut self) -> &mut i64;
    fn keep_in_areas(&self) -> &Vec<i64>;
    fn keep_in_areas_mut(&mut self) -> &mut Vec<i64>;
    fn keep_out_areas(&self) -> &Vec<i64>;
    fn keep_out_areas_mut(&mut self) -> &mut Vec<i64>;
}
// NOTE(review): `Box<OperatingRegionT>` is the pre-2018 spelling of
// `Box<dyn OperatingRegionT>`; kept as generated by LmcpGen.
impl Clone for Box<OperatingRegionT> {
    fn clone(&self) -> Box<OperatingRegionT> {
        // Clone via downcast to the concrete type; any other implementor
        // would hit unreachable!(), which this generated code never boxes.
        if let Some(x) = OperatingRegionT::as_afrl_cmasi_operating_region(self.as_ref()) {
            Box::new(x.clone())
        } else {
            unreachable!()
        }
    }
}
impl Default for Box<OperatingRegionT> {
    // Default is a boxed default OperatingRegion.
    fn default() -> Box<OperatingRegionT> { Box::new(OperatingRegion::default()) }
}
impl PartialEq for Box<OperatingRegionT> {
    fn eq(&self, other: &Box<OperatingRegionT>) -> bool {
        // Equal only when both sides downcast to OperatingRegion and the
        // concrete values compare equal.
        if let (Some(x), Some(y)) =
            (OperatingRegionT::as_afrl_cmasi_operating_region(self.as_ref()),
             OperatingRegionT::as_afrl_cmasi_operating_region(other.as_ref())) {
            x == y
        } else {
            false
        }
    }
}
// Serialization for the boxed trait object: validate the type tag, then
// delegate to the concrete OperatingRegion implementation.
impl Lmcp for Box<OperatingRegionT> {
    fn ser(&self, buf: &mut[u8]) -> Result<usize, Error> {
        if let Some(x) = OperatingRegionT::as_afrl_cmasi_operating_region(self.as_ref()) {
            x.ser(buf)
        } else {
            unreachable!()
        }
    }
    fn deser(buf: &[u8]) -> Result<(Box<OperatingRegionT>, usize), Error> {
        // Peek the header here; OperatingRegion::deser re-reads and
        // re-validates it before constructing the value.
        let (si, _) = StructInfo::deser(buf)?;
        if si == OperatingRegion::struct_info() {
            let (x, readb) = OperatingRegion::deser(buf)?;
            Ok((Box::new(x), readb))
        } else {
            Err(error!(ErrorType::InvalidStructInfo))
        }
    }
    fn size(&self) -> usize {
        if let Some(x) = OperatingRegionT::as_afrl_cmasi_operating_region(self.as_ref()) {
            x.size()
        } else {
            unreachable!()
        }
    }
}
// Accessor implementation for the concrete type; the downcast hooks
// return Some(self) so the Box impls above can recover the struct.
impl OperatingRegionT for OperatingRegion {
    fn as_afrl_cmasi_operating_region(&self) -> Option<&OperatingRegion> { Some(self) }
    fn as_mut_afrl_cmasi_operating_region(&mut self) -> Option<&mut OperatingRegion> { Some(self) }
    fn id(&self) -> i64 { self.id }
    fn id_mut(&mut self) -> &mut i64 { &mut self.id }
    fn keep_in_areas(&self) -> &Vec<i64> { &self.keep_in_areas }
    fn keep_in_areas_mut(&mut self) -> &mut Vec<i64> { &mut self.keep_in_areas }
    fn keep_out_areas(&self) -> &Vec<i64> { &self.keep_out_areas }
    fn keep_out_areas_mut(&mut self) -> &mut Vec<i64> { &mut self.keep_out_areas }
}
#[cfg(test)]
pub mod tests {
    use super::*;
    use quickcheck::*;
    impl Arbitrary for OperatingRegion {
        // Build a random instance field-by-field for property testing.
        fn arbitrary<G: Gen>(_g: &mut G) -> OperatingRegion {
            OperatingRegion {
                id: Arbitrary::arbitrary(_g),
                keep_in_areas: Arbitrary::arbitrary(_g),
                keep_out_areas: Arbitrary::arbitrary(_g),
            }
        }
    }
    quickcheck! {
        // Property: serialization fills exactly size() bytes.
        fn serializes(x: OperatingRegion) -> Result<TestResult, Error> {
            use std::u16;
            // Oversized lists are discarded — the wire format presumably
            // stores list lengths as u16 (confirm against the LMCP spec).
            if x.keep_in_areas.len() > (u16::MAX as usize) { return Ok(TestResult::discard()); }
            if x.keep_out_areas.len() > (u16::MAX as usize) { return Ok(TestResult::discard()); }
            let mut buf: Vec<u8> = vec![0; x.size()];
            let sx = x.ser(&mut buf)?;
            Ok(TestResult::from_bool(sx == x.size()))
        }
        // Property: deser(ser(x)) == x with matching byte counts.
        fn roundtrips(x: OperatingRegion) -> Result<TestResult, Error> {
            use std::u16;
            if x.keep_in_areas.len() > (u16::MAX as usize) { return Ok(TestResult::discard()); }
            if x.keep_out_areas.len() > (u16::MAX as usize) { return Ok(TestResult::discard()); }
            let mut buf: Vec<u8> = vec![0; x.size()];
            let sx = x.ser(&mut buf)?;
            let (y, sy) = OperatingRegion::deser(&buf)?;
            Ok(TestResult::from_bool(sx == sy && x == y))
        }
    }
}
|
use std::fs;
use std::io::prelude::*;
use std::net::{TcpListener, TcpStream};
use crate::dnd::ability_scores::AbilityScores;
use crate::dnd::character::Character;
use crate::dnd::html_formatting::ToHTMLString;
use crate::io::file_utils::random_line_from_file;
use crate::threading::threadpool::ThreadPool;
// Data files for random character generation; random_line_from_file picks
// a line from each (presumably one candidate value per line — confirm format).
static NAMES_FILE_PATH: &str = "resources/names.txt";
static RACE_FILE_PATH: &str = "resources/race.txt";
static CLASS_FILE_PATH: &str = "resources/class.txt";
/// Starts the server and begins listening on the given address, and responding with
/// random D&D characters.
///
/// # Arguments
///
/// `address` - the IP address and port to listen on
///
/// # Panics
///
/// Panics if the address cannot be bound or the thread pool cannot be created.
pub fn start_server(address: &str) {
    let listener = TcpListener::bind(address)
        .unwrap_or_else(|e| panic!("could not bind {}: {}", address, e));
    let pool = ThreadPool::new(10).unwrap();
    for stream in listener.incoming() {
        // A single failed accept must not crash the whole server;
        // report it and keep listening.
        match stream {
            Ok(stream) => pool.execute(|| {
                handle_connection(stream);
            }),
            Err(e) => eprintln!("connection failed: {}", e),
        }
    }
}
/// Reads one request from `stream` and replies with `index.html` for
/// `GET /`, or `404.html` for anything else.
fn handle_connection(mut stream: TcpStream) {
    // Only the first 512 bytes of the request are examined, which is
    // enough to recognize the request line.
    let mut buffer = [0; 512];
    // Don't panic (and kill the pool's worker thread) on a bad read;
    // just drop the connection.
    if stream.read(&mut buffer).is_err() {
        return;
    }
    let get = b"GET / HTTP/1.1\r\n";
    let (status_line, filename) = if buffer.starts_with(get) {
        ("HTTP/1.1 200 OK\r\n\r\n", "index.html")
    } else {
        ("HTTP/1.1 404 NOT FOUND\r\n\r\n", "404.html")
    };
    send_response(stream, status_line, filename);
}
/// Sends `status_line` plus the contents of `filename`, with a freshly
/// generated character's HTML spliced into the template, over `stream`.
fn send_response(mut stream: TcpStream, status_line: &str, filename: &str) {
    let contents = fs::read_to_string(filename)
        .unwrap_or_else(|e| panic!("could not read {}: {}", filename, e));
    let mut response = format!("{}{}", status_line, contents);
    let character_output = build_character_html();
    // NOTE(review): 202 is a hard-coded byte offset into the template where
    // the character markup is inserted; this silently breaks (or panics on a
    // non-char boundary) if the template changes — a placeholder token would
    // be more robust.
    response.insert_str(202, &character_output);
    // write_all guarantees the whole response is sent; a bare write() may
    // legally perform a partial write and drop the rest.
    stream.write_all(response.as_bytes()).unwrap();
    stream.flush().unwrap();
}
/// Builds a random character (random name/race/class lines plus
/// 4d6-drop-lowest ability scores) and renders it as an HTML fragment.
fn build_character_html() -> String {
    let name = random_line_from_file(&NAMES_FILE_PATH).unwrap();
    let race = random_line_from_file(&RACE_FILE_PATH).unwrap();
    let class = random_line_from_file(&CLASS_FILE_PATH).unwrap();
    let ability_scores = AbilityScores::from_4d6_drop_lowest_in_order();
    let character = Character::new(&name, &race, &class, ability_scores);
    // Logged to stdout so the server operator can see what was served.
    println!("{}", character);
    character.to_html_string()
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.