text stringlengths 8 4.13M |
|---|
use super::float_eq;
use intersections::Intersection;
use materials::Material;
use matrices::Matrix4;
use rays::Ray;
use tuples::Tuple;
/// The primitive shape variants supported by the ray tracer.
#[derive(Copy, Clone, PartialEq, Debug)]
pub enum ShapeKind {
    // A unit sphere centered at the object-space origin.
    Sphere,
    // The xz-plane (y = 0) in object space.
    Plane,
}
/// A renderable object: a primitive kind plus its transform and material.
#[derive(Copy, Clone, PartialEq, Debug)]
pub struct Shape {
    // Object-to-world transformation matrix.
    pub transform: Matrix4,
    // Surface material used for shading.
    pub material: Material,
    // Which primitive this shape is.
    pub shape_kind: ShapeKind,
}
impl Shape {
    /// Computes the world-space surface normal at `point`.
    ///
    /// The point is moved into object space, the shape-specific normal is
    /// computed there, and the result is transformed back through the
    /// inverse-transpose of the shape's transform. `w` is forced to 0.0
    /// because the transpose of a translation would otherwise leak into
    /// the vector's w component.
    pub fn normal_at(&self, point: Tuple) -> Tuple {
        // Hoist the inverse: it is needed both to move the point into
        // object space and (transposed) to move the normal back out.
        let inverse = self.transform.inverse();
        let local_point = inverse * point;
        let local_normal = self.local_normal_at(local_point);
        let mut world_normal = inverse.transpose() * local_normal;
        world_normal.w = 0.0;
        world_normal.normalize()
    }
    /// Object-space normal: constant +y for a plane, the vector from the
    /// origin to the point for a unit sphere.
    fn local_normal_at(&self, point: Tuple) -> Tuple {
        match self.shape_kind {
            ShapeKind::Plane => Tuple::vector(0.0, 1.0, 0.0),
            ShapeKind::Sphere => point - Tuple::point(0.0, 0.0, 0.0),
        }
    }
    /// Wraps raw `t` values into `Intersection`s against this shape.
    pub fn intersections(&self, ts: Vec<f32>) -> Vec<Intersection> {
        // `Shape` is `Copy`; `*self` is clearer than `self.clone()`
        // (clippy: clone_on_copy).
        ts.iter()
            .map(|&t| Intersection::new(t, *self))
            .collect::<Vec<Intersection>>()
    }
    /// Intersects `ray` with this shape by transforming the ray into
    /// object space first.
    pub fn intersect(&self, ray: &Ray) -> Vec<Intersection> {
        let local_ray = ray.transform(self.transform.inverse());
        self.local_intersect(local_ray)
    }
    /// Object-space intersection test; results are ordered by `t`.
    fn local_intersect(&self, ray: Ray) -> Vec<Intersection> {
        match self.shape_kind {
            ShapeKind::Plane => {
                // A ray parallel to (or lying in) the xz-plane never hits it.
                if float_eq(ray.direction.y, 0.0) {
                    vec![]
                } else {
                    let t = -ray.origin.y / ray.direction.y;
                    vec![Intersection::new(t, *self)]
                }
            }
            ShapeKind::Sphere => {
                // Standard quadratic for a unit sphere at the origin.
                let sphere_to_ray = ray.origin - Tuple::point(0.0, 0.0, 0.0);
                let a = ray.direction.dot(ray.direction);
                let b = 2.0 * ray.direction.dot(sphere_to_ray);
                let c = sphere_to_ray.dot(sphere_to_ray) - 1.0;
                let discriminant = b.powi(2) - 4.0 * a * c;
                if discriminant < 0.0 {
                    // The ray misses the sphere entirely.
                    vec![]
                } else {
                    // Compute the square root once for both roots.
                    let sqrt_d = discriminant.sqrt();
                    let t1 = (-b - sqrt_d) / (2.0 * a);
                    let t2 = (-b + sqrt_d) / (2.0 * a);
                    let i1 = Intersection::new(t1, *self);
                    let i2 = Intersection::new(t2, *self);
                    if t1 > t2 {
                        vec![i2, i1]
                    } else {
                        vec![i1, i2]
                    }
                }
            }
        }
    }
}
impl Default for Shape {
    /// A unit sphere with the identity transform and default material.
    fn default() -> Shape {
        Shape {
            transform: Default::default(),
            material: Default::default(),
            shape_kind: ShapeKind::Sphere,
        }
    }
}
// A freshly-constructed shape carries the identity transform.
#[test]
fn test_the_default_transformation() {
    let s = Shape::default();
    assert_eq!(s.transform, Matrix4::default());
}
// The transform field can be replaced after construction.
#[test]
fn test_assigning_a_transformation() {
    let mut s = Shape::default();
    s.transform = Matrix4::translation(2.0, 3.0, 4.0);
    assert_eq!(s.transform, Matrix4::translation(2.0, 3.0, 4.0));
}
// A freshly-constructed shape carries the default material.
#[test]
fn test_the_default_material() {
    let s = Shape::default();
    assert_eq!(s.material, Material::default());
}
// The material field can be replaced after construction.
#[test]
fn test_assigning_a_material() {
    let mut s = Shape::default();
    let mut m = Material::default();
    m.ambient = 1.0;
    s.material = m;
    assert_eq!(s.material, m);
}
pub struct Sphere {}
impl Sphere {
pub fn new() -> Shape {
Shape {
transform: Matrix4::default(),
material: Material::default(),
shape_kind: ShapeKind::Sphere,
}
}
}
// A fresh sphere carries the identity transform.
#[test]
fn test_a_spheres_default_transformation() {
    let s = Sphere::new();
    assert_eq!(s.transform, Matrix4::default());
}
// A sphere's transform can be reassigned.
#[test]
fn test_changing_a_spheres_transformation() {
    let mut s = Sphere::new();
    let t = Matrix4::translation(2.0, 3.0, 4.0);
    s.transform = t;
    assert_eq!(s.transform, t);
}
// A fresh sphere carries the default material.
#[test]
fn test_a_sphere_has_a_default_material() {
    let s = Sphere::new();
    assert_eq!(s.material, Material::default());
}
// A sphere's material can be reassigned.
#[test]
fn test_a_sphere_may_be_assigned_a_material() {
    let mut s = Sphere::new();
    let m = Material::new(Tuple::color(2.0, 0.0, 5.0), 2.0, 3.0, 4.0, 5.0, 0.0);
    s.material = m;
    assert_eq!(s.material, m);
}
// Normals on the axes point straight along each axis.
#[test]
fn test_the_normal_on_a_sphere_at_a_point_on_the_x_axis() {
    let s = Sphere::new();
    let n = s.normal_at(Tuple::point(1.0, 0.0, 0.0));
    assert_eq!(n, Tuple::vector(1.0, 0.0, 0.0));
}
#[test]
fn test_the_normal_on_a_sphere_at_a_point_on_the_y_axis() {
    let s = Sphere::new();
    let n = s.normal_at(Tuple::point(0.0, 1.0, 0.0));
    assert_eq!(n, Tuple::vector(0.0, 1.0, 0.0));
}
#[test]
fn test_the_normal_on_a_sphere_at_a_point_onn_the_z_axis() {
    let s = Sphere::new();
    let n = s.normal_at(Tuple::point(0.0, 0.0, 1.0));
    assert_eq!(n, Tuple::vector(0.0, 0.0, 1.0));
}
// sqrt(3)/3 in each coordinate puts the point on the unit sphere.
#[test]
fn test_the_normal_on_a_sphere_at_a_non_axial_point() {
    let s = Sphere::new();
    let n = s.normal_at(Tuple::point(
        3f32.sqrt() / 3.0,
        3f32.sqrt() / 3.0,
        3f32.sqrt() / 3.0,
    ));
    assert_eq!(
        n,
        Tuple::vector(3f32.sqrt() / 3.0, 3f32.sqrt() / 3.0, 3f32.sqrt() / 3.0)
    )
}
// normal_at always returns a unit-length vector.
#[test]
fn test_the_normal_is_a_normalized_vector() {
    let s = Sphere::new();
    let n = s.normal_at(Tuple::point(
        3f32.sqrt() / 3.0,
        3f32.sqrt() / 3.0,
        3f32.sqrt() / 3.0,
    ));
    assert_eq!(n, n.normalize());
}
// The transform is applied when computing normals.
#[test]
fn test_computing_the_normal_on_a_translated_sphere() {
    let mut s = Sphere::new();
    s.transform = Matrix4::translation(0.0, 1.0, 0.0);
    let n = s.normal_at(Tuple::point(0.0, 1.70711, -0.70711));
    assert_eq!(n, Tuple::vector(0.0, 0.70711, -0.70711))
}
// Scaling requires the inverse-transpose; a plain transform would be wrong.
#[test]
fn test_computing_the_normal_on_a_scaled_sphere() {
    let mut s = Sphere::new();
    s.transform = Matrix4::scaling(1.0, 0.5, 1.0);
    let n =
        s.normal_at(Tuple::point(0.0, 2f32.sqrt() / 2.0, -2f32.sqrt() / 2.0));
    assert_eq!(n, Tuple::vector(0.0, 0.97014, -0.24254));
}
// A ray through the center yields two distinct hits.
#[test]
fn test_a_ray_intersects_a_sphere_at_two_points() {
    let r =
        Ray::new(Tuple::point(0.0, 0.0, -5.0), Tuple::vector(0.0, 0.0, 1.0));
    let s = Sphere::new();
    let xs = s.intersect(&r);
    assert_eq!(xs.len(), 2);
    assert_eq!(xs[0].t, 4.0);
    assert_eq!(xs[1].t, 6.0);
}
// A tangent ray yields the same t twice (discriminant == 0).
#[test]
fn test_a_ray_intersects_a_sphere_at_a_tangent() {
    let r =
        Ray::new(Tuple::point(0.0, 1.0, -5.0), Tuple::vector(0.0, 0.0, 1.0));
    let s = Sphere::new();
    let xs = s.intersect(&r);
    assert_eq!(xs.len(), 2);
    assert_eq!(xs[0].t, 5.0);
    assert_eq!(xs[1].t, 5.0);
}
// A ray passing outside the sphere yields no intersections.
#[test]
fn test_a_ray_misses_a_sphere() {
    let r =
        Ray::new(Tuple::point(0.0, 2.0, -5.0), Tuple::vector(0.0, 0.0, 1.0));
    let s = Sphere::new();
    let xs = s.intersect(&r);
    assert_eq!(xs.len(), 0);
}
// A ray starting inside yields one negative and one positive t.
#[test]
fn test_a_ray_originates_inside_a_sphere() {
    let r = Ray::new(Tuple::point(0.0, 0.0, 0.0), Tuple::vector(0.0, 0.0, 1.0));
    let s = Sphere::new();
    let xs = s.intersect(&r);
    assert_eq!(xs.len(), 2);
    assert_eq!(xs[0].t, -1.0);
    assert_eq!(xs[1].t, 1.0);
}
// A sphere behind the ray still reports (negative) intersections.
#[test]
fn test_a_sphere_is_behind_a_ray() {
    let r = Ray::new(Tuple::point(0.0, 0.0, 5.0), Tuple::vector(0.0, 0.0, 1.0));
    let s = Sphere::new();
    let xs = s.intersect(&r);
    assert_eq!(xs.len(), 2);
    assert_eq!(xs[0].t, -6.0);
    assert_eq!(xs[1].t, -4.0);
}
// Each intersection records the shape it hit.
#[test]
fn test_intersect_sets_the_object_on_the_intersection() {
    let r =
        Ray::new(Tuple::point(0.0, 0.0, -5.0), Tuple::vector(0.0, 0.0, 1.0));
    let s = Sphere::new();
    let xs = s.intersect(&r);
    assert_eq!(xs.len(), 2);
    assert_eq!(xs[0].object, s.clone());
    assert_eq!(xs[1].object, s.clone());
}
// The ray is transformed into object space before intersecting.
#[test]
fn test_intersecting_a_scaled_sphere_with_a_ray() {
    let r =
        Ray::new(Tuple::point(0.0, 0.0, -5.0), Tuple::vector(0.0, 0.0, 1.0));
    let mut s = Sphere::new();
    s.transform = Matrix4::scaling(2.0, 2.0, 2.0);
    let xs = s.intersect(&r);
    assert_eq!(xs.len(), 2);
    assert_eq!(xs[0].t, 3.0);
    assert_eq!(xs[1].t, 7.0);
}
// Translating the sphere out of the ray's path removes all hits.
#[test]
fn test_intersecting_a_translated_sphere_with_a_ray() {
    let r =
        Ray::new(Tuple::point(0.0, 0.0, -5.0), Tuple::vector(0.0, 0.0, 1.0));
    let mut s = Sphere::new();
    s.transform = Matrix4::translation(5.0, 0.0, 0.0);
    let xs = s.intersect(&r);
    assert_eq!(xs.len(), 0);
}
pub struct Plane {}
impl Plane {
pub fn new() -> Shape {
let mut shape = Shape::default();
shape.shape_kind = ShapeKind::Plane;
shape
}
}
// A plane's object-space normal is +y regardless of the point.
#[test]
fn test_the_normal_of_a_plane_is_constant_everywhere() {
    let p = Plane::new();
    let normal = Tuple::vector(0.0, 1.0, 0.0);
    assert_eq!(p.local_normal_at(Tuple::point(0.0, 0.0, 0.0)), normal);
    assert_eq!(p.local_normal_at(Tuple::point(10.0, 0.0, -10.0)), normal);
    assert_eq!(p.local_normal_at(Tuple::point(-5.0, 0.0, 150.0)), normal);
}
// A ray above and parallel to the plane never hits it.
#[test]
fn test_intersect_with_a_ray_parallel_to_the_plane() {
    let p = Plane::new();
    let r =
        Ray::new(Tuple::point(0.0, 10.0, 0.0), Tuple::vector(0.0, 0.0, 1.0));
    let xs = p.local_intersect(r);
    assert!(xs.is_empty());
}
// A coplanar ray (inside the plane) is treated as a miss.
#[test]
fn test_intersect_with_a_coplanar_ray() {
    let p = Plane::new();
    let r = Ray::new(Tuple::point(0.0, 0.0, 0.0), Tuple::vector(0.0, 0.0, 1.0));
    let xs = p.local_intersect(r);
    assert!(xs.is_empty());
}
// A downward ray from above hits exactly once.
#[test]
fn test_a_ray_intersecting_a_plane_from_above() {
    let p = Plane::new();
    let r =
        Ray::new(Tuple::point(0.0, 1.0, 0.0), Tuple::vector(0.0, -1.0, 0.0));
    let xs = p.local_intersect(r);
    assert_eq!(xs.len(), 1);
    assert_eq!(xs[0].t, 1.0);
    assert_eq!(xs[0].object, p);
}
// An upward ray from below hits exactly once.
#[test]
fn test_a_ray_intersecting_a_plane_from_below() {
    let p = Plane::new();
    let r =
        Ray::new(Tuple::point(0.0, -1.0, 0.0), Tuple::vector(0.0, 1.0, 0.0));
    let xs = p.local_intersect(r);
    assert_eq!(xs.len(), 1);
    assert_eq!(xs[0].t, 1.0);
    assert_eq!(xs[0].object, p);
}
|
// Copyright 2019, 2020 Wingchain
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Build the genesis block according to the spec
use std::convert::{TryFrom, TryInto};
use std::sync::Arc;
use chrono::DateTime;
use serde::de::DeserializeOwned;
use serde::Deserialize;
use main_base::spec::{Spec, Tx};
use node_executor::{module, Context, Executor};
use primitives::codec::Encode;
use primitives::errors::{CommonError, CommonResult};
use primitives::types::ExecutionGap;
use primitives::{BlockNumber, BuildBlockParams, FullTransaction, Transaction};
use crate::errors;
/// Builds the genesis (number 0) block parameters from the chain spec.
///
/// Each spec transaction is built and validated, then routed to the meta
/// or payload queue depending on its call. The block timestamp comes from
/// the spec's `system.init` transaction (captured via `build_tx`); a spec
/// without one is rejected.
pub fn build_genesis(
    spec: &Spec,
    executor: &Executor,
    context: &Context,
) -> CommonResult<BuildBlockParams> {
    let mut meta_txs = vec![];
    let mut payload_txs = vec![];
    let mut timestamp: Option<u64> = None;
    for tx in &spec.genesis.txs {
        // `executor` is already a reference; no need to re-borrow it.
        let tx = build_tx(tx, executor, context, &mut timestamp)?;
        let is_meta = executor.is_meta_call(&tx.call)?;
        let tx_hash = executor.hash_transaction(&tx)?;
        let tx = FullTransaction { tx, tx_hash };
        // Plain `if` instead of `match`ing on a bool (clippy: match_bool).
        if is_meta {
            meta_txs.push(Arc::new(tx));
        } else {
            payload_txs.push(Arc::new(tx));
        }
    }
    let timestamp =
        timestamp.ok_or_else(|| errors::ErrorKind::Spec("No timestamp specified".to_string()))?;
    // Genesis is always block 0 with no execution lag.
    let number = 0;
    let execution_number = 0;
    Ok(BuildBlockParams {
        number,
        timestamp,
        meta_txs,
        payload_txs,
        execution_number,
    })
}
/// Builds and validates one spec transaction by dispatching on
/// `module.method`.
///
/// Side effect: a `system.init` transaction stores its parsed timestamp
/// into `timestamp` so `build_genesis` can use it as the block timestamp.
/// Unknown module/method pairs are a spec error.
fn build_tx(
    tx: &Tx,
    executor: &Executor,
    context: &Context,
    timestamp: &mut Option<u64>,
) -> CommonResult<Transaction> {
    let module = &tx.module;
    let method = &tx.method;
    let params = &tx.params;
    match (module.as_str(), method.as_str()) {
        ("system", "init") => {
            // system.init params carry an RFC 3339 timestamp string, so they
            // go through SystemInitParams -> module params conversion.
            let module_params: module::system::InitParams =
                get_module_params::<SystemInitParams>(params)?.try_into()?;
            *timestamp = Some(module_params.timestamp);
            build_validate_tx(executor, context, module, method, module_params, params)
        }
        ("balance", "init") => {
            let module_params: module::balance::InitParams = get_module_params(params)?;
            build_validate_tx(executor, context, module, method, module_params, params)
        }
        ("poa", "init") => {
            let module_params: module::poa::InitParams = get_module_params(params)?;
            build_validate_tx(executor, context, module, method, module_params, params)
        }
        ("raft", "init") => {
            let module_params: module::raft::InitParams = get_module_params(params)?;
            build_validate_tx(executor, context, module, method, module_params, params)
        }
        ("hotstuff", "init") => {
            let module_params: module::hotstuff::InitParams = get_module_params(params)?;
            build_validate_tx(executor, context, module, method, module_params, params)
        }
        ("contract", "init") => {
            let module_params: module::contract::InitParams = get_module_params(params)?;
            build_validate_tx(executor, context, module, method, module_params, params)
        }
        _ => Err(errors::ErrorKind::Spec(format!(
            "Unknown module or method: {}.{}",
            module, method
        ))
        .into()),
    }
}
fn build_validate_tx<P: Encode>(
executor: &Executor,
context: &Context,
module: &str,
method: &str,
module_params: P,
params: &str,
) -> CommonResult<Transaction> {
let call = executor.build_call(module.to_string(), method.to_string(), module_params)?;
let tx = executor.build_tx(None, call).map_err(|e| {
errors::ErrorKind::Spec(format!(
"Invalid params for {}.{}: \n{} \ncause: {}",
module, method, params, e
))
})?;
executor.validate_tx(context, &tx, false).map_err(|e| {
errors::ErrorKind::Spec(format!(
"Invalid params for {}.{}: \n{} \ncause: {}",
module, method, params, e
))
})?;
Ok(tx)
}
/// JSON shape of the spec's `system.init` params.
///
/// Differs from `module::system::InitParams` only in that `timestamp` is
/// an RFC 3339 string rather than epoch milliseconds.
#[derive(Deserialize)]
pub struct SystemInitParams {
    pub chain_id: String,
    // RFC 3339 datetime string, e.g. "2020-04-16T23:46:02.189+08:00".
    pub timestamp: String,
    pub max_until_gap: BlockNumber,
    pub max_execution_gap: ExecutionGap,
    pub consensus: String,
}
/// Converts the spec-level params into module params by parsing the
/// timestamp string into epoch milliseconds.
impl TryFrom<SystemInitParams> for module::system::InitParams {
    type Error = CommonError;
    fn try_from(value: SystemInitParams) -> Result<Self, Self::Error> {
        let timestamp = DateTime::parse_from_rfc3339(&value.timestamp)
            .map_err(|e| errors::ErrorKind::Spec(format!("Invalid time format: {:?}", e)))?;
        // Milliseconds since the Unix epoch; negative (pre-1970) values
        // would wrap via `as u64`, but a genesis spec is expected post-1970.
        let timestamp = timestamp.timestamp_millis() as u64;
        Ok(module::system::InitParams {
            chain_id: value.chain_id,
            timestamp,
            max_until_gap: value.max_until_gap,
            max_execution_gap: value.max_execution_gap,
            consensus: value.consensus,
        })
    }
}
/// Deserializes a JSON params string from the spec into `P`,
/// mapping parse failures to a spec error.
fn get_module_params<P>(params: &str) -> CommonResult<P>
where
    P: DeserializeOwned,
{
    serde_json::from_str::<P>(params)
        .map_err(|e| errors::ErrorKind::Spec(format!("Invalid json: {:?}", e)).into())
}
#[cfg(test)]
mod tests {
    use primitives::Address;
    use super::*;
    // system.init: the RFC 3339 timestamp is converted to epoch millis.
    #[test]
    fn test_system_init_params() {
        let str = r#"
        {
            "chain_id": "chain-test",
            "timestamp": "2020-04-16T23:46:02.189+08:00",
            "max_until_gap": 20,
            "max_execution_gap": 8,
            "consensus": "poa"
        }
        "#;
        let param = get_module_params::<SystemInitParams>(str).unwrap();
        let param: module::system::InitParams = param.try_into().unwrap();
        assert_eq!(
            param,
            module::system::InitParams {
                chain_id: "chain-test".to_string(),
                timestamp: 1587051962189,
                max_until_gap: 20,
                max_execution_gap: 8,
                consensus: "poa".to_string(),
            }
        )
    }
    // balance.init: hex address strings deserialize to Address byte vectors.
    #[test]
    fn test_balance_init_params() {
        let str = r#"
        {
            "endow":[
                ["0001020304050607080900010203040506070809", 1],
                ["000102030405060708090001020304050607080a", 2]
            ]
        }
        "#;
        let param = get_module_params::<module::balance::InitParams>(str).unwrap();
        assert_eq!(
            param,
            module::balance::InitParams {
                endow: vec![
                    (
                        Address(vec![
                            0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9
                        ]),
                        1
                    ),
                    (
                        Address(vec![
                            0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 1, 2, 3, 4, 5, 6, 7, 8, 10
                        ]),
                        2
                    )
                ]
            }
        )
    }
    // poa.init: block_interval deserializes into an Option.
    #[test]
    fn test_poa_init_params() {
        let str = r#"
        {
            "block_interval": 1000,
            "admin": {
                "threshold": 1,
                "members": [["01020304", 1]]
            },
            "authority": "01020304"
        }
        "#;
        let param = get_module_params::<module::poa::InitParams>(str).unwrap();
        assert_eq!(
            param,
            module::poa::InitParams {
                block_interval: Some(1000),
                admin: module::poa::Admin {
                    threshold: 1,
                    members: vec![(Address::from_hex("01020304").unwrap(), 1)],
                },
                authority: Address::from_hex("01020304").unwrap(),
            }
        )
    }
    // raft.init: admin members and authorities both carry addresses.
    #[test]
    fn test_raft_init_params() {
        let str = r#"
        {
            "block_interval": 3000,
            "heartbeat_interval": 100,
            "election_timeout_min": 500,
            "election_timeout_max": 1000,
            "admin" : {
                "threshold": 1,
                "members": [
                    ["0001020304050607080900010203040506070809", 1]
                ]
            },
            "authorities": {
                "members": [
                    "0001020304050607080900010203040506070809",
                    "000102030405060708090001020304050607080a"
                ]
            }
        }
        "#;
        let param = get_module_params::<module::raft::InitParams>(str).unwrap();
        assert_eq!(
            param,
            module::raft::InitParams {
                block_interval: Some(3000),
                heartbeat_interval: 100,
                election_timeout_min: 500,
                election_timeout_max: 1000,
                admin: module::raft::Admin {
                    threshold: 1,
                    members: vec![(
                        Address(vec![
                            0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9
                        ]),
                        1
                    )],
                },
                authorities: module::raft::Authorities {
                    members: vec![
                        Address(vec![
                            0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9
                        ]),
                        Address(vec![
                            0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 1, 2, 3, 4, 5, 6, 7, 8, 10
                        ]),
                    ]
                },
            }
        )
    }
    // contract.init: every numeric limit deserializes into an Option.
    #[test]
    fn test_contract_init_params() {
        let str = r#"
        {
            "max_stack_height": 16384,
            "initial_memory_pages": 1024,
            "max_memory_pages": 2048,
            "max_share_value_len": 104857600,
            "max_share_size": 1024,
            "max_nest_depth": 8
        }
        "#;
        let param = get_module_params::<module::contract::InitParams>(str).unwrap();
        assert_eq!(
            param,
            module::contract::InitParams {
                max_stack_height: Some(16384),
                initial_memory_pages: Some(1024),
                max_memory_pages: Some(2048),
                max_share_value_len: Some(104857600),
                max_share_size: Some(1024),
                max_nest_depth: Some(8),
            }
        )
    }
}
|
// Entry point for subte
extern crate pancurses as curses;
extern crate fourthrail;
use fourthrail::fourthrail::*;
/* */
/// Entry point: initializes curses, checks the terminal is large enough,
/// builds the game window and tile definitions, then runs the input loop
/// until the player confirms quitting with 'q' then 'y'/'Y'.
fn main () {
    // Initialise curses first.
    let window = curses::initscr();
    let (height, width) = window.get_max_yx();
    // Then check terminal size.
    match (height, width) {
        // Too small: tear curses down before printing, or the message
        // would be swallowed by the curses screen.
        (h, w) if h < graphic::INNER_HEIGHT || w < graphic::INNER_WIDTH => {
            curses::endwin();
            println!("\nTerminal too small.\n");
            return;
        }
        // Taller than needed: draw a centered title above the play area.
        (h, w) if h > graphic::INNER_HEIGHT => {
            window.mvprintw(
                (h - graphic::INNER_HEIGHT - 1) / 2,
                (w - graphic::INNER_WIDTH) / 2,
                "> Subte <"
            );
        }
        // Exact fit: touch the window so the first refresh shows it.
        _ => { window.mvaddch(0, 0, ' '); }
    }
    // Then set the behaviour of the cursor.
    curses::noecho();
    curses::curs_set(0);
    curses::use_default_colors();
    curses::start_color();
    graphic::init_display();
    // Centered inner window the game itself draws into.
    let neww = curses::newwin(
        graphic::INNER_HEIGHT,
        graphic::INNER_WIDTH,
        (height - graphic::INNER_HEIGHT + 1) / 2,
        (width - graphic::INNER_WIDTH) / 2
    );
    // Walkable, see-through floor tile.
    let t1 = TileBuilder::new()
        .name(String::from("TileTile"))
        .symbol('.')
        .colour(1, curses::COLOR_WHITE, curses::COLOR_BLACK)
        .opaque(false)
        .solid(false)
        .finalise();
    // Solid, opaque wall tile.
    let t2 = TileBuilder::new()
        .name(String::from("BadTile"))
        .symbol('#')
        .colour(2, curses::COLOR_WHITE, curses::COLOR_BLACK)
        .opaque(true)
        .solid(true)
        .finalise();
    let r = Resource { tile_defs: vec![t1, t2] };
    let mut fourthrail = main::Fourthrail::initialise(neww, &r);
    // Flash, then wait for any key before the first frame.
    curses::flash();
    window.keypad(true);
    window.getch();
    fourthrail.update_graphic();
    // Then enter the main loop
    loop {
        fourthrail.update_graphic();
        match window.getch() {
            // 'q' opens a confirm-quit prompt in a small overlay window.
            Some(curses::Input::Character('q')) => {
                let exitw = curses::newwin(4, 40, 4, 4);
                exitw.mv(1,1);
                exitw.printw(langue::EXIT_REALLY);
                exitw.mv(2,1);
                exitw.printw(langue::YN);
                exitw.refresh();
                match window.getch() {
                    Some(curses::Input::Character('y'))
                    | Some(curses::Input::Character('Y')) => break,
                    _ => continue
                }
            }
            // Any other key is a game turn.
            Some(k) => fourthrail.turn(k),
            _ => continue
        }
    }
    // We are done.
    curses::endwin();
}
|
// svd2rust-generated accessors for the DDRCTRL_DBG1 register.
// Generated code: kept byte-identical; only explanatory comments added.
#[doc = "Reader of register DDRCTRL_DBG1"]
pub type R = crate::R<u32, super::DDRCTRL_DBG1>;
#[doc = "Writer for register DDRCTRL_DBG1"]
pub type W = crate::W<u32, super::DDRCTRL_DBG1>;
#[doc = "Register DDRCTRL_DBG1 `reset()`'s with value 0"]
impl crate::ResetValue for super::DDRCTRL_DBG1 {
    type Type = u32;
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0
    }
}
// DIS_DQ occupies bit 0 of the register.
#[doc = "Reader of field `DIS_DQ`"]
pub type DIS_DQ_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `DIS_DQ`"]
pub struct DIS_DQ_W<'a> {
    w: &'a mut W,
}
impl<'a> DIS_DQ_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear bit 0, then OR in the new value.
        self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01);
        self.w
    }
}
// DIS_HIF occupies bit 1 of the register.
#[doc = "Reader of field `DIS_HIF`"]
pub type DIS_HIF_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `DIS_HIF`"]
pub struct DIS_HIF_W<'a> {
    w: &'a mut W,
}
impl<'a> DIS_HIF_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear bit 1, then OR in the new value shifted into place.
        self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1);
        self.w
    }
}
// Field readers extract each bit from the cached register value.
impl R {
    #[doc = "Bit 0 - DIS_DQ"]
    #[inline(always)]
    pub fn dis_dq(&self) -> DIS_DQ_R {
        DIS_DQ_R::new((self.bits & 0x01) != 0)
    }
    #[doc = "Bit 1 - DIS_HIF"]
    #[inline(always)]
    pub fn dis_hif(&self) -> DIS_HIF_R {
        DIS_HIF_R::new(((self.bits >> 1) & 0x01) != 0)
    }
}
// Field writers hand out proxies that mutate the pending write value.
impl W {
    #[doc = "Bit 0 - DIS_DQ"]
    #[inline(always)]
    pub fn dis_dq(&mut self) -> DIS_DQ_W {
        DIS_DQ_W { w: self }
    }
    #[doc = "Bit 1 - DIS_HIF"]
    #[inline(always)]
    pub fn dis_hif(&mut self) -> DIS_HIF_W {
        DIS_HIF_W { w: self }
    }
}
|
use crate::get_set_swap;
use crate::{command::Command, scene::commands::SceneContext};
use rg3d::core::algebra::Vector3;
use rg3d::sound::buffer::SoundBufferResource;
use rg3d::sound::context::SoundContext;
use rg3d::{
core::pool::{Handle, Ticket},
sound::source::SoundSource,
};
/// Undoable command that adds a sound source to the scene's sound context.
#[derive(Debug)]
pub struct AddSoundSourceCommand {
    // Pool ticket reserved by `revert`; used by a later redo to restore the
    // source under the same handle.
    ticket: Option<Ticket<SoundSource>>,
    handle: Handle<SoundSource>,
    // The source is held here while it is NOT in the pool (before the first
    // execute, and after a revert).
    source: Option<SoundSource>,
    cached_name: String,
}
impl AddSoundSourceCommand {
    pub fn new(source: SoundSource) -> Self {
        Self {
            ticket: None,
            handle: Default::default(),
            // NOTE(review): label says "Add Node" although this adds a sound
            // source — looks copy-pasted from a node command; confirm intent.
            cached_name: format!("Add Node {}", source.name()),
            source: Some(source),
        }
    }
}
impl<'a> Command<'a> for AddSoundSourceCommand {
    type Context = SceneContext<'a>;
    fn name(&mut self, _context: &Self::Context) -> String {
        self.cached_name.clone()
    }
    /// First run: add the source and remember its handle. Redo after an
    /// undo: put the source back via the reserved ticket, which must yield
    /// the original handle.
    fn execute(&mut self, context: &mut Self::Context) {
        match self.ticket.take() {
            None => {
                self.handle = context
                    .scene
                    .sound_context
                    .state()
                    .add_source(self.source.take().unwrap());
            }
            Some(ticket) => {
                let handle = context
                    .scene
                    .sound_context
                    .state()
                    .put_back(ticket, self.source.take().unwrap());
                // Pool invariant: put_back on a ticket restores the handle.
                assert_eq!(handle, self.handle);
            }
        }
    }
    /// Undo: pull the source out of the pool but keep a ticket so the
    /// handle stays reserved for a later redo.
    fn revert(&mut self, context: &mut Self::Context) {
        let (ticket, source) = context
            .scene
            .sound_context
            .state()
            .take_reserve(self.handle);
        self.ticket = Some(ticket);
        self.source = Some(source);
    }
    /// Called when the command leaves the undo stack; release any ticket
    /// still held so the pool slot can be reused.
    fn finalize(&mut self, context: &mut Self::Context) {
        if let Some(ticket) = self.ticket.take() {
            context.scene.sound_context.state().forget_ticket(ticket)
        }
    }
}
/// Undoable command that removes a sound source from the scene.
#[derive(Debug)]
pub struct DeleteSoundSourceCommand {
    handle: Handle<SoundSource>,
    // Ticket + source are populated by `execute` (the deletion) so that
    // `revert` can restore the source under the same handle.
    ticket: Option<Ticket<SoundSource>>,
    source: Option<SoundSource>,
}
impl DeleteSoundSourceCommand {
    pub fn new(handle: Handle<SoundSource>) -> Self {
        Self {
            handle,
            ticket: None,
            source: None,
        }
    }
}
impl<'a> Command<'a> for DeleteSoundSourceCommand {
    type Context = SceneContext<'a>;
    fn name(&mut self, _context: &Self::Context) -> String {
        "Delete Sound Source".to_owned()
    }
    /// Delete: take the source out of the pool, keeping a ticket so the
    /// handle stays reserved for undo.
    fn execute(&mut self, context: &mut Self::Context) {
        let (ticket, source) = context
            .scene
            .sound_context
            .state()
            .take_reserve(self.handle);
        self.source = Some(source);
        self.ticket = Some(ticket);
    }
    /// Undo: put the source back using the reserved ticket.
    fn revert(&mut self, context: &mut Self::Context) {
        self.handle = context
            .scene
            .sound_context
            .state()
            .put_back(self.ticket.take().unwrap(), self.source.take().unwrap());
    }
    /// Drop the reserved slot if the command dies while the source is
    /// still deleted.
    fn finalize(&mut self, context: &mut Self::Context) {
        if let Some(ticket) = self.ticket.take() {
            context.scene.sound_context.state().forget_ticket(ticket)
        }
    }
}
/// Undoable command that moves a spatial sound source between two positions.
#[derive(Debug)]
pub struct MoveSpatialSoundSourceCommand {
    source: Handle<SoundSource>,
    old_position: Vector3<f32>,
    new_position: Vector3<f32>,
}
impl MoveSpatialSoundSourceCommand {
    pub fn new(
        node: Handle<SoundSource>,
        old_position: Vector3<f32>,
        new_position: Vector3<f32>,
    ) -> Self {
        Self {
            source: node,
            old_position,
            new_position,
        }
    }
    /// Swaps old/new positions and returns the position to apply, so the
    /// same call sequence implements both execute and revert.
    fn swap(&mut self) -> Vector3<f32> {
        let position = self.new_position;
        std::mem::swap(&mut self.new_position, &mut self.old_position);
        position
    }
    /// Applies `position` to the source; silently does nothing if the
    /// source is not spatial.
    fn set_position(&self, sound_context: &SoundContext, position: Vector3<f32>) {
        let mut state = sound_context.state();
        if let SoundSource::Spatial(spatial) = state.source_mut(self.source) {
            spatial.set_position(position);
        }
    }
}
impl<'a> Command<'a> for MoveSpatialSoundSourceCommand {
    type Context = SceneContext<'a>;
    fn name(&mut self, _context: &Self::Context) -> String {
        "Move Spatial Sound Source".to_owned()
    }
    // execute and revert are symmetric: each swap() flips old/new, so
    // applying the returned position toggles between the two states.
    fn execute(&mut self, context: &mut Self::Context) {
        let position = self.swap();
        self.set_position(&context.scene.sound_context, position);
    }
    fn revert(&mut self, context: &mut Self::Context) {
        let position = self.swap();
        self.set_position(&context.scene.sound_context, position);
    }
}
/// Generates a set-property undo/redo command for any sound source.
///
/// `$apply_method` runs with `$self` (the command) and `$source` (the
/// borrowed source) in scope; it is expected to exchange the stored value
/// with the source's current one (see `get_set_swap!`), which makes
/// execute and revert the same operation.
macro_rules! define_sound_source_command {
    ($name:ident($human_readable_name:expr, $value_type:ty) where fn swap($self:ident, $source:ident) $apply_method:block ) => {
        #[derive(Debug)]
        pub struct $name {
            handle: Handle<SoundSource>,
            value: $value_type,
        }
        impl $name {
            pub fn new(handle: Handle<SoundSource>, value: $value_type) -> Self {
                Self { handle, value }
            }
            fn swap(&mut $self, sound_context: &SoundContext) {
                let mut state = sound_context.state();
                let $source = state.source_mut($self.handle);
                $apply_method
            }
        }
        impl<'a> Command<'a> for $name {
            type Context = SceneContext<'a>;
            fn name(&mut self, _context: &Self::Context) -> String {
                $human_readable_name.to_owned()
            }
            fn execute(&mut self, context: &mut Self::Context) {
                self.swap(&context.scene.sound_context);
            }
            fn revert(&mut self, context: &mut Self::Context) {
                self.swap(&context.scene.sound_context);
            }
        }
    };
}
/// Same as `define_sound_source_command!`, but the generated `swap`
/// pattern-matches the `Spatial` variant and panics (`unreachable!`) if
/// the handle points at a non-spatial source — callers must only use
/// these commands on spatial sources.
macro_rules! define_spatial_sound_source_command {
    ($name:ident($human_readable_name:expr, $value_type:ty) where fn swap($self:ident, $source:ident) $apply_method:block ) => {
        #[derive(Debug)]
        pub struct $name {
            handle: Handle<SoundSource>,
            value: $value_type,
        }
        impl $name {
            pub fn new(handle: Handle<SoundSource>, value: $value_type) -> Self {
                Self { handle, value }
            }
            fn swap(&mut $self, sound_context: &SoundContext) {
                let mut state = sound_context.state();
                if let SoundSource::Spatial($source) = state.source_mut($self.handle) {
                    $apply_method
                } else {
                    unreachable!();
                }
            }
        }
        impl<'a> Command<'a> for $name {
            type Context = SceneContext<'a>;
            fn name(&mut self, _context: &Self::Context) -> String {
                $human_readable_name.to_owned()
            }
            fn execute(&mut self, context: &mut Self::Context) {
                self.swap(&context.scene.sound_context);
            }
            fn revert(&mut self, context: &mut Self::Context) {
                self.swap(&context.scene.sound_context);
            }
        }
    };
}
// Property commands applicable to any sound source.
define_sound_source_command!(SetSoundSourceGainCommand("Set Sound Source Gain", f32) where fn swap(self, source) {
    get_set_swap!(self, source, gain, set_gain);
});
define_sound_source_command!(SetSoundSourceBufferCommand("Set Sound Source Buffer", Option<SoundBufferResource>) where fn swap(self, source) {
    get_set_swap!(self, source, buffer, set_buffer);
});
define_sound_source_command!(SetSoundSourceNameCommand("Set Sound Source Name", String) where fn swap(self, source) {
    get_set_swap!(self, source, name_owned, set_name);
});
define_sound_source_command!(SetSoundSourcePitchCommand("Set Sound Source Pitch", f64) where fn swap(self, source) {
    get_set_swap!(self, source, pitch, set_pitch);
});
define_sound_source_command!(SetSoundSourceLoopingCommand("Set Sound Source Looping", bool) where fn swap(self, source) {
    get_set_swap!(self, source, is_looping, set_looping);
});
define_sound_source_command!(SetSoundSourcePlayOnceCommand("Set Sound Source Play Once", bool) where fn swap(self, source) {
    get_set_swap!(self, source, is_play_once, set_play_once);
});
// Property commands valid only for spatial sources (panic otherwise).
define_spatial_sound_source_command!(SetSpatialSoundSourcePositionCommand("Set Spatial Sound Source Position", Vector3<f32>) where fn swap(self, source) {
    get_set_swap!(self, source, position, set_position);
});
define_spatial_sound_source_command!(SetSpatialSoundSourceRadiusCommand("Set Spatial Sound Source Radius", f32) where fn swap(self, source) {
    get_set_swap!(self, source, radius, set_radius);
});
define_spatial_sound_source_command!(SetSpatialSoundSourceRolloffFactorCommand("Set Spatial Sound Source Rolloff Factor", f32) where fn swap(self, source) {
    get_set_swap!(self, source, rolloff_factor, set_rolloff_factor);
});
define_spatial_sound_source_command!(SetSpatialSoundSourceMaxDistanceCommand("Set Spatial Sound Source Max Distance", f32) where fn swap(self, source) {
    get_set_swap!(self, source, max_distance, set_max_distance);
});
|
// Copyright 2016 PingCAP, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// See the License for the specific language governing permissions and
// limitations under the License.
// Copyright 2015 The etcd Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use errors::Error;
use hashbrown::hash_map::DefaultHashBuilder;
use hashbrown::{HashMap, HashSet};
use std::cell::RefCell;
use std::cmp;
// Since it's an integer, it rounds for us.
/// Strict majority of `total` nodes: floor(total / 2) + 1.
/// Integer division rounds down for us.
#[inline]
fn majority(total: usize) -> usize {
    total / 2 + 1
}
/// The state of the progress.
#[derive(Debug, PartialEq, Clone, Copy)]
pub enum ProgressState {
    /// Whether it's probing.
    Probe,
    /// Whether it's replicating.
    Replicate,
    /// Whether it's a snapshot.
    Snapshot,
}
// New progress entries start out probing.
impl Default for ProgressState {
    fn default() -> ProgressState {
        ProgressState::Probe
    }
}
// Cluster membership: which node ids vote and which only learn.
// A node id must never appear in both sets at once.
#[derive(Clone, Debug, Default)]
struct Configuration {
    voters: HashSet<u64>,
    learners: HashSet<u64>,
}
/// The status of an election according to a Candidate node.
///
/// This is returned by `progress_set.election_status(vote_map)`
#[derive(Clone, Copy, Debug)]
pub enum CandidacyStatus {
    /// The election has been won by this Raft.
    Elected,
    /// It is still possible to win the election.
    Eligible,
    /// It is no longer possible to win the election.
    Ineligible,
}
/// `ProgressSet` contains several `Progress`es,
/// which could be `Leader`, `Follower` and `Learner`.
#[derive(Default, Clone)]
pub struct ProgressSet {
    // Per-node replication progress, keyed by node id; covers both voters
    // and learners.
    progress: HashMap<u64, Progress>,
    // Which ids are voters vs. learners.
    configuration: Configuration,
    // A preallocated buffer for sorting in the minimally committed index function.
    // You should not depend on these values unless you just set them.
    // We use a cell to avoid taking a `&mut self`.
    sort_buffer: RefCell<Vec<u64>>,
}
impl ProgressSet {
/// Creates a new ProgressSet.
pub fn new() -> Self {
ProgressSet {
progress: Default::default(),
configuration: Default::default(),
sort_buffer: Default::default(),
}
}
/// Create a progress set with the specified sizes already reserved.
pub fn with_capacity(voters: usize, learners: usize) -> Self {
    ProgressSet {
        // The progress map holds voters and learners together.
        progress: HashMap::with_capacity_and_hasher(
            voters + learners,
            DefaultHashBuilder::default(),
        ),
        configuration: Configuration {
            voters: HashSet::with_capacity_and_hasher(voters, DefaultHashBuilder::default()),
            learners: HashSet::with_capacity_and_hasher(
                learners,
                DefaultHashBuilder::default(),
            ),
        },
        sort_buffer: Default::default(),
    }
}
/// Returns the status of voters.
#[inline]
pub fn voters(&self) -> impl Iterator<Item = (&u64, &Progress)> {
    let set = self.voter_ids();
    self.progress.iter().filter(move |(&k, _)| set.contains(&k))
}
/// Returns the status of learners.
#[inline]
pub fn learners(&self) -> impl Iterator<Item = (&u64, &Progress)> {
    let set = self.learner_ids();
    self.progress.iter().filter(move |(&k, _)| set.contains(&k))
}
/// Returns the mutable status of voters.
#[inline]
pub fn voters_mut(&mut self) -> impl Iterator<Item = (&u64, &mut Progress)> {
    // Borrow only the id set so `progress` can be borrowed mutably.
    let ids = &self.configuration.voters;
    self.progress
        .iter_mut()
        .filter(move |(k, _)| ids.contains(k))
}
/// Returns the mutable status of learners.
#[inline]
pub fn learners_mut(&mut self) -> impl Iterator<Item = (&u64, &mut Progress)> {
    let ids = &self.configuration.learners;
    self.progress
        .iter_mut()
        .filter(move |(k, _)| ids.contains(k))
}
/// Returns the ids of all known voters.
#[inline]
pub fn voter_ids(&self) -> &HashSet<u64> {
    &self.configuration.voters
}
/// Returns the ids of all known learners.
#[inline]
pub fn learner_ids(&self) -> &HashSet<u64> {
    &self.configuration.learners
}
/// Grabs a reference to the progress of a node.
#[inline]
pub fn get(&self, id: u64) -> Option<&Progress> {
    self.progress.get(&id)
}
/// Grabs a mutable reference to the progress of a node.
#[inline]
pub fn get_mut(&mut self, id: u64) -> Option<&mut Progress> {
    self.progress.get_mut(&id)
}
/// Returns an iterator across all the nodes and their progress.
#[inline]
pub fn iter(&self) -> impl ExactSizeIterator<Item = (&u64, &Progress)> {
    self.progress.iter()
}
/// Returns a mutable iterator across all the nodes and their progress.
#[inline]
pub fn iter_mut(&mut self) -> impl ExactSizeIterator<Item = (&u64, &mut Progress)> {
    self.progress.iter_mut()
}
/// Adds a voter node
pub fn insert_voter(&mut self, id: u64, pr: Progress) -> Result<(), Error> {
// If the progress exists already this is in error.
if self.progress.contains_key(&id) {
// Determine the correct error to return.
if self.learner_ids().contains(&id) {
return Err(Error::Exists(id, "learners"));
}
return Err(Error::Exists(id, "voters"));
}
self.configuration.voters.insert(id);
self.progress.insert(id, pr);
self.assert_progress_and_configuration_consistent();
Ok(())
}
/// Adds a learner to the cluster
pub fn insert_learner(&mut self, id: u64, pr: Progress) -> Result<(), Error> {
// If the progress exists already this is in error.
if self.progress.contains_key(&id) {
// Determine the correct error to return.
if self.learner_ids().contains(&id) {
return Err(Error::Exists(id, "learners"));
}
return Err(Error::Exists(id, "voters"));
}
self.configuration.learners.insert(id);
self.progress.insert(id, pr);
self.assert_progress_and_configuration_consistent();
Ok(())
}
/// Removes the peer from the set of voters or learners.
pub fn remove(&mut self, id: u64) -> Option<Progress> {
self.configuration.voters.remove(&id);
self.configuration.learners.remove(&id);
let removed = self.progress.remove(&id);
self.assert_progress_and_configuration_consistent();
removed
}
/// Promote a learner to a peer.
pub fn promote_learner(&mut self, id: u64) -> Result<(), Error> {
if !self.configuration.learners.remove(&id) {
// Wasn't already a learner. We can't promote what doesn't exist.
return Err(Error::NotExists(id, "learners"));
}
if !self.configuration.voters.insert(id) {
// Already existed, the caller should know this was a noop.
return Err(Error::Exists(id, "voters"));
}
self.assert_progress_and_configuration_consistent();
Ok(())
}
#[inline(always)]
fn assert_progress_and_configuration_consistent(&self) {
debug_assert!(self
.configuration
.voters
.union(&self.configuration.learners)
.all(|v| self.progress.contains_key(v)));
debug_assert!(self
.progress
.keys()
.all(|v| self.configuration.learners.contains(v)
|| self.configuration.voters.contains(v)));
assert_eq!(
self.configuration.voters.len() + self.configuration.learners.len(),
self.progress.len()
);
}
/// Returns the maximal committed index for the cluster.
///
/// Eg. If the matched indexes are [2,2,2,4,5], it will return 2.
pub fn maximal_committed_index(&self) -> u64 {
let mut matched = self.sort_buffer.borrow_mut();
matched.clear();
self.voters().for_each(|(_id, peer)| {
matched.push(peer.matched);
});
// Reverse sort.
matched.sort_by(|a, b| b.cmp(a));
// Smallest that the majority has commited.
matched[matched.len() / 2]
}
/// Returns the Candidate's eligibility in the current election.
///
/// If it is still eligible, it should continue polling nodes and checking.
/// Eventually, the election will result in this returning either `Elected`
/// or `Ineligible`, meaning the election can be concluded.
pub fn candidacy_status<'a>(
&self,
id: u64,
votes: impl IntoIterator<Item = (&'a u64, &'a bool)>,
) -> CandidacyStatus {
let (accepted, total) =
votes
.into_iter()
.fold((0, 0), |(mut accepted, mut total), (_, nominated)| {
if *nominated {
accepted += 1;
}
total += 1;
(accepted, total)
});
let quorum = majority(self.voter_ids().len());
let rejected = total - accepted;
info!(
"{} [quorum: {}] has received {} votes and {} vote rejections",
id, quorum, accepted, rejected,
);
if accepted >= quorum {
CandidacyStatus::Elected
} else if rejected == quorum {
CandidacyStatus::Ineligible
} else {
CandidacyStatus::Eligible
}
}
/// Determines if the current quorum is active according to the this raft node.
/// Doing this will set the `recent_active` of each peer to false.
///
/// This should only be called by the leader.
pub fn quorum_recently_active(&mut self, perspective_of: u64) -> bool {
let mut active = 0;
for (&id, pr) in self.voters_mut() {
if id == perspective_of {
active += 1;
continue;
}
if pr.recent_active {
active += 1;
}
pr.recent_active = false;
}
for (&_id, pr) in self.learners_mut() {
pr.recent_active = false;
}
active >= majority(self.voter_ids().len())
}
/// Determine if a quorum is formed from the given set of nodes.
pub fn has_quorum(&self, potential_quorum: &HashSet<u64>) -> bool {
potential_quorum.len() >= majority(self.voter_ids().len())
}
}
/// The progress of catching up from a restart.
///
/// Tracked by the leader, one per follower/learner.
#[derive(Debug, Clone, PartialEq)]
pub struct Progress {
    /// How much state is matched.
    pub matched: u64,
    /// The next index to apply.
    pub next_idx: u64,
    /// When in ProgressStateProbe, leader sends at most one replication message
    /// per heartbeat interval. It also probes actual progress of the follower.
    ///
    /// When in ProgressStateReplicate, leader optimistically increases next
    /// to the latest entry sent after sending replication message. This is
    /// an optimized state for fast replicating log entries to the follower.
    ///
    /// When in ProgressStateSnapshot, leader should have sent out snapshot
    /// before and stop sending any replication message.
    pub state: ProgressState,
    /// Paused is used in ProgressStateProbe.
    /// When Paused is true, raft should pause sending replication message to this peer.
    pub paused: bool,
    /// This field is used in ProgressStateSnapshot.
    /// If there is a pending snapshot, the pendingSnapshot will be set to the
    /// index of the snapshot. If pendingSnapshot is set, the replication process of
    /// this Progress will be paused. raft will not resend snapshot until the pending one
    /// is reported to be failed.
    pub pending_snapshot: u64,
    /// This is true if the progress is recently active. Receiving any messages
    /// from the corresponding follower indicates the progress is active.
    /// RecentActive can be reset to false after an election timeout.
    pub recent_active: bool,
    /// Inflights is a sliding window for the inflight messages.
    /// When inflights is full, no more message should be sent.
    /// When a leader sends out a message, the index of the last
    /// entry should be added to inflights. The index MUST be added
    /// into inflights in order.
    /// When a leader receives a reply, the previous inflights should
    /// be freed by calling inflights.freeTo.
    pub ins: Inflights,
}
impl Progress {
    /// Creates a new progress with the given settings.
    pub fn new(next_idx: u64, ins_size: usize) -> Self {
        Progress {
            matched: 0,
            next_idx,
            state: ProgressState::default(),
            paused: false,
            pending_snapshot: 0,
            recent_active: false,
            ins: Inflights::new(ins_size),
        }
    }
    // Switches to `state`, clearing the per-state scratch fields
    // (pause flag, pending snapshot, and the inflight window).
    fn reset_state(&mut self, state: ProgressState) {
        self.paused = false;
        self.pending_snapshot = 0;
        self.state = state;
        self.ins.reset();
    }
    // Fully reinitializes the progress (e.g. on a new leadership),
    // keeping only the inflight window's capacity.
    pub(crate) fn reset(&mut self, next_idx: u64) {
        self.matched = 0;
        self.next_idx = next_idx;
        self.state = ProgressState::default();
        self.paused = false;
        self.pending_snapshot = 0;
        self.recent_active = false;
        debug_assert!(self.ins.cap() != 0);
        self.ins.reset();
    }
    /// Changes the progress to a probe.
    pub fn become_probe(&mut self) {
        // If the original state is ProgressStateSnapshot, progress knows that
        // the pending snapshot has been sent to this peer successfully, then
        // probes from pendingSnapshot + 1.
        if self.state == ProgressState::Snapshot {
            let pending_snapshot = self.pending_snapshot;
            self.reset_state(ProgressState::Probe);
            self.next_idx = cmp::max(self.matched + 1, pending_snapshot + 1);
        } else {
            self.reset_state(ProgressState::Probe);
            self.next_idx = self.matched + 1;
        }
    }
    /// Changes the progress to a Replicate.
    pub fn become_replicate(&mut self) {
        self.reset_state(ProgressState::Replicate);
        self.next_idx = self.matched + 1;
    }
    /// Changes the progress to a snapshot.
    pub fn become_snapshot(&mut self, snapshot_idx: u64) {
        self.reset_state(ProgressState::Snapshot);
        self.pending_snapshot = snapshot_idx;
    }
    /// Sets the snapshot to failure.
    pub fn snapshot_failure(&mut self) {
        self.pending_snapshot = 0;
    }
    /// Unsets pendingSnapshot if Match is equal or higher than
    /// the pendingSnapshot.
    pub fn maybe_snapshot_abort(&self) -> bool {
        self.state == ProgressState::Snapshot && self.matched >= self.pending_snapshot
    }
    /// Returns false if the given n index comes from an outdated message.
    /// Otherwise it updates the progress and returns true.
    pub fn maybe_update(&mut self, n: u64) -> bool {
        let need_update = self.matched < n;
        if need_update {
            self.matched = n;
            // A confirmed append means the peer is responsive again.
            self.resume();
        };
        if self.next_idx < n + 1 {
            self.next_idx = n + 1
        }
        need_update
    }
    /// Optimistically advance the index.
    pub fn optimistic_update(&mut self, n: u64) {
        self.next_idx = n + 1;
    }
    /// Returns false if the given index comes from an out of order message.
    /// Otherwise it decreases the progress next index to min(rejected, last)
    /// and returns true.
    pub fn maybe_decr_to(&mut self, rejected: u64, last: u64) -> bool {
        if self.state == ProgressState::Replicate {
            // the rejection must be stale if the progress has matched and "rejected"
            // is smaller than "match".
            if rejected <= self.matched {
                return false;
            }
            // Fall back to probing right after the last confirmed entry.
            self.next_idx = self.matched + 1;
            return true;
        }
        // the rejection must be stale if "rejected" does not match next - 1
        if self.next_idx == 0 || self.next_idx - 1 != rejected {
            return false;
        }
        self.next_idx = cmp::min(rejected, last + 1);
        if self.next_idx < 1 {
            // next_idx is 1-based; never probe below the first entry.
            self.next_idx = 1;
        }
        self.resume();
        true
    }
    /// Determine whether progress is paused.
    pub fn is_paused(&self) -> bool {
        match self.state {
            // Probing sends at most one outstanding message.
            ProgressState::Probe => self.paused,
            // Replication is throttled only by the inflight window.
            ProgressState::Replicate => self.ins.full(),
            // Nothing is replicated while a snapshot is outstanding.
            ProgressState::Snapshot => true,
        }
    }
    /// Resume progress.
    pub fn resume(&mut self) {
        self.paused = false;
    }
    /// Pause progress.
    pub fn pause(&mut self) {
        self.paused = true;
    }
}
/// A buffer of inflight messages, kept as a ring over a lazily grown `Vec`.
#[derive(Debug, Clone, PartialEq)]
pub struct Inflights {
    // the starting index in the buffer
    start: usize,
    // number of inflights in the buffer
    count: usize,
    // ring buffer
    buffer: Vec<u64>,
}
impl Inflights {
    /// Creates a new buffer for inflight messages.
    ///
    /// NOTE(review): the window size comes from `Vec::capacity`, which is only
    /// guaranteed to be *at least* `cap`; storing the requested cap in its own
    /// field would pin the window exactly — confirm before relying on it.
    pub fn new(cap: usize) -> Inflights {
        Inflights {
            buffer: Vec::with_capacity(cap),
            start: 0,
            count: 0,
        }
    }
    /// Returns true if the inflights is full.
    pub fn full(&self) -> bool {
        self.count == self.cap()
    }
    /// The buffer capacity.
    pub fn cap(&self) -> usize {
        self.buffer.capacity()
    }
    /// Adds an inflight into inflights.
    ///
    /// # Panics
    /// Panics if the window is already full.
    pub fn add(&mut self, inflight: u64) {
        if self.full() {
            panic!("cannot add into a full inflights")
        }
        // Physical slot for the new entry, wrapping around the ring.
        let mut next = self.start + self.count;
        if next >= self.cap() {
            next -= self.cap();
        }
        assert!(next <= self.buffer.len());
        // The backing Vec grows lazily: push while the ring has never wrapped,
        // overwrite a previously freed slot afterwards.
        if next == self.buffer.len() {
            self.buffer.push(inflight);
        } else {
            self.buffer[next] = inflight;
        }
        self.count += 1;
    }
    /// Frees the inflights smaller or equal to the given `to` flight.
    pub fn free_to(&mut self, to: u64) {
        if self.count == 0 || to < self.buffer[self.start] {
            // out of the left side of the window
            return;
        }
        let mut i = 0usize;
        let mut idx = self.start;
        while i < self.count {
            if to < self.buffer[idx] {
                // found the first large inflight
                break;
            }
            // increase index and maybe rotate
            idx += 1;
            if idx >= self.cap() {
                idx -= self.cap();
            }
            i += 1;
        }
        // free i inflights and set new start index
        self.count -= i;
        self.start = idx;
    }
    /// Frees the first buffer entry.
    ///
    /// Does nothing when the window is empty. (The previous implementation
    /// indexed `buffer[start]` unconditionally, which panics on an empty or
    /// never-filled ring.)
    pub fn free_first_one(&mut self) {
        if self.count > 0 {
            let start = self.buffer[self.start];
            self.free_to(start);
        }
    }
    /// Frees all inflights.
    pub fn reset(&mut self) {
        self.count = 0;
        self.start = 0;
    }
}
#[cfg(test)]
mod test {
    use progress::Inflights;
    use setup_for_test;
    // Fills the window sequentially and checks ring wrap-around, including a
    // window whose start is mid-buffer.
    #[test]
    fn test_inflight_add() {
        setup_for_test();
        let mut inflight = Inflights::new(10);
        for i in 0..5 {
            inflight.add(i);
        }
        let wantin = Inflights {
            start: 0,
            count: 5,
            buffer: vec![0, 1, 2, 3, 4],
        };
        assert_eq!(inflight, wantin);
        for i in 5..10 {
            inflight.add(i);
        }
        let wantin2 = Inflights {
            start: 0,
            count: 10,
            buffer: vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9],
        };
        assert_eq!(inflight, wantin2);
        // Same again, but starting mid-ring so the later adds must wrap
        // around to the front of the buffer.
        let mut inflight2 = Inflights::new(10);
        inflight2.start = 5;
        inflight2.buffer.extend_from_slice(&[0, 0, 0, 0, 0]);
        for i in 0..5 {
            inflight2.add(i);
        }
        let wantin21 = Inflights {
            start: 5,
            count: 5,
            buffer: vec![0, 0, 0, 0, 0, 0, 1, 2, 3, 4],
        };
        assert_eq!(inflight2, wantin21);
        for i in 5..10 {
            inflight2.add(i);
        }
        let wantin22 = Inflights {
            start: 5,
            count: 10,
            buffer: vec![5, 6, 7, 8, 9, 0, 1, 2, 3, 4],
        };
        assert_eq!(inflight2, wantin22);
    }
    // `free_to` drops everything <= the argument. Freed slots keep their old
    // values in the backing buffer — only start/count move.
    #[test]
    fn test_inflight_free_to() {
        setup_for_test();
        let mut inflight = Inflights::new(10);
        for i in 0..10 {
            inflight.add(i);
        }
        inflight.free_to(4);
        let wantin = Inflights {
            start: 5,
            count: 5,
            buffer: vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9],
        };
        assert_eq!(inflight, wantin);
        inflight.free_to(8);
        let wantin2 = Inflights {
            start: 9,
            count: 1,
            buffer: vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9],
        };
        assert_eq!(inflight, wantin2);
        for i in 10..15 {
            inflight.add(i);
        }
        inflight.free_to(12);
        let wantin3 = Inflights {
            start: 3,
            count: 2,
            buffer: vec![10, 11, 12, 13, 14, 5, 6, 7, 8, 9],
        };
        assert_eq!(inflight, wantin3);
        inflight.free_to(14);
        let wantin4 = Inflights {
            start: 5,
            count: 0,
            buffer: vec![10, 11, 12, 13, 14, 5, 6, 7, 8, 9],
        };
        assert_eq!(inflight, wantin4);
    }
    // Freeing the first entry advances the window by exactly one slot.
    #[test]
    fn test_inflight_free_first_one() {
        setup_for_test();
        let mut inflight = Inflights::new(10);
        for i in 0..10 {
            inflight.add(i);
        }
        inflight.free_first_one();
        let wantin = Inflights {
            start: 1,
            count: 9,
            buffer: vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9],
        };
        assert_eq!(inflight, wantin);
    }
}
// TODO: Reorganize this whole file into separate files.
// See https://github.com/pingcap/raft-rs/issues/125
#[cfg(test)]
mod test_progress_set {
    use Result;
    use {Progress, ProgressSet};
    // Sentinel `matched` value used to detect whether a rejected insert
    // accidentally overwrote the progress that was already stored.
    const CANARY: u64 = 123;
    // Re-inserting an existing voter must error and leave the set unchanged.
    #[test]
    fn test_insert_redundant_voter() -> Result<()> {
        let mut set = ProgressSet::default();
        let default_progress = Progress::new(0, 256);
        let mut canary_progress = Progress::new(0, 256);
        canary_progress.matched = CANARY;
        set.insert_voter(1, default_progress.clone())?;
        assert!(
            set.insert_voter(1, canary_progress).is_err(),
            "Should return an error on redundant insert."
        );
        assert_eq!(
            *set.get(1).expect("Should be inserted."),
            default_progress,
            "The ProgressSet was mutated in a `insert_voter` that returned error."
        );
        Ok(())
    }
    // Re-inserting an existing learner must error and leave the set unchanged.
    #[test]
    fn test_insert_redundant_learner() -> Result<()> {
        let mut set = ProgressSet::default();
        let default_progress = Progress::new(0, 256);
        let mut canary_progress = Progress::new(0, 256);
        canary_progress.matched = CANARY;
        set.insert_learner(1, default_progress.clone())?;
        assert!(
            set.insert_learner(1, canary_progress).is_err(),
            "Should return an error on redundant insert."
        );
        assert_eq!(
            *set.get(1).expect("Should be inserted."),
            default_progress,
            "The ProgressSet was mutated in a `insert_learner` that returned error."
        );
        Ok(())
    }
    // An id registered as a voter cannot also be inserted as a learner.
    #[test]
    fn test_insert_learner_that_is_voter() -> Result<()> {
        let mut set = ProgressSet::default();
        let default_progress = Progress::new(0, 256);
        let mut canary_progress = Progress::new(0, 256);
        canary_progress.matched = CANARY;
        set.insert_voter(1, default_progress.clone())?;
        assert!(
            set.insert_learner(1, canary_progress).is_err(),
            "Should return an error on invalid learner insert."
        );
        assert_eq!(
            *set.get(1).expect("Should be inserted."),
            default_progress,
            "The ProgressSet was mutated in a `insert_learner` that returned error."
        );
        Ok(())
    }
    // An id registered as a learner cannot also be inserted as a voter.
    #[test]
    fn test_insert_voter_that_is_learner() -> Result<()> {
        let mut set = ProgressSet::default();
        let default_progress = Progress::new(0, 256);
        let mut canary_progress = Progress::new(0, 256);
        canary_progress.matched = CANARY;
        set.insert_learner(1, default_progress.clone())?;
        assert!(
            set.insert_voter(1, canary_progress).is_err(),
            "Should return an error on invalid voter insert."
        );
        assert_eq!(
            *set.get(1).expect("Should be inserted."),
            default_progress,
            "The ProgressSet was mutated in a `insert_voter` that returned error."
        );
        Ok(())
    }
    // Promoting a voter (id 1) or an unknown id (2) must error; the stored
    // progress of the existing peer must survive the failed attempts.
    #[test]
    fn test_promote_learner() -> Result<()> {
        let mut set = ProgressSet::default();
        let default_progress = Progress::new(0, 256);
        set.insert_voter(1, default_progress)?;
        let pre = set.get(1).expect("Should have been inserted").clone();
        assert!(
            set.promote_learner(1).is_err(),
            "Should return an error on invalid promote_learner."
        );
        assert!(
            set.promote_learner(2).is_err(),
            "Should return an error on invalid promote_learner."
        );
        assert_eq!(pre, *set.get(1).expect("Peer should not have been deleted"));
        Ok(())
    }
}
|
// --- Day 7: Handy Haversacks ---
// You land at the regional airport in time for your next flight. In
// fact, it looks like you'll even have time to grab some food: all
// flights are currently delayed due to issues in luggage processing.
// Due to recent aviation regulations, many rules (your puzzle input)
// are being enforced about bags and their contents; bags must be
// color-coded and must contain specific quantities of other
// color-coded bags. Apparently, nobody responsible for these
// regulations considered how long they would take to enforce!
// For example, consider the following rules:
// light red bags contain 1 bright white bag, 2 muted yellow bags.
// dark orange bags contain 3 bright white bags, 4 muted yellow bags.
// bright white bags contain 1 shiny gold bag.
// muted yellow bags contain 2 shiny gold bags, 9 faded blue bags.
// shiny gold bags contain 1 dark olive bag, 2 vibrant plum bags.
// dark olive bags contain 3 faded blue bags, 4 dotted black bags.
// vibrant plum bags contain 5 faded blue bags, 6 dotted black bags.
// faded blue bags contain no other bags.
// dotted black bags contain no other bags.
// These rules specify the required contents for 9 bag types. In this
// example, every faded blue bag is empty, every vibrant plum bag
// contains 11 bags (5 faded blue and 6 dotted black), and so on.
// You have a shiny gold bag. If you wanted to carry it in at least
// one other bag, how many different bag colors would be valid for the
// outermost bag? (In other words: how many colors can, eventually,
// contain at least one shiny gold bag?)
// In the above rules, the following options would be available to you:
// A bright white bag, which can hold your shiny gold bag directly.
// A muted yellow bag, which can hold your shiny gold bag directly,
// plus some other bags.
// A dark orange bag, which can hold bright white and muted yellow
// bags, either of which could then hold your shiny gold bag.
// A light red bag, which can hold bright white and muted yellow bags,
// either of which could then hold your shiny gold bag.
// So, in this example, the number of bag colors that can eventually
// contain at least one shiny gold bag is 4.
// How many bag colors can eventually contain at least one shiny gold
// bag? (The list of rules is quite long; make sure you get all of
// it.)
// --- Part Two ---
// It's getting pretty expensive to fly these days - not because of
// ticket prices, but because of the ridiculous number of bags you
// need to buy!
// Consider again your shiny gold bag and the rules from the above
// example:
// faded blue bags contain 0 other bags.
// dotted black bags contain 0 other bags.
// vibrant plum bags contain 11 other bags: 5 faded blue bags and 6
// dotted black bags.
// dark olive bags contain 7 other bags: 3 faded blue bags and 4
// dotted black bags.
// So, a single shiny gold bag must contain 1 dark olive bag (and the
// 7 bags within it) plus 2 vibrant plum bags (and the 11 bags within
// each of those): 1 + 1*7 + 2 + 2*11 = 32 bags!
// Of course, the actual rules have a small chance of going several
// levels deeper than this example; be sure to count all of the bags,
// even if the nesting becomes topologically impractical!
// Here's another example:
// shiny gold bags contain 2 dark red bags.
// dark red bags contain 2 dark orange bags.
// dark orange bags contain 2 dark yellow bags.
// dark yellow bags contain 2 dark green bags.
// dark green bags contain 2 dark blue bags.
// dark blue bags contain 2 dark violet bags.
// dark violet bags contain no other bags.
// In this example, a single shiny gold bag must contain 126 other
// bags.
// How many individual bags are required inside your single shiny gold
// bag?
use petgraph::{algo::all_simple_paths, prelude::DiGraph, Direction};
use std::collections::{HashMap, HashSet};
use std::path::Path;
/// Day 7 part 1: how many bag colors can eventually contain a shiny gold bag.
pub fn part1() -> u32 {
    let input = std::fs::read_to_string(Path::new("day7-input.txt")).expect("read");
    // Parse each rule into its container color and (container, contained, qty) edges.
    let mut colors = HashSet::new();
    let mut edge_list = Vec::new();
    for (container, mut rule_edges) in input.lines().map(parse_rule) {
        colors.insert(container);
        edge_list.append(&mut rule_edges);
    }
    // Assign every distinct color a dense integer id for the graph.
    let mut nodeids = HashMap::new();
    for (index, color) in colors.iter().enumerate() {
        nodeids.insert(color.to_string(), index as u32);
    }
    let graph = DiGraph::<u32, u32, _>::from_edges(
        edge_list
            .iter()
            .map(|(a, s, w)| (nodeid(&nodeids, a), nodeid(&nodeids, s), w)),
    );
    count_to(graph, nodeid(&nodeids, "shiny gold"))
}
/// Day 7 part 2: total bags required inside a single shiny gold bag.
pub fn part2() -> u32 {
    let input = std::fs::read_to_string(Path::new("day7-input.txt")).expect("read");
    // Parse each rule into its container color and (container, contained, qty) edges.
    let mut colors = HashSet::new();
    let mut edge_list = Vec::new();
    for (container, mut rule_edges) in input.lines().map(parse_rule) {
        colors.insert(container);
        edge_list.append(&mut rule_edges);
    }
    // Assign every distinct color a dense integer id for the graph.
    let mut nodeids = HashMap::new();
    for (index, color) in colors.iter().enumerate() {
        nodeids.insert(color.to_string(), index as u32);
    }
    let graph = DiGraph::<u32, u32>::from_edges(
        edge_list
            .iter()
            .map(|(a, s, w)| (nodeid(&nodeids, a), nodeid(&nodeids, s), w)),
    );
    // Subtract one so the shiny gold bag itself is not counted.
    sum_from(&graph, nodeid(&nodeids, "shiny gold").into(), 1) - 1
}
/// Parses one bag rule into `(container_color, edges)`, where each edge is
/// `(container_color, contained_color, quantity)`.
///
/// Example: `"light red bags contain 1 bright white bag, 2 muted yellow bags."`
/// yields `("light red", [("light red", "bright white", 1), ("light red", "muted yellow", 2)])`.
///
/// # Panics
/// Panics (with a descriptive message) on a line that does not follow the
/// puzzle's rule grammar — the old index-based version panicked with an
/// unhelpful out-of-bounds error instead.
fn parse_rule(rule: &str) -> (String, Vec<(String, String, u32)>) {
    let (a, rest) = rule
        .split_once(" bags contain ")
        .expect("rule must contain ` bags contain `");
    let w_ds: Vec<(u32, String)> = match rest {
        // Leaf rule: the bag holds nothing.
        "no other bags." => vec![],
        other => other
            .split(',')
            .map(|w_d| {
                // Each clause looks like "<qty> <adjective> <color> bag[s][.,]".
                let mut words = w_d.split_whitespace();
                let w: u32 = words
                    .next()
                    .expect("missing quantity")
                    .parse()
                    .expect("u32-parse");
                let d1 = words.next().expect("missing first color word");
                let d2 = words.next().expect("missing second color word");
                (w, format!("{} {}", d1, d2))
            })
            .collect(),
    };
    (
        a.to_string(),
        w_ds.into_iter()
            .map(|(w, d)| (a.to_string(), d, w))
            .collect(),
    )
}
/// Counts how many distinct nodes appear on any simple path from a source
/// (a node with no incoming edges) to `dindex`, excluding `dindex` itself.
fn count_to(graph: DiGraph<u32, u32>, dindex: u32) -> u32 {
    let mut reachable = HashSet::new();
    // Every path ends at `dindex`, so it lands in the set too; subtract it
    // back out at the end.
    for source in graph.externals(Direction::Incoming) {
        for path in all_simple_paths::<Vec<_>, _>(&graph, source, dindex.into(), 0, None) {
            reachable.extend(path);
        }
    }
    reachable.len().saturating_sub(1) as u32
}
/// Recursively counts `weight` copies of the subtree rooted at `start`:
/// the bags of this color themselves plus everything they transitively contain.
///
/// NOTE(review): this zips `graph.edges(start)` with `graph.neighbors(start)`
/// and assumes both iterators yield corresponding edges/targets in the same
/// order. Using each edge's own target instead would remove that coupling —
/// confirm the pairing still holds before upgrading petgraph.
fn sum_from(graph: &DiGraph<u32, u32>, start: petgraph::prelude::NodeIndex, weight: u32) -> u32 {
    let edges = graph.edges(start);
    let nodes = graph.neighbors(start);
    let mut sum = 0;
    for (edge, node) in edges.zip(nodes) {
        // Each child contributes (its multiplicity) * (its own subtree total).
        sum += sum_from(&graph, node, *edge.weight());
    }
    // `weight` bags of this color, each containing `sum` bags.
    weight + weight * sum
}
/// Looks up the dense integer id previously assigned to a bag color.
///
/// # Panics
/// Panics with the offending color name when the color was never registered —
/// far easier to debug than the previous bare `expect("get")`.
fn nodeid(nodeids: &HashMap<String, u32>, node: &str) -> u32 {
    *nodeids
        .get(node)
        .unwrap_or_else(|| panic!("unknown bag color: {:?}", node))
}
#[cfg(test)]
mod tests {
    use super::*;
    // NOTE(review): both tests pin the answers for one specific
    // `day7-input.txt` and read it from the working directory, so they only
    // pass with that exact puzzle input present.
    #[test]
    fn test_part1() {
        assert_eq!(124, part1())
    }
    #[test]
    fn test_part2() {
        assert_eq!(34862, part2())
    }
}
|
extern crate toml;
extern crate rustc_serialize;
mod config;
/// Parses `/etc/fstab` with the project's config parser and dumps the
/// result via its Debug representation.
fn main() {
    let parsed = config::Config::parse("/etc/fstab");
    println!("{:?}", parsed);
}
|
/*
https://projecteuler.net
The nth term of the sequence of triangle numbers is given by,
tn = ½n(n+1); so the first ten triangle numbers are:
1, 3, 6, 10, 15, 21, 28, 36, 45, 55, ...
By converting each letter in a word to a number corresponding to its
alphabetical position and adding these values we form a word
value. For example, the word value for SKY is 19 + 11 + 25 = 55 =
t10. If the word value is a triangle number then we shall call the
word a triangle word.
Using words.txt (right click and 'Save Link/Target As...'), a 16K text
file containing nearly two-thousand common English words, how many are
triangle words?
NOTES:
*/
/// Returns the first 30 triangle numbers, t(n) = n*(n+1)/2 for n = 1..=30.
///
/// 30 terms (max 465) comfortably cover any word value in the puzzle input.
fn triangle_number_seq() -> Vec<u64> {
    (1..=30u64).map(|n| n * (n + 1) / 2).collect()
}
/// The sixth triangle number (index 5) must be 21.
#[test]
fn test_tns() {
    let sixth = triangle_number_seq()[5];
    assert!(sixth == 21);
}
/// Counts the "triangle words" in the word file named by the single CLI argument.
///
/// A word's value is the sum of its letters' alphabet positions (A = 1); the
/// word is a triangle word when that value is a triangle number. Input words
/// are assumed to be uppercase ASCII, per the puzzle's words.txt.
///
/// # Panics
/// Panics when the argument is missing, the file cannot be opened, or a read
/// fails.
fn solve() -> u64 {
    let args: std::vec::Vec<String> = std::env::args().collect();
    if args.len() != 2 {
        panic!("needs a single argument");
    }
    let path = std::path::Path::new(&args[1]);
    // Open the path in read-only mode, returns `io::Result<File>`
    let infile = match std::fs::File::open(path) {
        Err(why) => panic!("couldn't open {}: {}", path.display(), why),
        Ok(infile) => infile,
    };
    // The input is one long line of the form: "A","ABILITY","ABLE",...
    // Collect the text between quote pairs, ignoring the separating commas.
    let mut words = std::vec::Vec::<String>::with_capacity(46 * 1024 / 4);
    let mut current = String::new();
    let reader = std::io::BufReader::new(infile);
    use std::io::prelude::*; // needed for .bytes()
    for byte in reader.bytes() {
        match byte.unwrap() as char {
            // Separators between quoted words carry no information.
            ',' => {}
            // A quote closes a word when one is pending, otherwise opens one.
            '"' => {
                if !current.is_empty() {
                    words.push(std::mem::take(&mut current));
                }
            }
            c => current.push(c),
        }
    }
    let tns = triangle_number_seq();
    // `tns` is ascending, so binary_search decides membership in O(log n).
    words
        .iter()
        .filter(|w| {
            // b'@' is the byte just before b'A', so 'A' maps to 1.
            let value: u64 = w.bytes().map(|c| u64::from(c - b'@')).sum();
            tns.binary_search(&value).is_ok()
        })
        .count() as u64
}
/// Times `solve()` and prints the solution plus the elapsed microseconds,
/// grouped with `,` as a thousands separator (e.g. `1,234`).
fn main() {
    let start_time = std::time::Instant::now();
    let sol = solve();
    let elapsed = start_time.elapsed().as_micros();
    println!("\nSolution: {}", sol);
    // Build the grouped string from the least significant group upward.
    let mut remain = elapsed;
    let mut s = String::new();
    if remain == 0 {
        s.insert(0, '0');
    }
    while remain > 0 {
        let temp = remain % 1000;
        remain /= 1000;
        if remain > 0 {
            // Inner groups are zero-padded to three digits.
            s = format!(",{:03}", temp) + &s;
        } else {
            // The most significant group is printed without padding.
            s = format!("{}", temp) + &s;
        }
    }
    // Fixed typo in the output message: "Elasped" -> "Elapsed".
    println!("Elapsed time: {} us", s);
}
|
extern crate racer;
extern crate racer_testutils;
use racer::Coordinate;
use racer_testutils::*;
// Completing `S::ne` where `S` is a `use ... as` alias must offer the
// aliased struct's static methods.
// NOTE(review): the fn name has a typo ("complets" -> "completes"); kept
// as-is to avoid breaking test-name filters.
#[test]
fn complets_static_methods_for_alias() {
    // `~` marks the cursor position for the completion engine.
    let src = r#"
mod mymod {
pub struct St {
mem1: String,
mem2: usize,
}
impl St {
pub fn new() -> Self {
St { mem1: "a".to_owned(), mem2: 5 }
}
}
}
fn main() {
use mymod::St as S;
let s = S::ne~
}
"#;
    assert_eq!(get_only_completion(src, None).matchstr, "new");
}
// Finding the definition of `S` in `S::new()` must resolve to the alias
// itself (`St as S`), not to the original struct declaration.
#[test]
fn finds_definition_of_use_as() {
    // `~` marks the cursor position for the definition lookup.
    let src = r#"
mod mymod {
pub struct St {
mem1: String,
mem2: usize,
}
}
fn main() {
use mymod::St as S;
let s = S~::new();
}
"#;
    let got = get_definition(src, None);
    assert_eq!(got.matchstr, "S");
    // NOTE(review): the expected row/column depends on the exact layout of
    // the fixture literal above — re-check if it is ever reformatted.
    assert_eq!(got.coords.unwrap(), Coordinate::new(9, 29));
}
// moved from system.rs
// A `use ... as` alias declared before the `mod` statement must still
// resolve through to the aliased item in the sibling file.
#[test]
fn follows_use_as() {
    let src2 = "
pub fn myfn() {}
pub fn foo() {}
";
    // `~` marks the cursor position for the definition lookup.
    let src = "
use src2::myfn as myfoofn;
mod src2;
fn main() {
my~foofn();
}
";
    let dir = TmpDir::new();
    let _src2 = dir.write_file("src2.rs", src2);
    let got = get_definition(src, Some(dir));
    assert_eq!(got.matchstr, "myfoofn");
    assert_eq!(got.contextstr, "src2::myfn as myfoofn");
}
// moved from system.rs
/// Verifies fix for https://github.com/racer-rust/racer/issues/753
// Aliases inside a braced use list (`use m::{Wrapper as Wpr, Second}`)
// must resolve the same way as a standalone `use ... as`.
#[test]
fn follows_use_as_in_braces() {
    // `~` marks the cursor position for the definition lookup.
    let src = "
mod m {
pub struct Wrapper {
pub x: i32,
}
pub struct Second {
pub y: i32,
}
}
fn main() {
use m::{Wrapper as Wpr, Second};
let _ = W~pr { x: 1 };
}
";
    let got = get_definition(src, None);
    assert_eq!(got.matchstr, "Wpr");
    assert_eq!(got.contextstr, "Wrapper as Wpr");
}
|
use iced::widget::{button, column, radio, row, text_input};
use iced::widget::{Column, Row};
use iced::{window, Alignment, Element, Sandbox, Settings};
use std::fs::{self, File};
use std::io::Write;
use cubes::{project_dir_cubes, Puzzle};
pub fn main() -> iced::Result {
PolycubePieces::run(Settings {
window: window::Settings {
size: (210, 500),
..Default::default()
},
..Default::default()
})
}
/// Application state for the polycube puzzle-piece editor.
struct PolycubePieces {
    // File name the finished puzzle will be saved under.
    name: String,
    // The 3x3x3 grid of currently selected cells (the piece being edited).
    cube: [[[bool; 3]; 3]; 3],
    // All pieces saved so far.
    pieces: Puzzle,
}
impl PolycubePieces {
    /// Counts the cells currently selected in the 3x3x3 editing cube.
    fn sum_cubes(&self) -> i32 {
        // Flatten the nested arrays and count the `true` cells directly.
        self.cube
            .iter()
            .flatten()
            .flatten()
            .filter(|&&selected| selected)
            .count() as i32
    }
    /// Counts the cells across all saved pieces.
    ///
    /// Sums `Vec::len` per piece instead of iterating every cell just to add
    /// one per element, as the previous implementation did.
    fn sum_pieces(&self) -> i32 {
        self.pieces
            .data
            .iter()
            .map(|piece| piece.len())
            .sum::<usize>() as i32
    }
}
impl Default for PolycubePieces {
fn default() -> Self {
Self {
name: "".to_owned(),
cube: [[[false; 3]; 3]; 3],
pieces: Puzzle { data: Vec::new() },
}
}
}
/// UI events produced by the editor's widgets.
#[derive(Clone, Debug)]
enum Message {
    /// The name text input changed to the contained value.
    NameChanged(String),
    /// The radio at flat index `9*x + 3*y + z` was clicked.
    SelectedPiece(usize),
    /// "Save Piece" pressed: capture the selected cells as a new piece.
    SavePiecePressed,
    /// "Save All" pressed: serialize the whole puzzle to disk.
    SaveAllPressed,
}
impl Sandbox for PolycubePieces {
    type Message = Message;
    fn new() -> Self {
        PolycubePieces::default()
    }
    fn title(&self) -> String {
        String::from("Polycube Pieces")
    }
    /// Applies a UI message to the editor state.
    fn update(&mut self, message: Message) {
        match message {
            Message::NameChanged(name) => {
                self.name = name;
            }
            Message::SelectedPiece(i) => {
                // Decode the flat radio index (9*x + 3*y + z) back into cube
                // coordinates and toggle that cell.
                let x = i / 9;
                let y = (i / 3) % 3;
                let z = i % 3;
                self.cube[x][y][z] = !self.cube[x][y][z];
            }
            Message::SavePiecePressed => {
                // Append the currently selected cells as a new piece.
                self.pieces.data.push(Vec::new());
                let len = self.pieces.data.len() - 1;
                for x in 0..3 {
                    for y in 0..3 {
                        for z in 0..3 {
                            if self.cube[x][y][z] {
                                println!("{x} {y} {z}");
                                self.pieces.data[len].push([x as i32, y as i32, z as i32]);
                            }
                        }
                    }
                }
                println!();
                // NOTE(review): the editing cube is not cleared after a save —
                // confirm whether that is intended.
            }
            Message::SaveAllPressed => {
                // Serialize the puzzle with bincode into the per-user data
                // directory, creating the `puzzles` folder if necessary.
                let proj_dirs = project_dir_cubes().expect("expected a cubes directory");
                let dir = proj_dirs.data_dir();
                let path = dir.join("puzzles");
                fs::create_dir_all(&path).unwrap();
                let mut buffer = File::create(path.join(&self.name)).unwrap();
                let encoded: Vec<u8> = bincode::serialize(&self.pieces).unwrap();
                buffer.write_all(&encoded).unwrap();
                println!("saved {}", self.name);
            }
        }
    }
    /// Builds the widget tree: the name input, nine rows of radio buttons
    /// (one per cube cell, grouped by layer), and the two save buttons.
    fn view(&self) -> Element<Message> {
        let name = row!["Name: ", text_input("", &self.name, Message::NameChanged),]
            .padding(10)
            .align_items(Alignment::Start);
        let mut pieces_matrix = Vec::new();
        for x in 0..3 {
            if x > 0 {
                // Blank spacer row between the three layers.
                pieces_matrix.push(Row::new().padding(10).into());
            }
            for y in 0..3 {
                let mut row = Vec::new();
                for z in 0..3 {
                    let i = 9 * x + 3 * y + z;
                    // A radio renders "selected" when its value equals the
                    // current selection; reuse that to show the cell's state.
                    let mut cube_selected = None;
                    if self.cube[x][y][z] {
                        cube_selected = Some(i);
                    }
                    row.push(radio("", i, cube_selected, Message::SelectedPiece).into());
                }
                pieces_matrix.push(Row::with_children(row).into());
            }
        }
        let pieces_col = Column::with_children(pieces_matrix).padding(10).spacing(10);
        // "Save Piece" is enabled (gets on_press) only when something is
        // selected and the total stays within the 27 cells of a 3x3x3 cube.
        let save_piece = if self.sum_pieces() >= 27
            || self.sum_cubes() < 1
            || self.sum_cubes() + self.sum_pieces() > 27
        {
            button("Save Piece")
        } else {
            button("Save Piece").on_press(Message::SavePiecePressed)
        };
        // "Save All" is enabled only once exactly 27 cells are placed and a
        // file name has been entered.
        let save_all = if self.sum_pieces() != 27 || self.name.is_empty() {
            button("Save All")
        } else {
            button("Save All").on_press(Message::SaveAllPressed)
        };
        let buttons = row![save_piece, save_all]
            .padding(10)
            .spacing(10)
            .align_items(Alignment::Start);
        column![name, pieces_col, buttons]
            .align_items(Alignment::Start)
            .into()
    }
}
|
// Vicfred
// https://codeforces.com/problemset/problem/1285/C
// math
use std::io;
/// Greatest common divisor via the iterative Euclidean algorithm.
fn gcd(a: i64, b: i64) -> i64 {
    let (mut x, mut y) = (a, b);
    while y != 0 {
        let r = x % y;
        x = y;
        y = r;
    }
    x
}
/// Least common multiple.
///
/// Divides before multiplying (`a / gcd * b` instead of `(a * b) / gcd`) so
/// the intermediate product cannot overflow `i64` whenever the result itself
/// fits — the old form overflowed for large coprime inputs.
fn lcm(a: i64, b: i64) -> i64 {
    (a / gcd(a, b)) * b
}
/// Codeforces 1285C: among all pairs (a, b) with lcm(a, b) == n, find the one
/// minimizing max(a, b). Scanning divisors d <= sqrt(n) and keeping the
/// largest d with lcm(d, n/d) == n yields the answer pair (d, n/d).
fn main() {
    let mut input = String::new();
    io::stdin().read_line(&mut input).unwrap();
    let n: i64 = input.trim().parse().unwrap();
    // d = 1 always qualifies, so this is guaranteed to be >= 1 by loop's end.
    let mut maxima = -1;
    let limit = (n as f64).sqrt() as i64;
    for d in 1..=limit {
        if n % d == 0 && lcm(d, n / d) == n {
            maxima = std::cmp::max(maxima, d);
        }
    }
    println!("{} {}", maxima, n / maxima);
}
|
use anyhow::{bail, Result};
use std::io::Write;
use std::path::Path;
use std::process::Command;
use tempfile::NamedTempFile;
/// Locates the `wasmtime` binary that lives next to the test executable
/// (one directory above `deps/`), runs it with `args`, and returns its
/// captured stdout. Fails with stderr attached when the exit status is bad.
fn run_wasmtime(args: &[&str]) -> Result<String> {
    let wasmtime = {
        let mut path = std::env::current_exe()?;
        path.pop(); // drop the test binary's file name
        path.pop(); // drop the `deps` directory
        path.push("wasmtime");
        path
    };
    let output = Command::new(&wasmtime).args(args).output()?;
    if !output.status.success() {
        bail!(
            "Failed to execute wasmtime with: {:?}\n{}",
            args,
            String::from_utf8_lossy(&output.stderr)
        );
    }
    Ok(String::from_utf8(output.stdout).unwrap())
}
/// Compile the WAT file at `wat_path` into a temporary binary wasm file.
///
/// Returns the `NamedTempFile` holding the module; the file is removed when
/// the returned handle is dropped.
fn build_wasm(wat_path: impl AsRef<Path>) -> Result<NamedTempFile> {
    let mut wasm_file = NamedTempFile::new()?;
    let wasm = wat::parse_file(wat_path)?;
    // `write` may perform a short write and silently drop the rest of the
    // module; `write_all` guarantees every byte reaches the file.
    wasm_file.write_all(&wasm)?;
    Ok(wasm_file)
}
// Very basic use case: compile binary wasm file and run specific function with arguments.
#[test]
fn run_wasmtime_simple() -> Result<()> {
    // Compile tests/wasm/simple.wat and invoke `simple(4)` through the CLI;
    // a non-zero exit from wasmtime surfaces as Err via `?`.
    let wasm = build_wasm("tests/wasm/simple.wat")?;
    run_wasmtime(&[
        "run",
        wasm.path().to_str().unwrap(),
        "--invoke",
        "simple",
        "--disable-cache",
        "4",
    ])?;
    Ok(())
}
// Wasmtime should fail when not enough arguments are provided.
#[test]
fn run_wasmtime_simple_fail_no_args() -> Result<()> {
    let wasm = build_wasm("tests/wasm/simple.wat")?;
    // `--invoke simple` without the argument the function expects must make
    // wasmtime exit non-zero, which `run_wasmtime` reports as an Err.
    assert!(
        run_wasmtime(&[
            "run",
            wasm.path().to_str().unwrap(),
            "--disable-cache",
            "--invoke",
            "simple",
        ])
        .is_err(),
        "shall fail"
    );
    Ok(())
}
// Running simple wat
#[test]
fn run_wasmtime_simple_wat() -> Result<()> {
    // Same invocation as `run_wasmtime_simple`; exercises the WAT text path
    // end to end (build_wasm parses the .wat into binary wasm first).
    let wasm = build_wasm("tests/wasm/simple.wat")?;
    run_wasmtime(&[
        "run",
        wasm.path().to_str().unwrap(),
        "--invoke",
        "simple",
        "--disable-cache",
        "4",
    ])?;
    Ok(())
}
|
use std::io;
use std::io::BufRead;
use std::io::BufReader;
fn main() {
    // Read the whole ASCII map from stdin, one row of bytes per line.
    let grid: Vec<Vec<u8>> = BufReader::new(io::stdin())
        .lines()
        .map(|line| line.unwrap().into_bytes())
        .collect();

    // Start on the '|' in the top row (last occurrence, matching the
    // original scan), heading downwards.
    let mut pos_x = grid[0]
        .iter()
        .rposition(|&c| c == b'|')
        .map(|i| i as i32)
        .unwrap_or(0);
    let mut pos_y = 0i32;
    let mut heading: (i32, i32) = (0, 1);

    let mut seen = String::new();
    let mut steps = 0;
    loop {
        let cell = grid[pos_y as usize][pos_x as usize];
        if cell == b' ' {
            // Walked off the end of the path.
            break;
        }
        if cell.is_ascii_alphabetic() {
            seen.push(cell as char);
        } else if cell == b'+' {
            // At a corner: turn onto whichever perpendicular neighbour
            // continues the path (first non-blank wins, as before).
            let turns: [(i32, i32); 2] = if heading.0 == 0 {
                [(-1, 0), (1, 0)] // was moving vertically -> try left/right
            } else {
                [(0, -1), (0, 1)] // was moving horizontally -> try up/down
            };
            for &d in turns.iter() {
                if grid[(pos_y + d.1) as usize][(pos_x + d.0) as usize] != b' ' {
                    heading = d;
                    break;
                }
            }
        }
        pos_x += heading.0;
        pos_y += heading.1;
        steps += 1;
    }
    println!("part 1: {}", seen);
    println!("part 2: {}", steps);
}
|
use std::fmt::Debug;
use std::sync::Arc;
use crate::prelude::*;
use crate::math::*;
use crate::math::Transform;
use crate::interaction::SurfaceInteraction;
mod cylinder;
pub use self::cylinder::Cylinder;
mod sphere;
pub use self::sphere::Sphere;
/// Data common to every shape implementation: the object/world transform
/// pair and the normal-orientation flag.
#[derive(Clone, Debug)]
pub struct ShapeData {
    /// Transform taking object space to world space.
    pub object_to_world: Arc<Transform>,
    /// Cached inverse of `object_to_world` (computed once in `ShapeData::new`).
    pub world_to_object: Arc<Transform>,
    /// When true, surface normals are flipped relative to their default direction.
    pub reverse_orientation: bool,
}
impl ShapeData {
pub fn new(object_to_world: Arc<Transform>, reverse_orientation: bool) -> Self {
Self {
world_to_object: Arc::new(object_to_world.inverse()),
object_to_world,
reverse_orientation,
}
}
}
pub trait Shape: Debug {
fn data(&self) -> &ShapeData;
fn object_to_world(&self) -> &Arc<Transform> {
&self.data().object_to_world
}
fn world_to_object(&self) -> &Arc<Transform> {
&self.data().world_to_object
}
fn reverse_orientation(&self) -> bool {
self.data().reverse_orientation
}
fn transform_swaps_handedness(&self) -> bool {
self.object_to_world().swaps_handedness()
}
fn object_bounds(&self) -> Bounds3f;
fn world_bound(&self) -> Bounds3f {
self.object_to_world().transform_bounds(self.object_bounds())
}
/// If the `Ray` intersects, returns both the distance and the `SurfaceInteraction`.
fn intersect(&'a self, ray: &Ray, test_alpha_texture: bool) -> Option<(Float, SurfaceInteraction<'a>)>;
fn intersect_p(&self, ray: &Ray, test_alpha_texture: bool) -> bool {
self.intersect(ray, test_alpha_texture).is_some()
}
fn area(&self) -> Float;
}
|
use std::cmp;
use parser::{FileHash, Register};
use crate::print::{DiffList, DiffState, Print, PrintState, ValuePrinter};
use crate::Result;
/// Print the sorted, de-duplicated register list as an expanded field.
pub(crate) fn print_list(state: &mut PrintState, mut registers: Vec<Register>) -> Result<()> {
    registers.sort_unstable();
    registers.dedup();
    state.field_expanded("registers", |state| state.list(&(), &registers))?;
    Ok(())
}
/// Diff two register lists as an expanded field, after sorting and
/// de-duplicating both sides.
pub(crate) fn diff_list(
    state: &mut DiffState,
    mut registers_a: Vec<Register>,
    mut registers_b: Vec<Register>,
) -> Result<()> {
    registers_a.sort_unstable();
    registers_a.dedup();
    registers_b.sort_unstable();
    registers_b.dedup();
    state.field_expanded("registers", |state| {
        state.list(&(), &registers_a, &(), &registers_b)
    })?;
    Ok(())
}
/// Write one register, preferring its name from `hash` and falling back to
/// `r<number>` when the name is unknown.
pub(crate) fn print(register: Register, w: &mut dyn ValuePrinter, hash: &FileHash) -> Result<()> {
    if let Some(name) = register.name(hash) {
        write!(w, "{}", name)?;
    } else {
        write!(w, "r{}", register.0)?;
    }
    Ok(())
}
impl Print for Register {
    type Arg = ();
    /// Print a single register on its own line.
    fn print(&self, state: &mut PrintState, _arg: &()) -> Result<()> {
        state.line(|w, hash| print(*self, w, hash))
    }
    /// Print both sides of a diffed register pair on one line.
    fn diff(state: &mut DiffState, _arg_a: &(), a: &Self, _arg_b: &(), b: &Self) -> Result<()> {
        state.line(a, b, |w, hash, x| print(*x, w, hash))
    }
}
impl DiffList for Register {
    /// Cost of inserting or deleting one register in the diff.
    fn step_cost(&self, _state: &DiffState, _arg: &()) -> usize {
        1
    }
    /// Cost of matching two registers: 0 when identical, 1 otherwise.
    fn diff_cost(_state: &DiffState, _unit_a: &(), a: &Self, _unit_b: &(), b: &Self) -> usize {
        // Direct inequality reads better than comparing `Ordering`s and
        // incrementing a counter.
        usize::from(a.0 != b.0)
    }
}
|
use crate::hittable::*;
use crate::ray::*;
use std::rc::Rc;
/// A collection of hittable objects that is itself hittable: a ray hit on
/// the list is the closest hit over all contained objects.
pub struct HittableList {
    pub objects: Vec<Rc<dyn Hittable>>,
}
impl HittableList {
    /// Create an empty list.
    pub fn new() -> Self {
        Self {
            objects: Vec::new(),
        }
    }
    /// Wrap an existing vector of objects.
    pub fn from(objects: Vec<Rc<dyn Hittable>>) -> Self {
        Self { objects }
    }
    /// Remove every object. `Vec::clear` drops all elements in one call
    /// instead of popping them one at a time.
    pub fn clear(&mut self) {
        self.objects.clear();
    }
    /// Append an object to the list.
    pub fn add(&mut self, object: Rc<dyn Hittable>) {
        self.objects.push(object);
    }
}
impl Hittable for HittableList {
    /// Test the ray against every object in [t_min, t_max], keeping the
    /// closest hit in `rec`. Returns true if anything was hit.
    fn hit(&self, r: &Ray, t_min: f64, t_max: f64, rec: &mut HitRecord) -> bool {
        let mut temp_rec = HitRecord::void();
        let mut hit_anything = false;
        // Shrinks as hits are found, so later objects only count if closer.
        let mut closest_so_far = t_max;
        for object in &self.objects {
            if object.hit(r, t_min, closest_so_far, &mut temp_rec) {
                hit_anything = true;
                // Read `t` directly — the original cloned the whole record
                // (`temp_rec.clone().t`) just to read this one field.
                closest_so_far = temp_rec.t;
                *rec = temp_rec.clone();
            }
        }
        hit_anything
    }
}
|
#![doc = "generated by AutoRust 0.1.0"]
#[cfg(feature = "package-preview-2020-08")]
mod package_preview_2020_08;
#[cfg(feature = "package-preview-2020-08")]
pub use package_preview_2020_08::{models, operations, API_VERSION};
#[cfg(feature = "package-resources-2020-06")]
mod package_resources_2020_06;
#[cfg(feature = "package-resources-2020-06")]
pub use package_resources_2020_06::{models, operations, API_VERSION};
#[cfg(feature = "package-subscriptions-2020-01")]
mod package_subscriptions_2020_01;
#[cfg(feature = "package-subscriptions-2020-01")]
pub use package_subscriptions_2020_01::{models, operations, API_VERSION};
#[cfg(feature = "package-deploymentscripts-2020-10")]
mod package_deploymentscripts_2020_10;
#[cfg(feature = "package-deploymentscripts-2020-10")]
pub use package_deploymentscripts_2020_10::{models, operations, API_VERSION};
#[cfg(feature = "package-deploymentscripts-2019-10-preview")]
mod package_deploymentscripts_2019_10_preview;
#[cfg(feature = "package-deploymentscripts-2019-10-preview")]
pub use package_deploymentscripts_2019_10_preview::{models, operations, API_VERSION};
#[cfg(feature = "package-features-2015-12")]
mod package_features_2015_12;
#[cfg(feature = "package-features-2015-12")]
pub use package_features_2015_12::{models, operations, API_VERSION};
#[cfg(feature = "package-locks-2016-09")]
mod package_locks_2016_09;
#[cfg(feature = "package-locks-2016-09")]
pub use package_locks_2016_09::{models, operations, API_VERSION};
#[cfg(feature = "package-locks-2015-01")]
mod package_locks_2015_01;
#[cfg(feature = "package-locks-2015-01")]
pub use package_locks_2015_01::{models, operations, API_VERSION};
#[cfg(feature = "package-policy-2019-09")]
mod package_policy_2019_09;
#[cfg(feature = "package-policy-2019-09")]
pub use package_policy_2019_09::{models, operations, API_VERSION};
#[cfg(feature = "package-policy-2019-06")]
mod package_policy_2019_06;
#[cfg(feature = "package-policy-2019-06")]
pub use package_policy_2019_06::{models, operations, API_VERSION};
#[cfg(feature = "package-policy-2019-01")]
mod package_policy_2019_01;
#[cfg(feature = "package-policy-2019-01")]
pub use package_policy_2019_01::{models, operations, API_VERSION};
#[cfg(feature = "package-policy-2018-05")]
mod package_policy_2018_05;
#[cfg(feature = "package-policy-2018-05")]
pub use package_policy_2018_05::{models, operations, API_VERSION};
#[cfg(feature = "package-policy-2018-03")]
mod package_policy_2018_03;
#[cfg(feature = "package-policy-2018-03")]
pub use package_policy_2018_03::{models, operations, API_VERSION};
#[cfg(feature = "package-policy-2017-06")]
mod package_policy_2017_06;
#[cfg(feature = "package-policy-2017-06")]
pub use package_policy_2017_06::{models, operations, API_VERSION};
#[cfg(feature = "package-pure-policy-2017-06")]
mod package_pure_policy_2017_06;
#[cfg(feature = "package-pure-policy-2017-06")]
pub use package_pure_policy_2017_06::{models, operations, API_VERSION};
#[cfg(feature = "package-templatespecs-2019-06-preview")]
mod package_templatespecs_2019_06_preview;
#[cfg(feature = "package-templatespecs-2019-06-preview")]
pub use package_templatespecs_2019_06_preview::{models, operations, API_VERSION};
#[cfg(feature = "package-policy-2016-12")]
mod package_policy_2016_12;
#[cfg(feature = "package-policy-2016-12")]
pub use package_policy_2016_12::{models, operations, API_VERSION};
#[cfg(feature = "package-policy-2016-04")]
mod package_policy_2016_04;
#[cfg(feature = "package-policy-2016-04")]
pub use package_policy_2016_04::{models, operations, API_VERSION};
#[cfg(feature = "package-policy-2015-10")]
mod package_policy_2015_10;
#[cfg(feature = "package-policy-2015-10")]
pub use package_policy_2015_10::{models, operations, API_VERSION};
#[cfg(feature = "package-resources-2019-10")]
mod package_resources_2019_10;
#[cfg(feature = "package-resources-2019-10")]
pub use package_resources_2019_10::{models, operations, API_VERSION};
#[cfg(feature = "package-resources-2019-08")]
mod package_resources_2019_08;
#[cfg(feature = "package-resources-2019-08")]
pub use package_resources_2019_08::{models, operations, API_VERSION};
#[cfg(feature = "package-resources-2019-07")]
mod package_resources_2019_07;
#[cfg(feature = "package-resources-2019-07")]
pub use package_resources_2019_07::{models, operations, API_VERSION};
#[cfg(feature = "package-resources-2019-0510")]
mod package_resources_2019_0510;
#[cfg(feature = "package-resources-2019-0510")]
pub use package_resources_2019_0510::{models, operations, API_VERSION};
#[cfg(feature = "package-resources-2019-05")]
mod package_resources_2019_05;
#[cfg(feature = "package-resources-2019-05")]
pub use package_resources_2019_05::{models, operations, API_VERSION};
#[cfg(feature = "package-resources-2019-03")]
mod package_resources_2019_03;
#[cfg(feature = "package-resources-2019-03")]
pub use package_resources_2019_03::{models, operations, API_VERSION};
#[cfg(feature = "package-resources-2018-05")]
mod package_resources_2018_05;
#[cfg(feature = "package-resources-2018-05")]
pub use package_resources_2018_05::{models, operations, API_VERSION};
#[cfg(feature = "package-resources-2018-02")]
mod package_resources_2018_02;
#[cfg(feature = "package-resources-2018-02")]
pub use package_resources_2018_02::{models, operations, API_VERSION};
#[cfg(feature = "package-resources-2017-05")]
mod package_resources_2017_05;
#[cfg(feature = "package-resources-2017-05")]
pub use package_resources_2017_05::{models, operations, API_VERSION};
#[cfg(feature = "package-resources-2016-09")]
mod package_resources_2016_09;
#[cfg(feature = "package-resources-2016-09")]
pub use package_resources_2016_09::{models, operations, API_VERSION};
#[cfg(feature = "package-resources-2016-07")]
mod package_resources_2016_07;
#[cfg(feature = "package-resources-2016-07")]
pub use package_resources_2016_07::{models, operations, API_VERSION};
#[cfg(feature = "package-resources-2016-02")]
mod package_resources_2016_02;
#[cfg(feature = "package-resources-2016-02")]
pub use package_resources_2016_02::{models, operations, API_VERSION};
#[cfg(feature = "package-resources-2015-11")]
mod package_resources_2015_11;
#[cfg(feature = "package-resources-2015-11")]
pub use package_resources_2015_11::{models, operations, API_VERSION};
#[cfg(feature = "package-subscriptions-2019-11")]
mod package_subscriptions_2019_11;
#[cfg(feature = "package-subscriptions-2019-11")]
pub use package_subscriptions_2019_11::{models, operations, API_VERSION};
#[cfg(feature = "package-subscriptions-2019-06")]
mod package_subscriptions_2019_06;
#[cfg(feature = "package-subscriptions-2019-06")]
pub use package_subscriptions_2019_06::{models, operations, API_VERSION};
#[cfg(feature = "package-subscriptions-2018-06")]
mod package_subscriptions_2018_06;
#[cfg(feature = "package-subscriptions-2018-06")]
pub use package_subscriptions_2018_06::{models, operations, API_VERSION};
#[cfg(feature = "package-subscriptions-2016-06")]
mod package_subscriptions_2016_06;
#[cfg(feature = "package-subscriptions-2016-06")]
pub use package_subscriptions_2016_06::{models, operations, API_VERSION};
#[cfg(feature = "package-subscriptions-2015-11")]
mod package_subscriptions_2015_11;
#[cfg(feature = "package-subscriptions-2015-11")]
pub use package_subscriptions_2015_11::{models, operations, API_VERSION};
#[cfg(feature = "package-links-2016-09")]
mod package_links_2016_09;
#[cfg(feature = "package-links-2016-09")]
pub use package_links_2016_09::{models, operations, API_VERSION};
#[cfg(feature = "package-managedapplications-2019-07")]
mod package_managedapplications_2019_07;
#[cfg(feature = "package-managedapplications-2019-07")]
pub use package_managedapplications_2019_07::{models, operations, API_VERSION};
#[cfg(feature = "package-managedapplications-2018-06")]
mod package_managedapplications_2018_06;
#[cfg(feature = "package-managedapplications-2018-06")]
pub use package_managedapplications_2018_06::{models, operations, API_VERSION};
#[cfg(feature = "package-managedapplications-2017-09")]
mod package_managedapplications_2017_09;
#[cfg(feature = "package-managedapplications-2017-09")]
pub use package_managedapplications_2017_09::{models, operations, API_VERSION};
#[cfg(feature = "package-managedapplications-2016-09")]
mod package_managedapplications_2016_09;
#[cfg(feature = "package-managedapplications-2016-09")]
pub use package_managedapplications_2016_09::{models, operations, API_VERSION};
/// Connection settings shared by the generated operations.
pub struct OperationConfig {
    /// API version sent with each request; defaulted from the enabled
    /// feature's re-exported `API_VERSION`.
    pub api_version: String,
    /// HTTP client used for all requests.
    pub client: reqwest::Client,
    /// Base endpoint URL, e.g. "https://management.azure.com".
    pub base_path: String,
    /// Credential used to authorize requests; `None` presumably means
    /// unauthenticated calls — confirm against the operation code.
    pub token_credential: Option<Box<dyn azure_core::TokenCredential>>,
    /// Resource (audience) the token is requested for.
    pub token_credential_resource: String,
}
impl OperationConfig {
    /// Build a config with the given credential; every other field takes
    /// its `Default` value (public-cloud ARM endpoint).
    pub fn new(token_credential: Box<dyn azure_core::TokenCredential>) -> Self {
        Self {
            token_credential: Some(token_credential),
            ..Default::default()
        }
    }
}
impl Default for OperationConfig {
    /// Defaults target public Azure (ARM endpoint), with no credential.
    /// `API_VERSION` resolves to whichever feature's re-export is in scope.
    fn default() -> Self {
        Self {
            api_version: API_VERSION.to_owned(),
            client: reqwest::Client::new(),
            base_path: "https://management.azure.com".to_owned(),
            token_credential: None,
            token_credential_resource: "https://management.azure.com/".to_owned(),
        }
    }
}
|
use std::sync::Arc;
use super::ecdsa::*;
use super::eddsa::*;
use super::error::*;
use super::handles::*;
use super::rsa::*;
use super::signature_keypair::*;
use super::signature_publickey::*;
use super::WASI_CRYPTO_CTX;
/// Supported signature algorithms, named after the wasi-crypto identifiers
/// (hence the non-camel-case variants).
#[allow(non_camel_case_types)]
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
#[repr(u16)]
pub enum SignatureAlgorithm {
    ECDSA_P256_SHA256,
    ECDSA_P384_SHA384,
    Ed25519,
    RSA_PKCS1_2048_8192_SHA256,
    RSA_PKCS1_2048_8192_SHA384,
    RSA_PKCS1_2048_8192_SHA512,
    RSA_PKCS1_3072_8192_SHA384,
}
/// A signature produced by one of the supported algorithm families.
#[derive(Clone, Debug)]
pub enum Signature {
    ECDSA(ECDSASignature),
    EdDSA(EdDSASignature),
    RSA(RSASignature),
}
impl AsRef<[u8]> for Signature {
fn as_ref(&self) -> &[u8] {
match self {
Signature::ECDSA(signature) => signature.as_ref(),
Signature::EdDSA(signature) => signature.as_ref(),
Signature::RSA(signature) => signature.as_ref(),
}
}
}
impl PartialEq for Signature {
    /// Constant-time comparison of the raw bytes (via `ring`), so equality
    /// checks on signatures do not leak timing information.
    fn eq(&self, other: &Self) -> bool {
        ring::constant_time::verify_slices_are_equal(self.as_ref(), other.as_ref()).is_ok()
    }
}
impl Eq for Signature {}
impl Signature {
    /// Decode a raw (fixed-size) signature for `alg`.
    ///
    /// ECDSA and Ed25519 signatures have a fixed expected length that is
    /// validated here; RSA signatures are accepted as-is.
    fn from_raw(alg: SignatureAlgorithm, encoded: &[u8]) -> Result<Self, Error> {
        let signature = match alg {
            SignatureAlgorithm::ECDSA_P256_SHA256 => {
                ensure!(encoded.len() == 64, "Unexpected signature length");
                Signature::ECDSA(ECDSASignature::new(
                    SignatureEncoding::Raw,
                    encoded.to_vec(),
                ))
            }
            SignatureAlgorithm::ECDSA_P384_SHA384 => {
                ensure!(encoded.len() == 96, "Unexpected signature length");
                Signature::ECDSA(ECDSASignature::new(
                    SignatureEncoding::Raw,
                    encoded.to_vec(),
                ))
            }
            SignatureAlgorithm::Ed25519 => {
                ensure!(encoded.len() == 64, "Unexpected signature length");
                Signature::EdDSA(EdDSASignature::new(encoded.to_vec()))
            }
            // All RSA variants share the same representation; the original
            // repeated this arm four times verbatim.
            SignatureAlgorithm::RSA_PKCS1_2048_8192_SHA256
            | SignatureAlgorithm::RSA_PKCS1_2048_8192_SHA384
            | SignatureAlgorithm::RSA_PKCS1_2048_8192_SHA512
            | SignatureAlgorithm::RSA_PKCS1_3072_8192_SHA384 => {
                Signature::RSA(RSASignature::new(encoded.to_vec()))
            }
        };
        Ok(signature)
    }
    /// View as an ECDSA signature, or fail with `InvalidSignature`.
    fn as_ecdsa(&self) -> Result<&ECDSASignature, Error> {
        match self {
            Signature::ECDSA(signature) => Ok(signature),
            _ => bail!(CryptoError::InvalidSignature),
        }
    }
    /// View as an EdDSA signature, or fail with `InvalidSignature`.
    fn as_eddsa(&self) -> Result<&EdDSASignature, Error> {
        match self {
            Signature::EdDSA(signature) => Ok(signature),
            _ => bail!(CryptoError::InvalidSignature),
        }
    }
    /// View as an RSA signature, or fail with `InvalidSignature`.
    fn as_rsa(&self) -> Result<&RSASignature, Error> {
        match self {
            Signature::RSA(signature) => Ok(signature),
            _ => bail!(CryptoError::InvalidSignature),
        }
    }
}
/// Algorithm-specific in-progress signing state.
#[derive(Debug)]
pub enum SignatureState {
    ECDSA(ECDSASignatureState),
    EdDSA(EdDSASignatureState),
    RSA(RSASignatureState),
}
/// Handle-managed wrapper around a signing state; the `Arc` lets clones
/// handed out by the handle manager refer to the same underlying state.
#[derive(Debug, Clone)]
pub struct ExclusiveSignatureState {
    state: Arc<SignatureState>,
}
impl ExclusiveSignatureState {
    /// Wrap an algorithm-specific signing state.
    fn new(signature_state: SignatureState) -> Self {
        Self {
            state: Arc::new(signature_state),
        }
    }
    /// Create a signing state from the key pair behind `kp_handle` and
    /// register it, returning the new state handle.
    fn open(kp_handle: Handle) -> Result<Handle, Error> {
        let kp = WASI_CRYPTO_CTX.signature_keypair_manager.get(kp_handle)?;
        let state = match kp {
            SignatureKeyPair::ECDSA(kp) => SignatureState::ECDSA(ECDSASignatureState::new(kp)),
            SignatureKeyPair::EdDSA(kp) => SignatureState::EdDSA(EdDSASignatureState::new(kp)),
            SignatureKeyPair::RSA(kp) => SignatureState::RSA(RSASignatureState::new(kp)),
        };
        WASI_CRYPTO_CTX
            .signature_state_manager
            .register(ExclusiveSignatureState::new(state))
    }
    /// Feed more input into the running signature computation.
    fn update(&mut self, input: &[u8]) -> Result<(), Error> {
        match self.state.as_ref() {
            SignatureState::ECDSA(state) => state.update(input),
            SignatureState::EdDSA(state) => state.update(input),
            SignatureState::RSA(state) => state.update(input),
        }
    }
    /// Finish the computation and produce the signature.
    fn sign(&mut self) -> Result<Signature, Error> {
        match self.state.as_ref() {
            SignatureState::ECDSA(state) => state.sign().map(Signature::ECDSA),
            SignatureState::EdDSA(state) => state.sign().map(Signature::EdDSA),
            SignatureState::RSA(state) => state.sign().map(Signature::RSA),
        }
    }
}
/// Wire encodings for signatures. Only `Raw` is implemented today (see
/// `signature_export` / `signature_import`); the rest are reserved.
#[allow(dead_code)]
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub enum SignatureEncoding {
    Raw = 1,
    Hex = 2,
    Base64Original = 3,
    Base64OriginalNoPadding = 4,
    Base64URLSafe = 5,
    Base64URLSafeNoPadding = 6,
    DER = 7,
}
/// Algorithm-specific in-progress verification state.
#[derive(Debug)]
pub enum SignatureVerificationState {
    ECDSA(ECDSASignatureVerificationState),
    EdDSA(EdDSASignatureVerificationState),
    RSA(RSASignatureVerificationState),
}
/// Handle-managed wrapper around a verification state; `Arc` lets clones
/// handed out by the handle manager share one underlying state.
#[derive(Debug, Clone)]
pub struct ExclusiveSignatureVerificationState {
    state: Arc<SignatureVerificationState>,
}
impl ExclusiveSignatureVerificationState {
    /// Wrap an algorithm-specific verification state.
    fn new(signature_verification_state: SignatureVerificationState) -> Self {
        Self {
            state: Arc::new(signature_verification_state),
        }
    }
    /// Create a verification state from the public key behind `pk_handle`
    /// and register it, returning the new state handle.
    fn open(pk_handle: Handle) -> Result<Handle, Error> {
        let pk = WASI_CRYPTO_CTX.signature_publickey_manager.get(pk_handle)?;
        let state = match pk {
            SignaturePublicKey::ECDSA(pk) => {
                SignatureVerificationState::ECDSA(ECDSASignatureVerificationState::new(pk))
            }
            SignaturePublicKey::EdDSA(pk) => {
                SignatureVerificationState::EdDSA(EdDSASignatureVerificationState::new(pk))
            }
            SignaturePublicKey::RSA(pk) => {
                SignatureVerificationState::RSA(RSASignatureVerificationState::new(pk))
            }
        };
        WASI_CRYPTO_CTX
            .signature_verification_state_manager
            .register(ExclusiveSignatureVerificationState::new(state))
    }
    /// Feed more input into the running verification.
    fn update(&mut self, input: &[u8]) -> Result<(), Error> {
        match self.state.as_ref() {
            SignatureVerificationState::ECDSA(state) => state.update(input),
            SignatureVerificationState::EdDSA(state) => state.update(input),
            SignatureVerificationState::RSA(state) => state.update(input),
        }
    }
    /// Check the signature behind `signature_handle` against the input fed
    /// via `update`; the signature must match this state's algorithm family.
    fn verify(&self, signature_handle: Handle) -> Result<(), Error> {
        let signature = WASI_CRYPTO_CTX.signature_manager.get(signature_handle)?;
        match self.state.as_ref() {
            SignatureVerificationState::ECDSA(state) => state.verify(signature.as_ecdsa()?),
            SignatureVerificationState::EdDSA(state) => state.verify(signature.as_eddsa()?),
            SignatureVerificationState::RSA(state) => state.verify(signature.as_rsa()?),
        }
    }
}
/// Export the bytes of the signature behind `signature_handle`.
/// Only `Raw` encoding is currently supported.
pub fn signature_export(
    signature_handle: Handle,
    encoding: SignatureEncoding,
) -> Result<Vec<u8>, Error> {
    match encoding {
        SignatureEncoding::Raw => {
            let signature = WASI_CRYPTO_CTX.signature_manager.get(signature_handle)?;
            Ok(signature.as_ref().to_vec())
        }
        _ => bail!(CryptoError::NotAvailable),
    }
}
/// Import a signature for the operation behind `op_handle`, registering it
/// and returning its handle. Only `Raw` encoding is currently supported.
pub fn signature_import(
    op_handle: Handle,
    encoding: SignatureEncoding,
    encoded: &[u8],
) -> Result<Handle, Error> {
    let signature_op = WASI_CRYPTO_CTX.signature_op_manager.get(op_handle)?;
    match encoding {
        SignatureEncoding::Raw => {
            let signature = Signature::from_raw(signature_op.alg(), encoded)?;
            WASI_CRYPTO_CTX.signature_manager.register(signature)
        }
        _ => bail!(CryptoError::NotAvailable),
    }
}
/// Open a signing state over the key pair behind `kp_handle`.
pub fn signature_state_open(kp_handle: Handle) -> Result<Handle, Error> {
    ExclusiveSignatureState::open(kp_handle)
}
/// Feed `input` into the signing state behind `state_handle`.
pub fn signature_state_update(state_handle: Handle, input: &[u8]) -> Result<(), Error> {
    let mut state = WASI_CRYPTO_CTX.signature_state_manager.get(state_handle)?;
    state.update(input)
}
/// Finalize the signing state and register the resulting signature,
/// returning its handle.
pub fn signature_state_sign(state_handle: Handle) -> Result<Handle, Error> {
    let mut state = WASI_CRYPTO_CTX.signature_state_manager.get(state_handle)?;
    let signature = state.sign()?;
    let handle = WASI_CRYPTO_CTX.signature_manager.register(signature)?;
    Ok(handle)
}
/// Release the signing state behind `handle`.
pub fn signature_state_close(handle: Handle) -> Result<(), Error> {
    WASI_CRYPTO_CTX.signature_state_manager.close(handle)
}
/// Open a verification state over the public key behind `pk_handle`.
pub fn signature_verification_state_open(pk_handle: Handle) -> Result<Handle, Error> {
    ExclusiveSignatureVerificationState::open(pk_handle)
}
/// Feed `input` into the verification state.
pub fn signature_verification_state_update(
    verification_state_handle: Handle,
    input: &[u8],
) -> Result<(), Error> {
    let mut state = WASI_CRYPTO_CTX
        .signature_verification_state_manager
        .get(verification_state_handle)?;
    state.update(input)
}
/// Verify the registered signature against the accumulated input.
pub fn signature_verification_state_verify(
    verification_state_handle: Handle,
    signature_handle: Handle,
) -> Result<(), Error> {
    let state = WASI_CRYPTO_CTX
        .signature_verification_state_manager
        .get(verification_state_handle)?;
    state.verify(signature_handle)
}
/// Release the verification state behind `handle`.
pub fn signature_verification_state_close(handle: Handle) -> Result<(), Error> {
    WASI_CRYPTO_CTX
        .signature_verification_state_manager
        .close(handle)
}
/// Release the signature behind `handle`.
pub fn signature_close(handle: Handle) -> Result<(), Error> {
    WASI_CRYPTO_CTX.signature_manager.close(handle)
}
|
use uuid::Uuid;
use std::process::Command;
/**
 * Get printers on unix systems using lpstat
 */
pub fn get_printers() -> Vec<super::Printer> {
    // NOTE(review): keeps the original panic-on-spawn-failure behaviour
    // (`unwrap` fires when `lpstat` is missing entirely).
    let out = Command::new("lpstat").arg("-e").output().unwrap();
    if !out.status.success() {
        return Vec::new();
    }
    // `lpstat` output is not guaranteed to be valid UTF-8, so decode lossily.
    // The original used `String::from_utf8_unchecked` in an `unsafe` block,
    // which is undefined behaviour on invalid input.
    let out_str = String::from_utf8_lossy(&out.stdout);
    let lines: Vec<&str> = out_str.split_inclusive("\n").collect();
    let mut printers: Vec<super::Printer> = Vec::with_capacity(lines.len());
    for line in lines {
        let system_name = line.replace("\n", "");
        printers.push(super::Printer {
            // Stable id derived from the queue name.
            id: Uuid::new_v5(&Uuid::NAMESPACE_DNS, system_name.as_bytes()).to_string(),
            // Display name: underscores become spaces, surrounding blanks trimmed.
            name: String::from(system_name.replace("_", " ").trim()),
            system_name,
        });
    }
    printers
}
/**
 * Print on unix systems using lp
 *
 * Accepts `&str` / `&Path` instead of `&String` / `&PathBuf`; existing
 * callers passing the owned types still work via deref coercion.
 * Returns whether `lp` exited successfully.
 */
pub fn print(printer_system_name: &str, file_path: &std::path::Path) -> bool {
    // `lp -d <printer> <file>` queues the file for printing; panics if the
    // `lp` binary cannot be spawned (unchanged from the original).
    let process = Command::new("lp")
        .arg("-d")
        .arg(printer_system_name)
        .arg(file_path)
        .output()
        .unwrap();
    process.status.success()
}
|
extern crate proc_macro;
use proc_macro::TokenStream;
mod derives;
/// Derive `Entity` for the annotated type, configured via `#[event_sauce]`.
#[proc_macro_derive(Entity, attributes(event_sauce))]
pub fn derive_entity(input: TokenStream) -> TokenStream {
    let ast = syn::parse_macro_input!(input as syn::DeriveInput);
    match derives::entity::expand_derive_entity(&ast) {
        Err(e) => e.to_compile_error().into(),
        Ok(ts) => ts.into(),
    }
}
/// Derive `CreateEventData` for the annotated type.
#[proc_macro_derive(CreateEventData, attributes(event_sauce))]
pub fn derive_create_event_data(input: TokenStream) -> TokenStream {
    let ast = syn::parse_macro_input!(input as syn::DeriveInput);
    match derives::event_data::expand_derive_create_event_data(&ast) {
        Err(e) => e.to_compile_error().into(),
        Ok(ts) => ts.into(),
    }
}
/// Derive `UpdateEventData` for the annotated type.
#[proc_macro_derive(UpdateEventData, attributes(event_sauce))]
pub fn derive_update_event_data(input: TokenStream) -> TokenStream {
    let ast = syn::parse_macro_input!(input as syn::DeriveInput);
    match derives::event_data::expand_derive_update_event_data(&ast) {
        Err(e) => e.to_compile_error().into(),
        Ok(ts) => ts.into(),
    }
}
/// Derive `DeleteEventData` for the annotated type.
#[proc_macro_derive(DeleteEventData, attributes(event_sauce))]
pub fn derive_delete_event_data(input: TokenStream) -> TokenStream {
    let ast = syn::parse_macro_input!(input as syn::DeriveInput);
    match derives::event_data::expand_derive_delete_event_data(&ast) {
        Err(e) => e.to_compile_error().into(),
        Ok(ts) => ts.into(),
    }
}
/// Derive `EnumEventData` for the annotated enum.
#[proc_macro_derive(EnumEventData, attributes(event_sauce))]
pub fn derive_action_event_data(input: TokenStream) -> TokenStream {
    let ast = syn::parse_macro_input!(input as syn::DeriveInput);
    match derives::enum_event_data::expand_derive_enum_event_data(&ast) {
        Err(e) => e.to_compile_error().into(),
        Ok(ts) => ts.into(),
    }
}
/// Derive `PurgeEventData` for the annotated type.
#[proc_macro_derive(PurgeEventData, attributes(event_sauce))]
pub fn derive_purge_event_data(input: TokenStream) -> TokenStream {
    let ast = syn::parse_macro_input!(input as syn::DeriveInput);
    match derives::event_data::expand_derive_purge_event_data(&ast) {
        Err(e) => e.to_compile_error().into(),
        Ok(ts) => ts.into(),
    }
}
|
/// LeetCode 164: maximum difference between successive elements of the
/// sorted form of `nums`, in O(n) time via pigeonhole bucketing.
///
/// Returns 0 for fewer than two elements.
pub fn maximum_gap(nums: Vec<i32>) -> i32 {
    use std::cmp::{max, min};
    let n = nums.len();
    if n < 2 {
        return 0;
    }
    if n == 2 {
        return (nums[1] - nums[0]).abs();
    }
    // Idiomatic min/max instead of manual folds (n >= 3, so unwrap is safe).
    let min_v = *nums.iter().min().unwrap();
    let max_v = *nums.iter().max().unwrap();
    // Bucket width >= 1 and < the answer, so the maximum gap can never fall
    // inside a single bucket — only bucket-to-bucket jumps need checking.
    let b = max((max_v - min_v) / (n as i32 - 1), 1);
    // buckets[i] = Some((bucket_min, bucket_max)) for non-empty buckets.
    let mut buckets: Vec<Option<(i32, i32)>> = vec![None; ((max_v - min_v) / b) as usize + 1];
    for num in nums {
        let idx = ((num - min_v) / b) as usize;
        buckets[idx] = Some(match buckets[idx] {
            Some((lo, hi)) => (min(lo, num), max(hi, num)),
            None => (num, num),
        });
    }
    // Largest jump from one non-empty bucket's max to the next one's min.
    // Bucket 0 is always non-empty (it contains min_v), so seeding with
    // min_v matches the original's seed of bucket 0's max.
    let mut last_max = min_v;
    let mut best = 0;
    for slot in &buckets {
        if let Some((lo, hi)) = *slot {
            best = max(best, lo - last_max);
            last_max = hi;
        }
    }
    best
}
use std::sync::Arc;
use std::sync::atomic::AtomicUsize;
use std::marker::PhantomData;
use role;
use counter::{Counter, CounterRange, AtomicCounter};
use sequence::{Sequence, Limit};
use buffer::BufRange;
use super::half::{Half, HeadHalf};
/// Shared channel state: the sender/receiver sequences plus per-side counts.
#[derive(Debug)]
pub(crate) struct Head<S: Sequence, R: Sequence> {
    sender: S,
    receiver: R,
    // Shared count for the send side — presumably the number of live sender
    // handles (exposed via `HeadHalf::amount`); confirm against `Half`.
    sender_count: AtomicUsize,
    // Shared count for the receive side; see note above.
    receiver_count: AtomicUsize,
}
/// Send-side view of the shared head, carrying the channel capacity.
#[derive(Debug)]
pub(crate) struct SenderHead<S: Sequence, R: Sequence, T> {
    head: Arc<Head<S, R>>,
    capacity: usize,
    // Zero-sized marker tying the element type/role without storing a T.
    role: PhantomData<role::Send<T>>,
}
/// Receive-side view of the shared head.
#[derive(Debug)]
pub(crate) struct ReceiverHead<S: Sequence, R: Sequence, T> {
    head: Arc<Head<S, R>>,
    role: PhantomData<role::Receive<T>>,
}
// Each half pairs the shared buffer handle with the matching head view.
pub(crate) type SenderHalf<S, R, T> = Half<Arc<Head<S, R>>, SenderHead<S, R, T>, T>;
pub(crate) type ReceiverHalf<S, R, T> = Half<Arc<Head<S, R>>, ReceiverHead<S, R, T>, T>;
impl<S: Sequence, R: Sequence> Head<S, R> {
    /// Allocate shared channel state with both side counts at zero.
    pub fn new(sender: S, receiver: R) -> Arc<Self> {
        Arc::new(Head {
            sender,
            receiver,
            sender_count: 0.into(),
            receiver_count: 0.into(),
        })
    }
    /// Close the channel by closing the sender-side counter.
    ///
    /// (Merged with `new` into one `impl` block — the original had two
    /// adjacent `impl` blocks with identical generics.)
    pub fn close(&self) {
        self.sender.counter().close();
    }
}
impl<S: Sequence, R: Sequence> BufRange for Arc<Head<S, R>> {
    /// Live range of the buffer: from the receivers' last observed position
    /// up to the senders' last observed position.
    fn range(&self) -> CounterRange {
        let sender_last = self.sender.fetch_last();
        let receiver_last = self.receiver.fetch_last();
        Counter::range(receiver_last, sender_last)
    }
}
impl<S: Sequence, R: Sequence, T> SenderHead<S, R, T> {
    /// Bundle the shared head with the channel capacity for the send side.
    pub fn new(head: Arc<Head<S, R>>, capacity: usize) -> Self {
        let role = PhantomData;
        SenderHead { head, capacity, role }
    }
}
impl<S: Sequence, R: Sequence, T> HeadHalf for SenderHead<S, R, T> {
    type Seq = S;
    type Role = role::Send<T>;
    /// The send-side sequence.
    fn seq(&self) -> &S {
        &self.head.sender
    }
    /// Shared count backing this half (`Head::sender_count`).
    fn amount(&self) -> &AtomicUsize {
        &self.head.sender_count
    }
    /// Counter used to signal closure; note both halves use the *sender's*
    /// counter (see `ReceiverHead::close_counter`).
    fn close_counter(&self) -> &AtomicCounter {
        self.head.sender.counter()
    }
}
impl<S: Sequence, R: Sequence, T> Limit for SenderHead<S, R, T> {
    /// Senders may advance up to `capacity` slots past the receivers' last
    /// observed position.
    fn count(&self) -> Counter {
        self.head.receiver.fetch_last() + self.capacity
    }
}
impl<S: Sequence, R: Sequence, T> Clone for SenderHead<S, R, T> {
    // Manual impl: derive(Clone) would wrongly require S/R/T: Clone, but
    // only the Arc needs cloning here.
    fn clone(&self) -> Self {
        SenderHead {
            head: Arc::clone(&self.head),
            capacity: self.capacity,
            role: PhantomData,
        }
    }
}
impl<S: Sequence, R: Sequence, T> ReceiverHead<S, R, T> {
    /// Wrap the shared head for the receive side.
    pub fn new(head: Arc<Head<S, R>>) -> Self {
        let role = PhantomData;
        ReceiverHead { head, role }
    }
}
impl<S: Sequence, R: Sequence, T> HeadHalf for ReceiverHead<S, R, T> {
    type Seq = R;
    type Role = role::Receive<T>;
    /// The receive-side sequence.
    fn seq(&self) -> &R {
        &self.head.receiver
    }
    /// Shared count backing this half (`Head::receiver_count`).
    fn amount(&self) -> &AtomicUsize {
        &self.head.receiver_count
    }
    /// Closure is signalled through the sender's counter — this mirrors
    /// `Head::close`, which closes the sender side; presumably intentional.
    fn close_counter(&self) -> &AtomicCounter {
        self.head.sender.counter()
    }
}
impl<S: Sequence, R: Sequence, T> Limit for ReceiverHead<S, R, T> {
    /// Receivers may advance only as far as senders have published.
    fn count(&self) -> Counter {
        self.head.sender.fetch_last()
    }
}
impl<S: Sequence, R: Sequence, T> Clone for ReceiverHead<S, R, T> {
    // Manual impl: derive(Clone) would wrongly require S/R/T: Clone, but
    // only the Arc needs cloning here.
    fn clone(&self) -> Self {
        ReceiverHead {
            head: Arc::clone(&self.head),
            role: PhantomData,
        }
    }
}
|
// Given an array of integers, find if the array contains any duplicates.
//
// Your function should return true if any value appears at least twice in the array, and it should return false if every element is distinct.
//
// Example 1:
//
// Input: [1,2,3,1]
// Output: true
// Example 2:
//
// Input: [1,2,3,4]
// Output: false
// Example 3:
//
// Input: [1,1,1,3,3,4,3,2,4,2]
// Output: true
use std::collections::HashSet;

/// Dummy holder type mirroring LeetCode's `Solution`.
struct Solution {}

fn main() {
    println!("contains: {}", Solution::contains_duplicate(vec![1, 2, 3, 1]));
}

impl Solution {
    /// True if any value occurs more than once in `nums`.
    ///
    /// `HashSet::insert` returns `false` when the value was already present,
    /// so a single call both records the element and detects the duplicate
    /// (the original did a separate `contains` then `insert`; it also
    /// attached a `///` doc comment to a statement, which is invalid
    /// placement and triggers an `unused_doc_comments` lint).
    pub fn contains_duplicate(nums: Vec<i32>) -> bool {
        let mut seen = HashSet::new();
        for x in nums {
            if !seen.insert(x) {
                return true;
            }
        }
        false
    }
}
|
use iron::IronError;
use iron::status::Status;
use std::error::Error;
use std::fmt::{self, Display, Formatter};
use std::io;
use std::str::Utf8Error;
use std::sync::{RwLockReadGuard, RwLockWriteGuard, PoisonError};
use super::common::Backend;
pub type MogResult<T> = Result<T, MogError>;
/// All error conditions the server can report.
#[derive(Debug)]
pub enum MogError {
    /// Underlying I/O failure.
    Io(io::Error),
    /// A lock guarding the backend was poisoned by a panicking thread.
    PoisonedMutex,
    /// Input bytes were not valid UTF-8.
    Utf8(Utf8Error),
    /// No domain was provided.
    NoDomain,
    /// Domain name invalid / not registered (holds the name).
    UnregDomain(String),
    /// Domain already exists (holds the name).
    DomainExists(String),
    /// No key was provided.
    NoKey,
    /// Key not found (holds the key).
    UnknownKey(String),
    /// Target key already exists (holds the key).
    KeyExists(String),
    /// Unrecognized command, with the command text if one was parsed.
    UnknownCommand(Option<String>),
    /// No content available for the key (holds the key).
    NoContent(String),
}
impl MogError {
    /// Short snake_case tag identifying the error kind; variants without a
    /// dedicated tag collapse into "other_error".
    pub fn error_kind(&self) -> &str {
        use self::MogError::*;
        match *self {
            NoDomain => "no_domain",
            UnregDomain(..) => "unreg_domain",
            DomainExists(..) => "domain_exists",
            NoKey => "no_key",
            UnknownKey(..) => "unknown_key",
            KeyExists(..) => "key_exists",
            UnknownCommand(..) => "unknown_command",
            _ => "other_error",
        }
    }
}
// A poisoned backend read lock collapses to `PoisonedMutex` (the guard's
// lifetime is irrelevant to the error).
impl<'a> From<PoisonError<RwLockReadGuard<'a, Backend>>> for MogError {
    fn from (_: PoisonError<RwLockReadGuard<'a, Backend>>) -> MogError {
        MogError::PoisonedMutex
    }
}
// Same for a poisoned write lock.
impl<'a> From<PoisonError<RwLockWriteGuard<'a, Backend>>> for MogError {
    fn from (_: PoisonError<RwLockWriteGuard<'a, Backend>>) -> MogError {
        MogError::PoisonedMutex
    }
}
// Wrap I/O errors, preserving the original error value.
impl From<io::Error> for MogError {
    fn from(io_err: io::Error) -> MogError {
        MogError::Io(io_err)
    }
}
// Wrap UTF-8 decoding errors, preserving the original error value.
impl From<Utf8Error> for MogError {
    fn from(utf8_err: Utf8Error) -> MogError {
        MogError::Utf8(utf8_err)
    }
}
impl From<MogError> for IronError {
    /// Convert a `MogError` into an `IronError` so handlers can return it directly.
    ///
    /// Unknown/missing keys map to 404 Not Found; every other variant becomes a
    /// 500 Internal Server Error whose body comes from the error's
    /// `description()` (deprecated, but kept to match the rest of this file).
    fn from(err: MogError) -> IronError {
        use self::MogError::*;
        // Match ergonomics replace the legacy `&Variant(ref k)` patterns, and
        // the redundant `e @ _` catch-all is now a plain binding.
        let modifier = match &err {
            UnknownKey(k) => {
                (Status::NotFound, format!("Unknown key: {:?}\n", k))
            },
            NoContent(k) => {
                (Status::NotFound, format!("No content key: {:?}\n", k))
            },
            e => {
                (Status::InternalServerError, format!("{}\n", e.description()))
            }
        };
        IronError::new(err, modifier)
    }
}
impl Display for MogError {
    /// Human-readable message; variants without a bespoke message fall back to
    /// their `description()` text.
    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
        use self::MogError::*;
        match self {
            Io(io_err) => write!(f, "{}", io_err),
            Utf8(utf8_err) => write!(f, "{}", utf8_err),
            UnregDomain(d) => write!(f, "Domain name {:?} invalid / not found", d),
            DomainExists(d) => write!(f, "That domain already exists: {:?}", d),
            UnknownKey(d) => write!(f, "Unknown key: {:?}", d),
            KeyExists(d) => write!(f, "Target key name {:?} already exists, can't overwrite.", d),
            UnknownCommand(d) => write!(f, "Unknown command: {:?}", d),
            NoContent(d) => write!(f, "No content for key: {:?}", d),
            _ => write!(f, "{}", self.description()),
        }
    }
}
impl Error for MogError {
    // NOTE: `Error::description` has been deprecated since Rust 1.42 in favour
    // of the `Display` impl; it is kept here because the `Display` and
    // `IronError` conversions in this file rely on it.
    fn description(&self) -> &str {
        use self::MogError::*;
        match *self {
            // Delegate to the wrapped error where one exists
            Io(ref io_err) => io_err.description(),
            Utf8(ref utf8_err) => utf8_err.description(),
            PoisonedMutex => "Poisoned mutex",
            NoDomain => "No domain provided",
            UnregDomain(..) => "Domain name invalid / not found",
            DomainExists(..) => "Domain already exists",
            NoKey => "No key provided",
            UnknownKey(..) => "Unknown key",
            KeyExists(..) => "Key already exists",
            UnknownCommand(..) => "Unknown command",
            NoContent(..) => "No content",
        }
    }
}
|
//! Types which are needed to use the audio system independent from the root `riddle` crate.
mod audio_system;
pub use audio_system::*;
|
mod list_file_systems_builder;
pub use list_file_systems_builder::ListFileSystemsBuilder;
mod create_file_system_builder;
pub use create_file_system_builder::CreateFileSystemBuilder;
mod delete_file_system_builder;
pub use delete_file_system_builder::DeleteFileSystemBuilder;
mod get_file_system_properties_builder;
pub use get_file_system_properties_builder::GetFileSystemPropertiesBuilder;
mod set_file_system_properties_builder;
pub use set_file_system_properties_builder::SetFileSystemPropertiesBuilder;
|
// A struct, which declares a lifetime.
struct Foo<'a> {
    // And a reference, which uses the lifetime: a `Foo` may not outlive
    // the `i32` it borrows.
    x: &'a i32,
}
/// Demonstrates a borrow-checker error: this function intentionally does NOT
/// compile (E0597: `z` does not live long enough).
fn main() {
    let y = 5;
    let mut f = Foo { x: &y };
    {
        let z = 6;
        // This does not work, because f outlives z.
        f.x = &z;
    }
    // `f.x` would be a dangling reference here if the assignment were allowed.
    println!("{}", f.x);
}
|
// For seeding the random number generator
use rand::Rng;
use rand::rngs::StdRng;
// For shuffling a vector
use rand::seq::SliceRandom;
// For interior mutability
use std::cell::RefCell;
use crate::problem_datatypes::{DataPoints, Constraints, Point, ConstraintType, NeighbourGenerator};
use crate::fitness_evaluation_result::FitnessEvaluationResult;
/// Represents one solution to the clustering problem.
///
/// The solution is encoded as a vector of indices: the i-th position holds
/// the cluster to which the i-th point of the dataset is assigned.
#[derive(Debug, Clone)]
pub struct Solution<'a, 'b> {
    cluster_indexes: Vec<u32>,
    data_points: &'a DataPoints,
    constraints: &'b Constraints,
    number_of_clusters: i32,
    /// Weight of infeasibility in the fitness computation.
    /// Computed only once, when Solution::new is invoked
    lambda: f64,
    // Caches the fitness value, which is expensive to compute.
    // The struct's data never changes, so caching is safe.
    // RefCell gives us an interior-mutability pattern
    fitness: RefCell<Option<f64>>,
}
impl<'a, 'b> Solution<'a, 'b> {
    /// Builds a solution, computing `lambda` along the way.
    /// Useful when the value of lambda is not yet known; otherwise the
    /// struct can be constructed directly.
    pub fn new(
        cluster_indexes: Vec<u32>,
        data_points: &'a DataPoints,
        constraints: &'b Constraints,
        number_of_clusters: i32,
    ) -> Self {
        // lambda = (max distance between any two points) / (number of constraints)
        let lambda = Point::max_distance_among_two(&data_points.get_points()) / constraints.get_data().len() as f64;
        return Self {
            cluster_indexes,
            data_points,
            constraints,
            number_of_clusters,
            lambda,
            fitness: RefCell::new(None),
        };
    }
    /// Returns a copy of the cluster-assignment vector.
    pub fn get_cluster_indexes(&self) -> Vec<u32>{
        return self.cluster_indexes.clone();
    }
    /// Returns the infeasibility weight used by the fitness function.
    pub fn get_lambda(&self) -> f64{
        return self.lambda;
    }
    /// Returns the data points this solution assigns to clusters.
    pub fn get_data_points(&self) -> &DataPoints{
        return self.data_points;
    }
    /// Checks whether the solution is valid.
    /// A solution is invalid when some cluster has no point assigned to it.
    /// It is also invalid when the length of the cluster-assignment vector
    /// does not match the number of points to assign.
    pub fn is_valid(&self) -> bool {
        // Safety condition that should never happen: a mismatch here is a
        // programming error, which is why a panic! was considered
        if self.cluster_indexes.len() != self.data_points.get_points().len(){
            eprintln!("No puede ocurrir que la longitud de los indices sea distinta al numero de puntos");
            return false;
        }
        // Check that there are no empty clusters
        for cluster in 0..self.number_of_clusters{
            match self.cluster_indexes.iter().find(|&&x| x == cluster as u32){
                // Found: nothing to do
                Some(_) => (),
                // No assignment index points at this cluster
                None => {
                    return false
                },
            }
        }
        // No empty cluster was found
        return true;
    }
    /// Returns the fitness value. If it was computed before, returns the
    /// cached value without repeating the computation.
    pub fn fitness(&self) -> f64 {
        let fit_opt = *self.fitness.borrow();
        match fit_opt{
            // Cached fitness available: no recomputation
            Some(fitness) => return fitness,
            // Fitness not computed yet: compute, cache and return it
            None => {
                let calc_fitness = self.global_cluster_mean_distance() + self.lambda * self.infeasibility() as f64;
                *self.fitness.borrow_mut() = Some(calc_fitness);
                return calc_fitness;
            }
        }
    }
    /// Computes the fitness value together with the number of fitness
    /// evaluations consumed in the process (either 0 or 1).
    pub fn fitness_and_consumed(&self) -> (f64, u32){
        let consumed = match *self.fitness.borrow(){
            None => 1,
            Some(_) => 0,
        };
        return (self.fitness(), consumed);
    }
    /// Resets the cached fitness to None, so the next access to the value
    /// recomputes it.
    pub fn invalid_fitness_cache(&mut self){
        *self.fitness.borrow_mut() = None;
    }
    /// Tells whether the fitness of the solution is cached (already computed).
    pub fn is_fitness_cached(&self) -> bool{
        match *self.fitness.borrow(){
            Some(_) => return true,
            None => return false,
        };
    }
    /// Returns the first valid neighbour of the solution that improves the
    /// current one (first-best strategy).
    /// We need to know how many evaluations may still be consumed (stopping
    /// criterion), to avoid generating a whole neighbourhood when the
    /// evaluation budget is exhausted.
    pub fn get_neighbour(&self, left_iterations: i32, rng: &mut StdRng) -> FitnessEvaluationResult<Option<Self>> {
        // Accounts for the fitness-function calls consumed by this operation
        let mut fitness_consumed = 0;
        // Fitness of the solution to beat; computed only once
        let (fitness_to_beat, ev_cons) = self.fitness_and_consumed();
        fitness_consumed += ev_cons;
        // Take the neighbour generators
        let mut neighbours_generator = NeighbourGenerator::generate_all_neighbours(self.data_points.len() as i32, self.number_of_clusters);
        // Shuffle the neighbour generators
        neighbours_generator.shuffle(rng);
        for current_generator in neighbours_generator{
            // Generate the new solution
            let current_solution = self.generate_solution_from(current_generator);
            // Invalid solutions do not cost fitness evaluations
            if current_solution.is_valid() == false{
                continue;
            }
            // Fitness of the new solution
            let (current_fitness, ev_cons) = current_solution.fitness_and_consumed();
            fitness_consumed += ev_cons;
            // Did we improve on the original solution?
            if current_fitness < fitness_to_beat{
                return FitnessEvaluationResult::new(Some(current_solution), fitness_consumed);
            }
            // Did we exceed the available evaluation budget?
            // NOTE(review): a negative `left_iterations` wraps to a huge u32
            // here — confirm callers never pass negative values
            if fitness_consumed >= left_iterations as u32{
                // No better neighbour found in time
                return FitnessEvaluationResult::new(None, fitness_consumed);
            }
        }
        // No better neighbour found
        return FitnessEvaluationResult::new(None, fitness_consumed);
    }
    /// From a NeighbourGenerator, builds the solution that results from
    /// applying the generator to &self.
    fn generate_solution_from(&self, generator: NeighbourGenerator) -> Self{
        let mut new_solution = Self{
            cluster_indexes: self.cluster_indexes.clone(),
            data_points: &self.data_points,
            constraints: &self.constraints,
            number_of_clusters: self.number_of_clusters,
            lambda: self.lambda,
            fitness: RefCell::new(None), // None because the assignment changed,
                                         // so the fitness will be different
        };
        new_solution.cluster_indexes[generator.get_element_index() as usize] = generator.get_new_cluster();
        return new_solution;
    }
    /// Generates a random initial solution, used as starting point of the searches
    // TODO -- must not leave empty clusters
    pub fn generate_random_solution(
        data_points: &'a DataPoints,
        constraints: &'b Constraints,
        number_of_clusters: i32,
        rng: &mut StdRng
    ) -> Self {
        return Self::new(
            (0..data_points.get_points().len()).into_iter().map(|_| rng.gen_range(0..number_of_clusters) as u32).collect(),
            data_points,
            constraints,
            number_of_clusters,
        );
    }
/// Dado un cluster (representado por el entero que los identifica), calcula
/// la distancia intracluster en la solucion actual
pub fn intra_cluster_distance(&self, cluster: u32) -> f64{
// Calculamos el vector de puntos que estan en el cluster
let cluster_points = self.get_points_in_cluster(cluster);
// Comprobacion de seguridad
if cluster_points.len() == 0{
panic!("[Err: Solution::intra_cluster_distance] Cluster without points");
}
// Calculamos el centroide de dicho conjunto de puntos
let centroid = Point::calculate_centroid(&cluster_points);
// Calculamos la distancia intracluster
let mut cum_sum = 0.0;
for point in &cluster_points{
cum_sum += Point::distance(point, ¢roid);
}
return cum_sum / cluster_points.len() as f64;
}
    /// Given a cluster id, returns the points that belong to that cluster.
    pub fn get_points_in_cluster(&self, cluster: u32) -> Vec<&Point>{
        let mut cluster_points = vec![];
        for (index, curr_cluster) in self.cluster_indexes.iter().enumerate(){
            if *curr_cluster == cluster{
                cluster_points.push(&self.data_points.get_points()[index]);
            }
        }
        return cluster_points;
    }
    /// Given a cluster id, returns the indices of the points that belong to it.
    pub fn get_index_points_in_cluster(&self, cluster: u32) -> Vec<usize>{
        let mut index_cluster_points = vec![];
        for (index, curr_cluster) in self.cluster_indexes.iter().enumerate(){
            if *curr_cluster == cluster{
                index_cluster_points.push(index);
            }
        }
        return index_cluster_points;
    }
    /// Mean of the intra-cluster distances over all clusters.
    /// This is one of the components of the fitness function.
    pub fn global_cluster_mean_distance(&self) -> f64{
        let mut cum_sum = 0.0;
        for i in 0 .. self.number_of_clusters as u32 {
            cum_sum += self.intra_cluster_distance(i);
        }
        return cum_sum / self.number_of_clusters as f64;
    }
    /// Number of constraints violated by the current solution.
    pub fn infeasibility(&self) -> i32{
        let mut infea = 0;
        for ((first_index, second_index), value) in self.constraints.get_data(){
            // Cluster assignments of the two points involved in the constraint
            let first_cluster = self.cluster_indexes[*first_index as usize];
            let second_cluster = self.cluster_indexes[*second_index as usize];
            match value{
                ConstraintType::MustLink => {
                    // Violated when the points are NOT in the same cluster
                    if first_cluster != second_cluster{
                        infea += 1;
                    }
                }
                ConstraintType::CannotLink => {
                    // Violated when the points ARE in the same cluster
                    if first_cluster == second_cluster{
                        infea += 1;
                    }
                }
            }
        }
        return infea;
    }
    /// Prints the intra-cluster distance of every cluster.
    /// This function was used to debug the code.
    pub fn show_intra_cluster_distances(&self){
        for cluster in 0 .. self.number_of_clusters{
            println!("\tDistancia intra-cluster del cluster {}: {}", cluster, self.intra_cluster_distance(cluster as u32));
        }
    }
    /// Returns the set of clusters that have more than one point assigned.
    // TODO -- TEST -- very easy to test and quite critical
    pub fn get_clusters_with_more_than_one_point(&self) -> Vec<i32>{
        let mut clusters_with_more_than_one_point = vec![];
        for cluster in 0..self.number_of_clusters{
            let points_in_cluster = self.get_points_in_cluster(cluster as u32);
            if points_in_cluster.len() >= 2{
                clusters_with_more_than_one_point.push(cluster);
            }
        }
        return clusters_with_more_than_one_point;
    }
    /// Returns the set of clusters that have no points assigned.
    // TODO -- TEST -- very easy to test and critical
    pub fn get_cluster_without_points(&self) -> Vec<i32>{
        let mut clusters_without_points = vec![];
        for cluster in 0..self.number_of_clusters{
            let points_in_cluster = self.get_points_in_cluster(cluster as u32);
            if points_in_cluster.len() == 0{
                clusters_without_points.push(cluster);
            }
        }
        return clusters_without_points;
    }
}
/// Metodos asociados a la parte genetica de las practicas
impl<'a, 'b> Solution<'a, 'b> {
    /// Given two solutions, returns the one with the better fitness together
    /// with the number of fitness evaluations consumed.
    /// "Better" means the numerically smaller fitness, since we are minimizing.
    pub fn binary_tournament<'c>(first: &'c Solution<'a, 'b>, second: &'c Solution<'a, 'b>) -> (&'c Solution<'a, 'b>, u32){
        let (first_fitness, first_consumed) = first.fitness_and_consumed();
        let (second_fitness, second_consumed) = second.fitness_and_consumed();
        if first_fitness < second_fitness{
            return (first, first_consumed + second_consumed);
        }else{
            return (second, first_consumed + second_consumed);
        }
    }
    /// Uniform crossover operator for two solutions.
    // TODO -- test this, it might be wrong
    pub fn uniform_cross(first: &Self, second: &Self, rng: &mut StdRng) -> Self{
        let gen_size= first.cluster_indexes.len();
        let half_gen_size = (gen_size as f64 / 2.0) as usize;
        // Randomly choose the positions of the first parent's genes to keep.
        // Take a random permutation of {0, ..., gen_size - 1} and keep its
        // first half; the second half gives the positions taken from the
        // second parent
        let mut positions_to_mutate: Vec<usize> = (0..gen_size as usize).collect();
        positions_to_mutate.shuffle(rng);
        // New solution starting from one of the parents' data
        let mut crossed_solution = first.clone();
        // Take the random elements from the first parent
        // NOTE(review): this loop is a no-op, since crossed_solution is
        // already a clone of `first` — harmless but redundant
        for index in 0..half_gen_size{
            // Use the index given by the random permutation
            let curr_index = positions_to_mutate[index];
            crossed_solution.cluster_indexes[curr_index] = first.cluster_indexes[curr_index];
        }
        // Take the random elements from the second parent
        for index in half_gen_size..gen_size{
            // Use the index given by the random permutation
            let curr_index = positions_to_mutate[index];
            crossed_solution.cluster_indexes[curr_index] = second.cluster_indexes[curr_index];
        }
        // Should not be necessary, but reset the cached fitness to be safe.
        // No extra cost: after crossing, the new solution's fitness has to be
        // recomputed anyway
        crossed_solution.invalid_fitness_cache();
        // Repair the solution if needed
        if crossed_solution.is_valid() == false {
            crossed_solution.repair_solution(rng);
        }
        return crossed_solution;
    }
    /// Fixed-segment crossover operator.
    // TODO -- test this, it could be quite wrong
    pub fn cross_segment(first: &Self, second: &Self, rng: &mut StdRng) -> Self{
        // New solution starting from one of the parents' data
        let mut crossed_solution = first.clone();
        let gen_size= first.cluster_indexes.len();
        // Choose the start and length of the segment
        let segment_start = rng.gen_range(0..gen_size);
        let segment_size = rng.gen_range(0..gen_size);
        // Copy the values of the first parent
        for i in 0..segment_size{
            // Current position inside the segment (wrapping around the end)
            let index = (segment_start + i) % gen_size;
            crossed_solution.cluster_indexes[index] = first.cluster_indexes[index];
        }
        // Copy the remaining values with uniform crossover
        for i in 0..(gen_size - segment_size){
            // Same index computation as before, starting where we left off
            let index = (segment_size + segment_start + i) % gen_size;
            // Parent to take the information from.
            // The upper bound must be 2, since the range excludes it
            let choose_parent = rng.gen_range(0..2);
            if choose_parent == 0{
                crossed_solution.cluster_indexes[index] = first.cluster_indexes[index];
            }else{
                crossed_solution.cluster_indexes[index] = second.cluster_indexes[index];
            }
        }
        // Should not be necessary, but reset the cached fitness to be safe.
        // No extra cost: after crossing, the new solution's fitness has to be
        // recomputed anyway
        crossed_solution.invalid_fitness_cache();
        // Repair the solution if needed
        if crossed_solution.is_valid() == false {
            crossed_solution.repair_solution(rng);
        }
        return crossed_solution;
    }
    /// Returns a mutated copy of the solution.
    ///
    /// We allow mutating into an invalid solution, which is then repaired. If
    /// we did not, we would lose much of the variability the mutation should
    /// introduce: for example, a cluster left with a single point would get
    /// "stuck", since mutation could never change that position. This caused
    /// bad behaviour in earlier versions
    pub fn mutated(&self, rng: &mut StdRng) -> Self{
        // Copy the solution so we can modify it
        let mut mutated_sol = self.clone();
        // Pick a position to mutate. It may belong to a cluster with fewer
        // than two points, in which case the cluster becomes empty and the
        // solution must be repaired. The alternative — only allowing clusters
        // with at least two points — would exclude single-point clusters and
        // reduce the variability introduced by the mutation
        let mut_position_candidates: Vec<i32> = (0..mutated_sol.data_points.len() as i32).collect();
        let mut_position = mut_position_candidates.choose(rng).expect("No tenemos puntos en nuestro dataset que asignar a clusters");
        // The new value may be any in-range cluster other than the original
        // one; keeping the same cluster would waste an effective mutation
        let mut new_cluster_candidates: Vec<i32> = (0..mutated_sol.number_of_clusters).collect();
        new_cluster_candidates.retain(|&x| x != mutated_sol.cluster_indexes[*mut_position as usize] as i32);
        let mut_value = new_cluster_candidates.choose(rng).expect("No hemos podido generar una lista de clusters candidatos");
        // Apply the mutation
        mutated_sol.cluster_indexes[*mut_position as usize] = *mut_value as u32;
        // Reset the fitness cache, because the returned solution changed
        mutated_sol.invalid_fitness_cache();
        // If the solution became invalid, repair it
        if mutated_sol.is_valid() == false{
            mutated_sol.repair_solution(rng);
        }
        // The repair may hand back the very same solution; in that case,
        // mutate again
        if mutated_sol.cluster_indexes == self.cluster_indexes{
            return self.mutated(rng);
        }
        return mutated_sol;
    }
/// Devuelve una solucion mutada fuertemente. Se usa para iterative_local_search. La mutacion
/// que usamos en algoritmos geneticos, porque queremos alejarnos mas de la solucion dada
pub fn hard_mutated(&self, segment_size: usize, rng: &mut StdRng) -> Self{
// Copia para devolver la solucion mutada sin tener que mutar la solucion original
let mut mutated = self.clone();
// Seleccionamos el inicio del segmento
let gen_size = self.cluster_indexes.len();
let segment_start = rng.gen_range(0..gen_size);
// Mutamos los valores el el segmento. El resto de valores son automaticamente copiados del
// padre porque mutated es clone de self
for i in 0..segment_size{
// Indice que debemos mutar segun los valores del segmento
let index = (segment_start + i) % segment_size;
// Mutamos dicho valor. No comprobamos que la mutacion sea ahora valida, para dar mas
// variedad. Mas adelante repararemos la solucion
let new_cluster = rng.gen_range(0..mutated.number_of_clusters);
mutated.cluster_indexes[index] = new_cluster as u32;
}
// Reparamos la solucion si la solucion mutada acaba por no ser valida
if mutated.is_valid() == false{
mutated.repair_solution(rng);
}
return mutated;
}
    /// Repairs a solution: takes the clusters with no assigned points and
    /// randomly moves into them a point from a cluster that has more than one
    /// point (so that the repair cannot empty other clusters).
    pub fn repair_solution(&mut self, rng: &mut StdRng){
        // Clusters with no assigned points
        let clusters_without_points = self.get_cluster_without_points();
        if clusters_without_points.len() == 0{
            return;
        }
        // Repair the first empty cluster
        let cluster_without_points = clusters_without_points[0];
        // Clusters with at least two assigned points
        let clusters_with_more_than_one_point = self.get_clusters_with_more_than_one_point();
        // Perform the change:
        // randomly select the cluster we take a point from
        let selected_cluster = clusters_with_more_than_one_point.choose(rng).expect("No hay clusters con mas de un punto");
        // Select the index of the point that changes cluster
        let point_indixes_selected_cluster = self.get_index_points_in_cluster(*selected_cluster as u32);
        let selected_point_index = point_indixes_selected_cluster.choose(rng).expect("No hay puntos en el cluster seleccionado");
        // Apply the assignment
        self.cluster_indexes[*selected_point_index] = cluster_without_points as u32;
        // If more clusters were empty, recurse so the repair continues
        // (each call fixes exactly one empty cluster)
        if clusters_without_points.len() >= 2{
            self.repair_solution(rng);
        }
        // The solution was modified, so the fitness cache must be invalidated
        self.invalid_fitness_cache();
    }
}
/// Methods associated with the memetic part of the coursework
impl<'a, 'b> Solution<'a, 'b> {
    /// Soft local search: visits the point positions in random order and
    /// reassigns each to its best cluster, stopping after `max_fails`
    /// non-improving moves. Returns the new solution together with the
    /// fitness evaluations consumed.
    pub fn soft_local_search(&self, max_fails: i32, rng: &mut StdRng) -> FitnessEvaluationResult<Self>{
        let mut new_solution = self.clone();
        let mut fit_eval_cons = 0;
        // Visit the point positions in random order
        let mut indixes: Vec<i32> = (0..self.data_points.len() as i32).collect();
        indixes.shuffle(rng);
        // Initial values for the algorithm
        let mut fails = 0;
        let mut i = 0;
        // Iterate over the cluster assignments while under the failure limit
        while fails < max_fails && i < self.data_points.len(){
            // Position to change on this iteration
            let index = indixes[i];
            // Select the best cluster for the point at position `index`
            let new_cluster_result = new_solution.select_best_cluster(index as u32);
            let new_cluster = new_cluster_result.get_result();
            fit_eval_cons += new_cluster_result.get_iterations_consumed();
            // Apply the change, remembering the original assignment
            let past_cluster = new_solution.cluster_indexes[index as usize];
            new_solution.cluster_indexes[index as usize] = *new_cluster;
            // If the assignment did not change, count it as a failure
            if *new_cluster == past_cluster{
                fails += 1;
            }
            // Move on to the next position
            i += 1;
        }
        return FitnessEvaluationResult::new(new_solution, fit_eval_cons);
    }
    /// Selects the best cluster assignment for the point given by its index.
    /// The best assignment is the valid one with the lowest fitness value.
    /// This operation consumes many fitness evaluations.
    /// &self must be a valid solution for this search to work properly.
    // TODO -- TEST -- quite easy to test, and a critical part of the memetic algorithms
    pub fn select_best_cluster(&self, point_index: u32) -> FitnessEvaluationResult<u32>{
        // Safety check
        debug_assert!(
            self.is_valid() == true,
            "La solucion original no es valida, no se puede buscar la mejor asignacion de cluster"
        );
        let mut fit_eval_cons = 0;
        let mut best_cluster = -1;
        let mut best_fitness = -1.0;
        // Try every possible cluster assignment
        for cluster in 0..self.number_of_clusters{
            // Build the solution corresponding to this assignment
            let mut new_sol = self.clone();
            new_sol.cluster_indexes[point_index as usize] = cluster as u32;
            // Skip invalid solutions
            if new_sol.is_valid() == false{
                continue;
            }
            // Evaluate the fitness; the cache must be invalidated first
            new_sol.invalid_fitness_cache();
            let (new_sol_fit, ev_cons) = new_sol.fitness_and_consumed();
            fit_eval_cons += ev_cons;
            // Keep the best assignment so far (-1.0 marks "none found yet")
            if best_fitness == -1.0 || new_sol_fit < best_fitness{
                best_fitness = new_sol_fit;
                best_cluster = cluster;
            }
        }
        // Safety check
        debug_assert!(
            best_cluster != -1,
            "No hemos encontrado una mejor asignacion. Esto no es correcto, pues la solucion original es valida y podria ser un primer candidato"
        );
        return FitnessEvaluationResult::new(best_cluster as u32, fit_eval_cons);
    }
}
/// Methods associated with the simulated annealing algorithm
impl<'a, 'b> Solution<'a, 'b>{
    /// Generates a single random neighbour, without using a neighbour
    /// generator. Very similar to `mutated`; however, `mutated` allows
    /// invalid solutions that are then repaired, so more than one value may
    /// change. Here we check that exactly one position was modified.
    pub fn one_random_neighbour(&self, rng: &mut StdRng) -> Self{
        // Use the mutation operator to make the change
        let mutated = self.mutated(rng);
        // More than one difference means the repair operator introduced extra
        // changes, which is not what we want here.
        // NOTE(review): the retry below returns a fresh mutation WITHOUT
        // re-checking the discrepancy count — confirm whether it should
        // recurse into one_random_neighbour instead
        if mutated.number_of_discrepancies(self) != 1{
            return self.mutated(rng);
        }
        return mutated;
    }
    /// Counts the points whose cluster assignment differs between two solutions
    // TODO -- TEST -- very easy to write a test for this
    fn number_of_discrepancies(&self, other: &Solution) -> i32{
        let mut discrepancies = 0;
        for index in 0..self.cluster_indexes.len(){
            if self.cluster_indexes[index] != other.cluster_indexes[index]{
                discrepancies += 1;
            }
        }
        return discrepancies;
    }
}
#[cfg(test)]
mod tests{
use crate::problem_datatypes::Solution;
use crate::problem_datatypes::DataPoints;
use crate::problem_datatypes::Point;
use crate::problem_datatypes::Constraints;
use crate::problem_datatypes::ConstraintType;
use rand::rngs::StdRng;
use rand::SeedableRng;
    // To check that two solutions are practically equal (ignoring
    // floating-point error)
use assert_approx_eq::assert_approx_eq;
fn epsilon() -> f64{0.01} // Tolerancia a fallos de punto flotante
fn max_test_iterations() -> u32{10000} // Maximo de iteraciones sobre test
    /// Takes a callback because otherwise we would have to clone the data the
    /// returned solution borrows.
    /// FnOnce because we want the callback to take ownership of the solution we build
    fn generate_basic_solution(callback: impl FnOnce(&Solution)) {
        let cluster_indexes = vec![0, 1, 2, 3, 0, 1];
        let data_points = DataPoints::new(vec![
            Point::from_vec(vec![1.0, 0.0, 0.0, 0.0, 0.0, 0.0]),
            Point::from_vec(vec![0.0, 1.0, 0.0, 0.0, 0.0, 0.0]),
            Point::from_vec(vec![0.0, 0.0, 1.0, 0.0, 0.0, 0.0]),
            Point::from_vec(vec![0.0, 0.0, 0.0, 1.0, 0.0, 0.0]),
            Point::from_vec(vec![0.0, 0.0, 0.0, 0.0, 1.0, 0.0]),
            Point::from_vec(vec![0.0, 0.0, 0.0, 0.0, 0.0, 1.0]),
        ]);
        let mut constraints = Constraints::new();
        constraints.add_constraint(0, 1, ConstraintType::CannotLink);
        constraints.add_constraint(0, 2, ConstraintType::CannotLink);
        constraints.add_constraint(1, 3, ConstraintType::CannotLink);
        constraints.add_constraint(1, 4, ConstraintType::MustLink);
        constraints.add_constraint(2, 5, ConstraintType::MustLink);
        let number_of_clusters = 4;
        let solution = Solution::new(cluster_indexes, &data_points, &constraints, number_of_clusters);
        callback(&solution)
    }
#[test]
// Simplemente comprobamos que estamos almacenando bien los puntos
fn test_solution_saves_properly_data_points_over_basic_sol(){
generate_basic_solution(|solution| {
let data_points = solution.get_data_points().get_points();
assert_eq!(solution.get_points_in_cluster(0), vec![&data_points[0], &data_points[4]]);
assert_eq!(solution.get_points_in_cluster(1), vec![&data_points[1], &data_points[5]]);
assert_eq!(solution.get_points_in_cluster(2), vec![&data_points[2]]);
assert_eq!(solution.get_points_in_cluster(3), vec![&data_points[3]]);
});
}
#[test]
// Comprobamos que la distancia maxima entre dos puntos es la que tiene que ser
fn test_lambda_is_correct_over_basic_sol(){
generate_basic_solution(|solution| {
let calculated_lambda = solution.get_lambda();
let expected_lambda = (2.0 as f64).sqrt() / 5.0;
assert_approx_eq::assert_approx_eq!(calculated_lambda, expected_lambda, epsilon());
});
}
#[test]
// Comprobamos que estamos calculando bien el numero de restricciones violadas
fn test_infeasibility_is_correct_over_basic_sol(){
generate_basic_solution(|solution| {
let calc_infea = solution.infeasibility();
let exp_infea = 2; // Solo se violan las dos must link
assert_eq!(calc_infea, exp_infea);
});
// Hacemos una variacion de la solucion
generate_basic_solution(|solution| {
// Modifico la solucion
let cluster_indexes = vec![1, 1, 2, 3, 0, 1];
let other_solution = Solution::new(cluster_indexes, solution.data_points, solution.constraints, solution.number_of_clusters);
let calc_infea = other_solution.infeasibility();
let exp_infea = 3; // Se violan las dos must link y una CannotLink
assert_eq!(calc_infea, exp_infea);
});
}
#[test]
// Check the centroid computed for each of the four clusters
fn test_centroids_over_basic_sol(){
    generate_basic_solution(|solution| {
        // First cluster
        let cluster_points = solution.get_points_in_cluster(0);
        let calc_centroid = Point::calculate_centroid(&cluster_points);
        let exp_centroid = Point::from_vec(vec![0.5, 0. , 0. , 0. , 0.5, 0. ]);
        assert_eq!(calc_centroid, exp_centroid);
        // Second cluster
        let cluster_points = solution.get_points_in_cluster(1);
        let calc_centroid = Point::calculate_centroid(&cluster_points);
        let exp_centroid = Point::from_vec(vec![0. , 0.5, 0. , 0. , 0. , 0.5]);
        assert_eq!(calc_centroid, exp_centroid);
        // Third cluster
        let cluster_points = solution.get_points_in_cluster(2);
        let calc_centroid = Point::calculate_centroid(&cluster_points);
        let exp_centroid = Point::from_vec(vec![0.0, 0.0, 1.0, 0.0, 0.0, 0.0]);
        assert_eq!(calc_centroid, exp_centroid);
        // Fourth cluster
        let cluster_points = solution.get_points_in_cluster(3);
        let calc_centroid = Point::calculate_centroid(&cluster_points);
        let exp_centroid = Point::from_vec(vec![0.0, 0.0, 0.0, 1.0, 0.0, 0.0]);
        assert_eq!(calc_centroid, exp_centroid);
    });
}
#[test]
// NOTE(review): "intracluser" in the test name is a typo for "intracluster";
// renaming would be safe for a #[test] fn but is left to a separate change
fn test_intracluser_distance_over_basic_sol(){
    generate_basic_solution(|solution| {
        // Intra-cluster distance of the first cluster (sqrt(2)/2)
        let calc_intra = solution.intra_cluster_distance(0);
        let exp_intra = 0.7071067811865476;
        assert_approx_eq!(calc_intra, exp_intra, epsilon());
        // Intra-cluster distance of the second cluster
        let calc_intra = solution.intra_cluster_distance(1);
        let exp_intra = 0.7071067811865476;
        assert_approx_eq!(calc_intra, exp_intra, epsilon());
        // Intra-cluster distance of the third cluster (single point => 0)
        let calc_intra = solution.intra_cluster_distance(2);
        let exp_intra = 0.0;
        assert_approx_eq!(calc_intra, exp_intra, epsilon());
        // Intra-cluster distance of the fourth cluster
        let calc_intra = solution.intra_cluster_distance(3);
        let exp_intra = 0.0;
        assert_approx_eq!(calc_intra, exp_intra, epsilon());
    });
}
#[test]
fn test_global_cluster_distance_over_basic_sol(){
    generate_basic_solution(|solution| {
        let calc_global_dist = solution.global_cluster_mean_distance();
        // Mean of the four intra-cluster distances: two clusters contribute
        // sqrt(2)/2 each and the other two contribute zero
        let exp_global_dist = (0.7071067811865476 * 2.0) / 4.0;
        assert_approx_eq!(calc_global_dist, exp_global_dist, epsilon());
    });
}
#[test]
fn test_fitness_is_correct_over_basic_sol(){
    generate_basic_solution(|solution| {
        let calc_fitness = solution.fitness();
        // fitness = lambda * infeasibility + global mean cluster distance
        let exp_lambda = (2.0 as f64).sqrt() / 5.0;
        let exp_global_dist = (0.7071067811865476 * 2.0) / 4.0;
        let exp_infea = 2;
        let exp_fitness = exp_lambda * exp_infea as f64 + exp_global_dist;
        assert_approx_eq::assert_approx_eq!(calc_fitness, exp_fitness, epsilon());
    });
}
#[test]
fn test_solutions_are_not_valid(){
    generate_basic_solution(|solution| {
        // Starting from the basic solution, build an invalid one
        // (presumably invalid because some clusters are left empty)
        let cluster_indexes = vec![0, 0, 0, 0, 1, 1];
        let solution = Solution::new(cluster_indexes, &solution.data_points, &solution.constraints, solution.number_of_clusters);
        let expected_is_valid = false;
        let calc_is_valid = solution.is_valid();
        assert_eq!(expected_is_valid, calc_is_valid);
        // Now build a solution that is invalid because the index vector is
        // too small
        let cluster_indexes = vec![0, 1, 2, 3];
        let solution = Solution::new(cluster_indexes, &solution.data_points, &solution.constraints, solution.number_of_clusters);
        let expected_is_valid = false;
        let calc_is_valid = solution.is_valid();
        assert_eq!(expected_is_valid, calc_is_valid);
        // And one that is invalid because the index vector is too large
        let cluster_indexes = vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10];
        let solution = Solution::new(cluster_indexes, &solution.data_points, &solution.constraints, solution.number_of_clusters);
        let expected_is_valid = false;
        let calc_is_valid = solution.is_valid();
        assert_eq!(expected_is_valid, calc_is_valid);
    });
}
#[test]
fn test_repair_bad_solutions(){
    generate_basic_solution(|solution| {
        // Starting from the basic solution, build an invalid solution that
        // still has the correct dimensionality
        let cluster_indexes = vec![0, 0, 0, 0, 1, 1];
        let solution = Solution::new(cluster_indexes, &solution.data_points, &solution.constraints, solution.number_of_clusters);
        // Repair the solution. Since repair depends on randomness, repeat
        // many times over the same bad solution and check it is always fixed
        let mut rng = StdRng::seed_from_u64(123456789);
        for _ in 0..max_test_iterations(){
            let mut curr_sol = solution.clone();
            curr_sol.repair_solution(&mut rng);
            let expected_is_valid = true;
            let calc_is_valid = curr_sol.is_valid();
            assert_eq!(expected_is_valid, calc_is_valid);
        }
        // Build another invalid solution and check it is repaired correctly
        let cluster_indexes = vec![2, 1, 2, 0, 1, 1];
        let solution = Solution::new(cluster_indexes, &solution.data_points, &solution.constraints, solution.number_of_clusters);
        // Again repeat many times because of the random dependency
        let mut rng = StdRng::seed_from_u64(123456789);
        for _ in 0..max_test_iterations(){
            let mut curr_sol = solution.clone();
            curr_sol.repair_solution(&mut rng);
            let expected_is_valid = true;
            let calc_is_valid = curr_sol.is_valid();
            assert_eq!(expected_is_valid, calc_is_valid);
        }
    });
}
#[test]
fn test_mutation_generates_valid_population(){
    generate_basic_solution(|solution| {
        let mut rng = StdRng::seed_from_u64(123456789);
        // Mutation depends on randomness, so repeat the experiment a fixed
        // number of times
        for _ in 0..max_test_iterations(){
            let mut_sol = solution.mutated(&mut rng);
            let expected_is_valid = true;
            let calc_is_valid = mut_sol.is_valid();
            assert_eq!(expected_is_valid, calc_is_valid);
        }
    });
}
#[test]
fn test_mutated_solution_differs_at_least_one_position(){
    generate_basic_solution(|solution| {
        let mut rng = StdRng::seed_from_u64(123456789);
        // Mutation depends on randomness, so repeat the experiment a fixed
        // number of times
        for _ in 0..max_test_iterations(){
            let mut_sol = solution.mutated(&mut rng);
            // Count the positions where the mutated assignment differs from
            // the original
            let mut calc_diffs = 0;
            for (index, cluster_assignation) in mut_sol.cluster_indexes.iter().enumerate(){
                if *cluster_assignation != solution.cluster_indexes[index]{
                    calc_diffs += 1;
                }
            }
            let calc_diffs = calc_diffs;
            let expected_min_diffs = 1;
            assert!(calc_diffs >= expected_min_diffs, "Se esperaba al menos una diferencia, se obtuvieron {} diferencias", calc_diffs);
        }
    });
}
}
|
use std::ops::Deref;
use rocket::http::Status;
use rocket::request::{self, FromRequest};
use rocket::{Outcome, Request};
pub use super::models::Staff;
pub use super::models::new::Staff as NewStaff;
use super::{DatabaseConnection, SelectError};
use session::Session;
// Enable upsert on the email field: presumably the generated insert helper
// updates full_name and is_admin when a row with the same email exists --
// confirm against the generate_crud_fns! macro definition.
generate_crud_fns!(staff, NewStaff, Staff, (email -> full_name, is_admin));
/// Fetches a single staff record by primary key `id`.
pub fn get(conn: &DatabaseConnection, id: i32) -> Result<Staff, SelectError> {
    generate_select_body!(single, conn, staff, Staff, (id, id))
}
/// Fetches the single staff record whose `email` column equals `staff_email`.
pub fn find_email(conn: &DatabaseConnection, staff_email: &str) -> Result<Staff, SelectError> {
    generate_select_body!(single, conn, staff, Staff, (email, staff_email))
}
/// Fetches all staff records.
pub fn get_all(conn: &DatabaseConnection) -> Result<Vec<Staff>, SelectError> {
    generate_select_body!(multi, conn, staff, Staff)
}
/// Request guard: resolves the logged-in staff member from the session email.
///
/// A missing/invalid session forwards from the `Session` guard via `?`; an
/// email with no staff row yields 403, and a database error yields 500.
impl<'a, 'r> FromRequest<'a, 'r> for Staff {
    type Error = ();
    fn from_request(request: &'a Request<'r>) -> request::Outcome<Staff, ()> {
        // `?` on a guard Outcome short-circuits with that guard's failure.
        let sess = request.guard::<Session>()?;
        let conn = request.guard::<DatabaseConnection>()?;
        match find_email(&conn, &sess.email) {
            Ok(s) => Outcome::Success(s),
            // Authenticated session but no matching staff row: forbidden.
            Err(SelectError::NoSuchValue()) => Outcome::Failure((Status::Forbidden, ())),
            Err(SelectError::DieselError(e)) => {
                error!("Diesel error fetching Staff record: {}", e);
                debug!("Detailed error: {:?}", e);
                Outcome::Failure((Status::InternalServerError, ()))
            }
        }
    }
}
/// Newtype request guard wrapping a `Staff` member whose `is_admin` is set
/// (enforced by the `FromRequest` impl for `Admin`).
pub struct Admin(pub Staff);
impl Deref for Admin {
    type Target = Staff;
    fn deref(&self) -> &Staff {
        &self.0
    }
}
impl<'a, 'r> FromRequest<'a, 'r> for Admin {
    type Error = ();

    /// Request guard: succeeds only when the resolved `Staff` member is an
    /// admin; everyone else receives 403 Forbidden.
    fn from_request(request: &'a Request<'r>) -> request::Outcome<Admin, ()> {
        let staff = request.guard::<Staff>()?;
        if !staff.is_admin {
            return Outcome::Failure((Status::Forbidden, ()));
        }
        Outcome::Success(Admin(staff))
    }
}
|
#[doc = "Reader of register ANA_CTL1"]
pub type R = crate::R<u32, super::ANA_CTL1>;
#[doc = "Writer for register ANA_CTL1"]
pub type W = crate::W<u32, super::ANA_CTL1>;
#[doc = "Register ANA_CTL1 `reset()`'s with value 0x0606_0000"]
impl crate::ResetValue for super::ANA_CTL1 {
    type Type = u32;
    // Reset state: PDAC (bits 16:19) = 0x6, NDAC (bits 24:27) = 0x6,
    // all other fields zero.
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0x0606_0000
    }
}
#[doc = "Reader of field `MDAC`"]
pub type MDAC_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `MDAC`"]
pub struct MDAC_W<'a> {
    w: &'a mut W,
}
impl<'a> MDAC_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // MDAC occupies bits 0..=7: clear the byte, then OR in the new value.
        self.w.bits = (self.w.bits & !0xff) | ((value as u32) & 0xff);
        self.w
    }
}
#[doc = "Reader of field `PDAC`"]
pub type PDAC_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `PDAC`"]
pub struct PDAC_W<'a> {
    w: &'a mut W,
}
impl<'a> PDAC_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // PDAC occupies bits 16..=19 (4-bit field).
        self.w.bits = (self.w.bits & !(0x0f << 16)) | (((value as u32) & 0x0f) << 16);
        self.w
    }
}
#[doc = "Reader of field `NDAC`"]
pub type NDAC_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `NDAC`"]
pub struct NDAC_W<'a> {
    w: &'a mut W,
}
impl<'a> NDAC_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // NDAC occupies bits 24..=27 (4-bit field).
        self.w.bits = (self.w.bits & !(0x0f << 24)) | (((value as u32) & 0x0f) << 24);
        self.w
    }
}
#[doc = "Reader of field `VPROT_OVERRIDE`"]
pub type VPROT_OVERRIDE_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `VPROT_OVERRIDE`"]
pub struct VPROT_OVERRIDE_W<'a> {
    w: &'a mut W,
}
impl<'a> VPROT_OVERRIDE_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Single-bit field at bit 28.
        self.w.bits = (self.w.bits & !(0x01 << 28)) | (((value as u32) & 0x01) << 28);
        self.w
    }
}
#[doc = "Reader of field `R_GRANT_CTL`"]
pub type R_GRANT_CTL_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `R_GRANT_CTL`"]
pub struct R_GRANT_CTL_W<'a> {
    w: &'a mut W,
}
impl<'a> R_GRANT_CTL_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Single-bit field at bit 29.
        self.w.bits = (self.w.bits & !(0x01 << 29)) | (((value as u32) & 0x01) << 29);
        self.w
    }
}
#[doc = "Reader of field `RST_SFT_HVPL`"]
pub type RST_SFT_HVPL_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `RST_SFT_HVPL`"]
pub struct RST_SFT_HVPL_W<'a> {
    w: &'a mut W,
}
impl<'a> RST_SFT_HVPL_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Single-bit field at bit 30.
        self.w.bits = (self.w.bits & !(0x01 << 30)) | (((value as u32) & 0x01) << 30);
        self.w
    }
}
impl R {
    // Read-side accessors: each extracts its field from the cached register
    // value (`self.bits`) by masking and shifting.
    #[doc = "Bits 0:7 - Trimming of the output margin Voltage as a function of Vpos and Vneg."]
    #[inline(always)]
    pub fn mdac(&self) -> MDAC_R {
        MDAC_R::new((self.bits & 0xff) as u8)
    }
    #[doc = "Bits 16:19 - Trimming of positive pump output Voltage:"]
    #[inline(always)]
    pub fn pdac(&self) -> PDAC_R {
        PDAC_R::new(((self.bits >> 16) & 0x0f) as u8)
    }
    #[doc = "Bits 24:27 - Trimming of negative pump output Voltage:"]
    #[inline(always)]
    pub fn ndac(&self) -> NDAC_R {
        NDAC_R::new(((self.bits >> 24) & 0x0f) as u8)
    }
    #[doc = "Bit 28 - '0': vprot = BG.vprot. '1': vprot = vcc"]
    #[inline(always)]
    pub fn vprot_override(&self) -> VPROT_OVERRIDE_R {
        VPROT_OVERRIDE_R::new(((self.bits >> 28) & 0x01) != 0)
    }
    #[doc = "Bit 29 - r_grant control: '0': r_grant normal functionality '1': forces r_grant LO synchronized on clk_r"]
    #[inline(always)]
    pub fn r_grant_ctl(&self) -> R_GRANT_CTL_R {
        R_GRANT_CTL_R::new(((self.bits >> 29) & 0x01) != 0)
    }
    #[doc = "Bit 30 - '1': Page Latches Soft Reset"]
    #[inline(always)]
    pub fn rst_sft_hvpl(&self) -> RST_SFT_HVPL_R {
        RST_SFT_HVPL_R::new(((self.bits >> 30) & 0x01) != 0)
    }
}
impl W {
    // Write-side accessors: each returns a proxy that knows its field's
    // offset and mask; the proxy mutates `self.bits` in place.
    #[doc = "Bits 0:7 - Trimming of the output margin Voltage as a function of Vpos and Vneg."]
    #[inline(always)]
    pub fn mdac(&mut self) -> MDAC_W {
        MDAC_W { w: self }
    }
    #[doc = "Bits 16:19 - Trimming of positive pump output Voltage:"]
    #[inline(always)]
    pub fn pdac(&mut self) -> PDAC_W {
        PDAC_W { w: self }
    }
    #[doc = "Bits 24:27 - Trimming of negative pump output Voltage:"]
    #[inline(always)]
    pub fn ndac(&mut self) -> NDAC_W {
        NDAC_W { w: self }
    }
    #[doc = "Bit 28 - '0': vprot = BG.vprot. '1': vprot = vcc"]
    #[inline(always)]
    pub fn vprot_override(&mut self) -> VPROT_OVERRIDE_W {
        VPROT_OVERRIDE_W { w: self }
    }
    #[doc = "Bit 29 - r_grant control: '0': r_grant normal functionality '1': forces r_grant LO synchronized on clk_r"]
    #[inline(always)]
    pub fn r_grant_ctl(&mut self) -> R_GRANT_CTL_W {
        R_GRANT_CTL_W { w: self }
    }
    #[doc = "Bit 30 - '1': Page Latches Soft Reset"]
    #[inline(always)]
    pub fn rst_sft_hvpl(&mut self) -> RST_SFT_HVPL_W {
        RST_SFT_HVPL_W { w: self }
    }
}
|
#![allow(dead_code)]
use std::{
collections::VecDeque,
io,
pin::Pin,
task::{Context, Poll},
};
use tokio::io::{AsyncRead, AsyncReadExt, AsyncWrite, ReadBuf};
/// A stream wrapper that allows peeking at incoming bytes without
/// consuming them.
#[derive(Debug)]
pub struct PeekableStream<S> {
    // Underlying stream; reads are served from `buf` first, then from here.
    inner: S,
    // Bytes already read from `inner` but not yet consumed by `poll_read`.
    buf: VecDeque<u8>,
}
impl<S> AsyncRead for PeekableStream<S>
where
    S: AsyncRead + Unpin,
{
    fn poll_read(
        mut self: Pin<&mut Self>,
        cx: &mut Context<'_>,
        buf: &mut ReadBuf,
    ) -> Poll<io::Result<()>> {
        // Serve previously peeked bytes before touching the inner stream.
        // Only the first contiguous slice of the VecDeque is copied; a
        // partial read is allowed by the AsyncRead contract.
        let (first, ..) = &self.buf.as_slices();
        if first.len() > 0 {
            let read = first.len().min(buf.remaining());
            let unfilled = buf.initialize_unfilled_to(read);
            unfilled[0..read].copy_from_slice(&first[0..read]);
            buf.advance(read);
            // remove 0..read -- the bytes are now consumed
            self.buf.drain(0..read);
            Poll::Ready(Ok(()))
        } else {
            Pin::new(&mut self.inner).poll_read(cx, buf)
        }
    }
}
// Writes bypass the peek buffer entirely and delegate to the inner stream.
impl<S> AsyncWrite for PeekableStream<S>
where
    S: AsyncWrite + Unpin,
{
    fn poll_write(
        mut self: Pin<&mut Self>,
        cx: &mut Context<'_>,
        buf: &[u8],
    ) -> Poll<io::Result<usize>> {
        Pin::new(&mut self.inner).poll_write(cx, buf)
    }
    fn poll_flush(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<io::Result<()>> {
        Pin::new(&mut self.inner).poll_flush(cx)
    }
    fn poll_shutdown(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<io::Result<()>> {
        Pin::new(&mut self.inner).poll_shutdown(cx)
    }
}
impl<S> PeekableStream<S> {
    /// Wraps `inner` with an empty peek buffer.
    pub fn new(inner: S) -> Self {
        Self::with_buf(inner, VecDeque::new())
    }

    /// Wraps `inner`, seeding the peek buffer with `buf`.
    pub fn with_buf(inner: S, buf: VecDeque<u8>) -> Self {
        PeekableStream { inner, buf }
    }

    /// Consumes the wrapper, returning the inner stream together with any
    /// bytes that were buffered but never consumed.
    pub fn into_inner(self) -> (S, VecDeque<u8>) {
        (self.inner, self.buf)
    }
}
impl<S> PeekableStream<S>
where
    S: AsyncRead + Unpin,
{
    /// Ensures at least `size` bytes are buffered, reading the shortfall
    /// from the inner stream with `read_exact`.
    async fn fill_buf(&mut self, size: usize) -> io::Result<()> {
        if size > self.buf.len() {
            let to_read = size - self.buf.len();
            let mut buf = vec![0u8; to_read];
            self.inner.read_exact(&mut buf).await?;
            // Vec -> VecDeque conversion, spliced onto the end of the buffer.
            self.buf.append(&mut buf.into());
        }
        Ok(())
    }
    /// Fills `buf` with the next `buf.len()` bytes WITHOUT consuming them:
    /// a later read or peek will see the same bytes again.
    pub async fn peek_exact(&mut self, buf: &mut [u8]) -> io::Result<()> {
        self.fill_buf(buf.len()).await?;
        // make_contiguous lets us copy with a single slice operation.
        let self_buf = self.buf.make_contiguous();
        buf.copy_from_slice(&self_buf[0..buf.len()]);
        Ok(())
    }
    /// Discards the next `size` bytes, reading from the inner stream first
    /// if fewer than `size` bytes are currently buffered.
    pub async fn drain(&mut self, size: usize) -> io::Result<()> {
        self.fill_buf(size).await?;
        self.buf.drain(0..size);
        Ok(())
    }
}
|
use crate::entities::duration::Duration;
/// How a series of sampled values is collapsed into a single number
/// (see `reduce`/`finish` below for the exact semantics of each variant).
#[derive(Serialize, Deserialize, PartialEq, Debug, Clone, GraphQLEnum)]
pub enum AggregationFunction {
    /// Keep the first observed value.
    Oldest,
    /// Keep the most recent observed value.
    Newest,
    Max,
    Min,
    Sum,
    /// Sum during reduction, divided by the count in `finish`.
    Avg,
}
impl AggregationFunction {
    /// Folds the next observation `current` into the running value `prev`.
    pub fn reduce(&self, prev: f64, current: f64) -> f64 {
        match self {
            AggregationFunction::Oldest => prev,
            AggregationFunction::Newest => current,
            AggregationFunction::Max => prev.max(current),
            AggregationFunction::Min => prev.min(current),
            // Avg accumulates a plain sum here; the division by the number
            // of observations happens in `finish`.
            AggregationFunction::Sum | AggregationFunction::Avg => prev + current,
        }
    }
    /// Finalizes the accumulated `value` given the observation `count`.
    pub fn finish(&self, value: f64, count: u64) -> f64 {
        match self {
            AggregationFunction::Avg => value / (count as f64),
            AggregationFunction::Oldest
            | AggregationFunction::Newest
            | AggregationFunction::Max
            | AggregationFunction::Min
            | AggregationFunction::Sum => value,
        }
    }
}
/// Persisted aggregation settings: which function to apply and over what
/// time window.
#[derive(Serialize, Deserialize, PartialEq, Debug, Clone, GraphQLObject)]
pub struct AggregationStrategy {
    pub function: AggregationFunction,
    pub over: Duration,
}
/// GraphQL input variant of `AggregationStrategy` (same fields; separate
/// type because GraphQL distinguishes input from output objects).
#[derive(Serialize, Deserialize, PartialEq, Debug, Clone, GraphQLInputObject)]
pub struct NewAggregationStrategy {
    pub function: AggregationFunction,
    pub over: Duration,
}
impl From<NewAggregationStrategy> for AggregationStrategy {
    // Field-for-field conversion from the input type.
    fn from(strategy: NewAggregationStrategy) -> Self {
        Self {
            function: strategy.function,
            over: strategy.over,
        }
    }
}
|
mod label;
pub use label::Label; |
use std::sync::Arc;
use std::sync::atomic::{AtomicBool, Ordering};
use futures::{Async, Future, Poll};
use futures::task::{self, Task};
use common::Never;
use self::lock::Lock;
/// Handle that signals cancellation to the paired `Canceled` future.
#[derive(Clone)]
pub struct Cancel {
    inner: Arc<Inner>,
}
/// Future that resolves once the paired `Cancel` fires.
pub struct Canceled {
    inner: Arc<Inner>,
}
/// State shared between `Cancel` and `Canceled`.
struct Inner {
    // Set once (SeqCst on both sides); never cleared.
    is_canceled: AtomicBool,
    // Task parked by `Canceled::poll`, notified by `Cancel::cancel`.
    task: Lock<Option<Task>>,
}
impl Cancel {
    /// Creates a linked pair: calling `cancel()` on the `Cancel` half
    /// resolves the `Canceled` future.
    pub fn new() -> (Cancel, Canceled) {
        let inner = Arc::new(Inner {
            is_canceled: AtomicBool::new(false),
            task: Lock::new(None),
        });
        let inner2 = inner.clone();
        (
            Cancel {
                inner: inner,
            },
            Canceled {
                inner: inner2,
            },
        )
    }
    /// Signals cancellation, waking the `Canceled` future if it is parked.
    pub fn cancel(&self) {
        // swap returns the previous value, so the body runs only for the
        // first caller to flip the flag.
        if !self.inner.is_canceled.swap(true, Ordering::SeqCst) {
            if let Some(mut locked) = self.inner.task.try_lock() {
                if let Some(task) = locked.take() {
                    task.notify();
                }
            }
            // if we couldn't take the lock, Canceled was trying to park.
            // After parking, it will check is_canceled one last time,
            // so we can just stop here.
        }
    }
    /// Whether cancellation has been signaled (also true once the
    /// `Canceled` half has been dropped; see its `Drop` impl).
    pub fn is_canceled(&self) -> bool {
        self.inner.is_canceled.load(Ordering::SeqCst)
    }
}
impl Future for Canceled {
    type Item = ();
    type Error = Never;
    /// Resolves once the paired `Cancel` has fired; otherwise parks the
    /// current task so `cancel()` can wake it later.
    fn poll(&mut self) -> Poll<Self::Item, Self::Error> {
        if self.inner.is_canceled.load(Ordering::SeqCst) {
            Ok(Async::Ready(()))
        } else {
            if let Some(mut locked) = self.inner.task.try_lock() {
                if locked.is_none() {
                    // it's possible a Cancel just tried to cancel on another thread,
                    // and we just missed it. Once we have the lock, we should check
                    // one more time before parking this task and going away.
                    if self.inner.is_canceled.load(Ordering::SeqCst) {
                        return Ok(Async::Ready(()));
                    }
                    *locked = Some(task::current());
                }
                Ok(Async::NotReady)
            } else {
                // if we couldn't take the lock, then a Cancel taken has it.
                // The *ONLY* reason is because it is in the process of canceling.
                Ok(Async::Ready(()))
            }
        }
    }
}
impl Drop for Canceled {
    fn drop(&mut self) {
        // A dropped Canceled can never observe the signal, so mark the pair
        // canceled: Cancel::is_canceled() then reports true once the
        // listener is gone.
        self.inner.is_canceled.store(true, Ordering::SeqCst);
    }
}
// a sub module just to protect unsafety
mod lock {
    use std::cell::UnsafeCell;
    use std::ops::{Deref, DerefMut};
    use std::sync::atomic::{AtomicBool, Ordering};
    /// Minimal non-blocking mutex: `try_lock` either grants exclusive
    /// access via a `Locked` guard or fails immediately; it never waits.
    pub struct Lock<T> {
        is_locked: AtomicBool,
        value: UnsafeCell<T>,
    }
    impl<T> Lock<T> {
        pub fn new(val: T) -> Lock<T> {
            Lock {
                is_locked: AtomicBool::new(false),
                value: UnsafeCell::new(val),
            }
        }
        /// Attempts to acquire the lock; `None` if it is already held.
        pub fn try_lock(&self) -> Option<Locked<T>> {
            // swap returns the previous value: false means we won the lock.
            if !self.is_locked.swap(true, Ordering::SeqCst) {
                Some(Locked { lock: self })
            } else {
                None
            }
        }
    }
    // SAFETY: access to `value` is mediated by `is_locked` -- only one
    // `Locked` guard can exist at a time -- so sharing across threads is
    // sound provided T itself is Send.
    unsafe impl<T: Send> Send for Lock<T> {}
    unsafe impl<T: Send> Sync for Lock<T> {}
    /// Exclusive-access guard; releases the lock when dropped.
    pub struct Locked<'a, T: 'a> {
        lock: &'a Lock<T>,
    }
    impl<'a, T> Deref for Locked<'a, T> {
        type Target = T;
        fn deref(&self) -> &T {
            // SAFETY: a `Locked` guard exists only while `is_locked` is
            // true, so no other reference to the value can be produced.
            unsafe { &*self.lock.value.get() }
        }
    }
    impl<'a, T> DerefMut for Locked<'a, T> {
        fn deref_mut(&mut self) -> &mut T {
            // SAFETY: same exclusivity argument as `deref`.
            unsafe { &mut *self.lock.value.get() }
        }
    }
    impl<'a, T> Drop for Locked<'a, T> {
        fn drop(&mut self) {
            self.lock.is_locked.store(false, Ordering::SeqCst);
        }
    }
}
|
use super::engine::{Action, Round};
use super::state::GameState;
use crate::cards::BasicCard;
/// Renders `hand` as a space-separated string, ordered by the game state's
/// display order.
pub fn format_hand(hand: &[BasicCard], gs: &GameState) -> String {
    let mut ordered: Vec<&BasicCard> = hand.iter().collect();
    ordered.sort_by(|a, b| gs.display_order(a, b));
    let rendered: Vec<String> = ordered.iter().map(|card| card.to_string()).collect();
    rendered.join(" ")
}
/// Renders both players' scores and hands plus the trump suit, one line
/// per item, with a trailing newline.
pub fn format_state(gs: &GameState) -> String {
    let hand_one = format_hand(&gs.hands[0], gs);
    let hand_two = format_hand(&gs.hands[1], gs);
    format!(
        "Player 1: Score {}, Hand: {}\nPlayer 2: Score {}, Hand: {}\nTrump: {}\n",
        gs.score[0], hand_one, gs.score[1], hand_two, gs.trump
    )
}
/// Renders a full round: the state summary followed by the current phase,
/// with a trailing newline.
pub fn format_round(round: &Round) -> String {
    let mut out = format_state(round.get_state());
    out.push_str(&round.get_phase().format(round.get_state()));
    out.push('\n');
    out
}
/// Renders a single play, converting the zero-based player index to the
/// 1-based number shown to users.
pub fn format_action(action: &Action) -> String {
    let display_player = action.player + 1;
    format!("Player {} plays {}.", display_player, action.card)
}
|
use std::thread;
use std::time::Duration;
/// Prints a workout plan for the given intensity.
///
/// Low intensity (< 25) prescribes pushups and situps; otherwise the plan
/// is a run, except when `random_number` is 3, which prescribes a rest day.
fn workout_plan(intensity: u32, random_number: u32) {
    // Call the expensive simulation exactly once and reuse the result below
    // (the refactor this example demonstrates).
    let expensive_call = simulated_expensive_calculations(intensity);
    if intensity < 25 {
        println!("Today, do {} pushups!", expensive_call);
        println!("Next, do {} situps!", expensive_call);
    } else if random_number == 3 {
        // Flattened from a nested `else { if .. }` (clippy::collapsible_else_if).
        println!("Take a break today! Remember to stay hydrated!");
    } else {
        println!("Today run for {} minutes", expensive_call);
    }
}
/// Simulates a slow computation: prints a progress message, sleeps for two
/// seconds, then returns the intensity unchanged.
fn simulated_expensive_calculations(intensity: u32) -> u32 {
    println!("Calculating slowly..");
    thread::sleep(Duration::from_secs(2));
    intensity
}
/// Drives the workout planner with hard-coded sample inputs.
fn main() {
    let intensity = 10;
    let random_number = 7;
    workout_plan(intensity, random_number);
}
|
/// Project Euler problem 5: smallest positive number evenly divisible by
/// all of 1..=20 (printed to stdout).
fn main() {
    // Earlier commented-out experiments (problems 2-4: even Fibonacci sum,
    // largest prime factor, palindrome products) were removed for
    // readability -- recover them from version control if needed.

    // Divisibility by 11..=20 implies divisibility by 1..=10, so only the
    // larger half needs checking; counting in steps of 19 (the largest
    // prime <= 20) prunes candidates.
    let divisors = vec![20, 19, 18, 17, 16, 15, 14, 13, 12, 11];
    let mut candidate = 19;
    // `loop` replaces the non-idiomatic `while true` (clippy::while_true);
    // the unused `v10` vector from the original draft is dropped.
    loop {
        if divisors.iter().all(|&d| candidate % d == 0) {
            println!("{:?}", candidate);
            break;
        }
        candidate += 19;
    }
}
|
impl Solution {
pub fn reverse_parentheses(s: String) -> String {
let n = s.len();
let s = s.as_bytes();
let mut res = String::new();
let mut stk = Vec::new();
for i in 0..n{
if s[i] == b'('{
stk.push(res.clone());
res = "".to_string();
}else if s[i] == b')'{
res = res.chars().rev().collect();
res = stk.pop().unwrap().clone() + &res;
}else{
res.push(s[i] as char);
}
}
res
}
} |
#[doc = "Register `AXIMC_PERIPH_ID_2` reader"]
pub type R = crate::R<AXIMC_PERIPH_ID_2_SPEC>;
#[doc = "Field `PERIPH_ID_2` reader - PERIPH_ID_2"]
pub type PERIPH_ID_2_R = crate::FieldReader;
impl R {
    #[doc = "Bits 0:7 - PERIPH_ID_2"]
    #[inline(always)]
    pub fn periph_id_2(&self) -> PERIPH_ID_2_R {
        // Only bits 0:7 of the 32-bit register are exposed by this reader.
        PERIPH_ID_2_R::new((self.bits & 0xff) as u8)
    }
}
#[doc = "AXIMC peripheral ID2 register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`aximc_periph_id_2::R`](R). See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct AXIMC_PERIPH_ID_2_SPEC;
impl crate::RegisterSpec for AXIMC_PERIPH_ID_2_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`aximc_periph_id_2::R`](R) reader structure"]
impl crate::Readable for AXIMC_PERIPH_ID_2_SPEC {}
#[doc = "`reset()` method sets AXIMC_PERIPH_ID_2 to value 0x3b"]
impl crate::Resettable for AXIMC_PERIPH_ID_2_SPEC {
    const RESET_VALUE: Self::Ux = 0x3b;
}
|
use std::any::{Any as StdAny, TypeId};
#[cfg(feature = "nightly")]
use std::convert::TryFrom;
#[cfg(feature = "nightly")]
use std::intrinsics;
#[cfg(feature = "nightly")]
/// Human-readable name of `T`, via the (then-unstable) compiler intrinsic.
pub(crate) fn type_name<T: StdAny + ?Sized>() -> &'static str {
    // SAFETY: the intrinsic only produces a static string describing T.
    unsafe { intrinsics::type_name::<T>() }
}
#[cfg(not(feature = "nightly"))]
/// Stable-channel fallback: the intrinsic is unavailable, so return a
/// placeholder instead of the real type name.
pub(crate) fn type_name<T: StdAny + ?Sized>() -> &'static str {
    "[ONLY ON NIGHTLY]"
}
/// FIXME(https://github.com/rust-lang/rust/issues/27745) remove this
pub trait Any: StdAny {
    /// Returns the `TypeId` of `Self`.
    fn type_id(&self) -> TypeId {
        TypeId::of::<Self>()
    }
    #[doc(hidden)]
    fn type_name(&self) -> &'static str {
        type_name::<Self>()
    }
}
/// Blanket impl: every `std::any::Any` type gets this extension for free.
impl<T> Any for T where T: StdAny + ?Sized {}
|
fn main() {
    // let mut s = String::from("hello world");
    // let word = first_word(&s);
    // println!("xxx:{}",word);
    // `clear` needs a mutable borrow, but while the immutable borrow `word`
    // is alive we cannot take one, so the line below would be an error.
    // s.clear();
    let my_string = String::from("hello world");
    // Slices of a `String` coerce to `&str`.
    let word = first_word(&my_string[..]);
    println!("word:{}",word);
    let my_string_literal = "hello world";
    let word = first_word(&my_string_literal[..]);
    println!("word:{}",word);
    // String literals already are `&str`, so they can be passed directly.
    let word = first_word(my_string_literal);
    println!("word:{}",word);
}
// fn first_word(s: &String) -> usize{
// let bytes = s.as_bytes();
// for(i,&item) in bytes.iter().enumerate(){
// if item == b' '{
// return i;
// }
// }
// s.len()
// }
// The type for a string slice is written `&str`
// fn first_word(s: &String) -> &str{
/// Returns the first space-separated word of `s`, or the whole string when
/// it contains no space.
///
/// Scans bytes with the standard-library `position` instead of a manual
/// indexed loop; slicing at the index is safe because ' ' is single-byte
/// ASCII and therefore always a char boundary.
fn first_word(s: &str) -> &str {
    match s.as_bytes().iter().position(|&b| b == b' ') {
        Some(i) => &s[..i],
        None => s,
    }
}
use clippy_utils::diagnostics::span_lint_and_sugg;
use clippy_utils::is_trait_method;
use clippy_utils::source::snippet;
use rustc_errors::Applicability;
use rustc_hir as hir;
use rustc_lint::LateContext;
use rustc_span::sym;
use super::ITER_SKIP_NEXT;
/// Lints `iter.skip(n).next()` and suggests the equivalent `iter.nth(n)`.
pub(super) fn check(cx: &LateContext<'_>, expr: &hir::Expr<'_>, skip_args: &[hir::Expr<'_>]) {
    // lint if caller of skip is an Iterator
    if is_trait_method(cx, expr, sym::Iterator) {
        // `skip_args` holds the receiver plus the arguments of `.skip(..)`;
        // only the one-argument form `caller.skip(n)` is linted.
        if let [caller, n] = skip_args {
            let hint = format!(".nth({})", snippet(cx, n.span, ".."));
            span_lint_and_sugg(
                cx,
                ITER_SKIP_NEXT,
                // Span from the end of the receiver to the end of `.next()`,
                // so the suggestion replaces `.skip(n).next()` only.
                expr.span.trim_start(caller.span).unwrap(),
                "called `skip(..).next()` on an iterator",
                "use `nth` instead",
                hint,
                Applicability::MachineApplicable,
            );
        }
    }
}
|
/// Lowercase hexadecimal digit characters, indexed by nibble value.
const CONVERSION: [char; 16] = [
    '0', '1', '2', '3', '4', '5', '6', '7',
    '8', '9', 'a', 'b', 'c', 'd', 'e', 'f',
];

/// Maps a nibble (0..=15) to its lowercase hex digit.
///
/// Panics (index out of bounds) if `a` is 16 or greater, exactly like the
/// original table lookup.
pub fn convert_ascii(a: u8) -> char {
    CONVERSION[usize::from(a)]
}
use std::borrow::Cow;
type Index = u32;

/// A string that is either a pair of byte indexes into some external base
/// string, or a concrete (owned or borrowed) string.
#[derive(Debug, Clone)]
pub enum IndexedStr {
    Indexed(Index, Index),
    Concrete(Cow<'static, str>)
}

impl IndexedStr {
    /// Whether this string is derived from indexes or not.
    pub fn is_indexed(&self) -> bool {
        match *self {
            IndexedStr::Concrete(..) => false,
            IndexedStr::Indexed(..) => true,
        }
    }

    /// Resolves `self` to a `&str`: an indexed value slices the provided
    /// base `string`; a concrete value ignores `string` entirely.
    ///
    /// # Panics
    ///
    /// Panics if `self` is an indexed string and `string` is `None`.
    pub fn to_str<'a>(&'a self, string: Option<&'a str>) -> &'a str {
        match *self {
            IndexedStr::Indexed(from, to) => {
                let base = string
                    .expect("Cannot convert indexed str to str without base string!");
                &base[(from as usize)..(to as usize)]
            }
            IndexedStr::Concrete(ref s) => s,
        }
    }

    /// Builds an `Indexed` value when `needle` is a subslice of `haystack`
    /// (compared by pointer range); returns `None` otherwise.
    pub fn from(needle: &str, haystack: &str) -> Option<IndexedStr> {
        let hay_start = haystack.as_ptr() as usize;
        let needle_start = needle.as_ptr() as usize;
        let inside = needle_start >= hay_start
            && (needle_start + needle.len()) <= (hay_start + haystack.len());
        if !inside {
            return None;
        }
        let start = needle_start - hay_start;
        Some(IndexedStr::Indexed(start as Index, (start + needle.len()) as Index))
    }
}
|
#[doc = "Register `RDATA13R` reader"]
pub type R = crate::R<RDATA13R_SPEC>;
#[doc = "Field `RDATA1` reader - Regular conversion data for SDADC1"]
pub type RDATA1_R = crate::FieldReader<u16>;
#[doc = "Field `RDATA3` reader - Regular conversion data for SDADC3"]
pub type RDATA3_R = crate::FieldReader<u16>;
impl R {
    #[doc = "Bits 0:15 - Regular conversion data for SDADC1"]
    #[inline(always)]
    pub fn rdata1(&self) -> RDATA1_R {
        // Low half-word of the packed register.
        RDATA1_R::new((self.bits & 0xffff) as u16)
    }
    #[doc = "Bits 16:31 - Regular conversion data for SDADC3"]
    #[inline(always)]
    pub fn rdata3(&self) -> RDATA3_R {
        // High half-word of the packed register.
        RDATA3_R::new(((self.bits >> 16) & 0xffff) as u16)
    }
}
#[doc = "SDADC1 and SDADC3 regular data register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`rdata13r::R`](R). See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct RDATA13R_SPEC;
impl crate::RegisterSpec for RDATA13R_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`rdata13r::R`](R) reader structure"]
impl crate::Readable for RDATA13R_SPEC {}
#[doc = "`reset()` method sets RDATA13R to value 0"]
impl crate::Resettable for RDATA13R_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
//use std::env;
use std::fs;
use super::super::constants;
use log;
use serde::{Serialize, Deserialize};
use serde_json::Error;
/// Application configuration, deserialized from a JSON file
/// (see `Config::load` / `Config::from_file`).
#[derive(Serialize, Deserialize, Debug)]
pub struct Config {
    pub site_domain: String,
    pub site_author: String,
    pub author_twitter: String,
    pub author_email: String,
    pub author_github_name: String,
    // NOTE(review): presumably the listen port paired with `host` below --
    // confirm against the server setup code.
    pub port: u32,
    pub host: String,
    pub db_file: String,
    pub static_files: String,
}
impl Config {
pub fn load() -> Self {
Self::from_file(constants::DEFAULT_CONFIG_FILE).expect("Unable to load file ./config.json")
}
pub fn from_file(file_path: &str) -> Result<Self, &str> {
let file_contents = match fs::read_to_string(file_path) {
Ok(content) => content,
Err(_) => panic!("Unable to read file {}", file_path)
};
let json_config : Result<Config, Error> = serde_json::from_str(&file_contents);
match json_config {
Ok(conf) => Ok(conf),
Err(_) => {
log::error!("Error parsing file {}", file_path);
Err("Parse error")
}
}
}
}
|
//! 914. 卡牌分组
//! https://leetcode-cn.com/problems/x-of-a-kind-in-a-deck-of-cards/
use std::collections::HashMap;
/// Greatest common divisor via Euclid's algorithm; `gcd(0, y) == y`.
fn gcd(x: i32, y: i32) -> i32 {
    if x == 0 {
        return y;
    }
    gcd(y % x, x)
}
pub struct Solution;
impl Solution {
    /// Counting-array variant: the deck can be split into groups of equal
    /// size (>= 2) of identical cards iff the GCD of all card counts is at
    /// least 2. Assumes card values fit in 0..10000 (LeetCode constraint).
    pub fn has_groups_size_x(deck: Vec<i32>) -> bool {
        if deck.len() <= 1 {
            return false;
        }
        let mut counts: [i32; 10000] = [0; 10000];
        for x in deck {
            counts[x as usize] += 1;
        }
        // gcd(0, c) == c, so folding from 0 skips absent values naturally.
        counts.iter().fold(0, |g, &c| gcd(g, c)) >= 2
    }
    /// HashMap variant of the same idea, usable for unbounded value ranges.
    pub fn has_groups_size_x2(deck: Vec<i32>) -> bool {
        if deck.len() <= 1 {
            return false;
        }
        // Entry API: one lookup per card instead of the previous
        // get_mut-then-insert double lookup (clippy::map_entry).
        let mut counts: HashMap<i32, i32> = HashMap::new();
        for x in &deck {
            *counts.entry(*x).or_insert(0) += 1;
        }
        let mut g = -1; // -1 marks "no count folded in yet"
        for &c in counts.values() {
            g = if g == -1 { c } else { gcd(g, c) };
        }
        g >= 2
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // `assert!` / `assert!(!..)` instead of `assert_eq!(.., true/false)`
    // (clippy::bool_assert_comparison).
    #[test]
    fn test_has_groups_size_x() {
        assert!(Solution::has_groups_size_x(vec![1, 2, 3, 4, 4, 3, 2, 1]));
        assert!(!Solution::has_groups_size_x(vec![1, 1, 1, 2, 2, 2, 3, 3]));
        assert!(!Solution::has_groups_size_x(vec![1]));
        assert!(Solution::has_groups_size_x(vec![1, 1]));
        assert!(Solution::has_groups_size_x(vec![1, 1, 2, 2, 2, 2]));
    }
    #[test]
    fn test_has_groups_size_x2() {
        assert!(Solution::has_groups_size_x2(vec![1, 2, 3, 4, 4, 3, 2, 1]));
        assert!(!Solution::has_groups_size_x2(vec![1, 1, 1, 2, 2, 2, 3, 3]));
        assert!(!Solution::has_groups_size_x2(vec![1]));
        assert!(Solution::has_groups_size_x2(vec![1, 1]));
        assert!(Solution::has_groups_size_x2(vec![1, 1, 2, 2, 2, 2]));
    }
}
|
extern crate chrono;
use chrono::DateTime;
use std::fmt::Debug;
/// Structural diffing: returns `None` when the two values are equal, or a
/// list of [`Difference`] records (one per differing field/element) otherwise.
pub trait Diff: Debug + PartialEq {
    fn diff<'a>(&'a self, other: &'a Self) -> Option<Vec<Difference<'a>>>;
}
/// Field that differs: a dotted/bracketed path plus borrowed views of the
/// left- and right-hand values (bare trait objects — 2015-edition style).
#[derive(Debug)]
pub struct Difference<'a> {
    // Path of the differing field; empty string means "the value itself".
    pub field: String,
    pub left: &'a Debug,
    pub right: &'a Debug,
}
// Implements `Diff` for a leaf type: unequal values yield a single
// root-level difference with an empty field path.
macro_rules! impl_for_prim {
    ($t: ty) => {
        impl Diff for $t {
            fn diff<'a>(&'a self, other: &'a Self) -> Option<Vec<Difference<'a>>> {
                if self != other {
                    return Some(vec![Difference {
                        field: String::new(),
                        left: self,
                        right: other,
                    }]);
                }
                None
            }
        }
    };
}
impl<'b> Diff for &'b str {
    /// Leaf comparison for string slices: equal strings diff to `None`,
    /// otherwise a single root-level difference.
    fn diff<'a>(&'a self, other: &'a Self) -> Option<Vec<Difference<'a>>> {
        if self == other {
            None
        } else {
            Some(vec![Difference {
                field: String::new(),
                left: self,
                right: other,
            }])
        }
    }
}
impl<T: chrono::TimeZone> Diff for DateTime<T> {
    /// Leaf comparison for timestamps in any timezone.
    fn diff<'a>(&'a self, other: &'a Self) -> Option<Vec<Difference<'a>>> {
        if self == other {
            None
        } else {
            Some(vec![Difference {
                field: String::new(),
                left: self,
                right: other,
            }])
        }
    }
}
impl<T> Diff for Option<T> where T: std::fmt::Debug + PartialEq + Diff {
    /// `Some`/`Some` recurses into the payload; a `Some`/`None` mismatch is
    /// reported with the field path naming the side that is absent/present.
    fn diff<'a>(&'a self, other: &'a Self) -> Option<Vec<Difference<'a>>> {
        match (self, other) {
            (&Some(ref l), &Some(ref r)) => l.diff(r),
            (&None, &None) => None,
            (&None, &Some(_)) => Some(vec![Difference {
                field: "none".to_owned(),
                left: self,
                right: other,
            }]),
            (&Some(_), &None) => Some(vec![Difference {
                field: "some".to_owned(),
                left: self,
                right: other,
            }]),
        }
    }
}
impl<T> Diff for [T] where T: Diff {
    /// Element-wise diff of two slices; paths are `[i]` or `[i].inner`.
    ///
    /// Fixed: `zip` stops at the shorter slice, so two slices with an equal
    /// common prefix but different lengths collected no element diffs and
    /// incorrectly returned `None` even though `self != other`. A length
    /// mismatch is now reported as its own difference.
    fn diff<'a>(&'a self, other: &'a Self) -> Option<Vec<Difference<'a>>> {
        if self != other {
            let mut diffs = Vec::new();
            for (i, (left, right)) in self.iter().zip(other.iter()).enumerate() {
                if let Some(inner_diffs) = left.diff(right) {
                    for diff in inner_diffs {
                        let mut path = format!("[{}]", i);
                        if !diff.field.is_empty() {
                            path.push_str(".");
                        }
                        path.push_str(&diff.field);
                        diffs.push(Difference {
                            field: path,
                            left: diff.left,
                            right: diff.right,
                        });
                    }
                }
            }
            if self.len() != other.len() {
                // Borrow the whole slices as the left/right views.
                diffs.push(Difference {
                    field: "len".to_owned(),
                    left: self,
                    right: other,
                });
            }
            if diffs.len() > 0 {
                return Some(diffs)
            }
        }
        None
    }
}
// Leaf `Diff` impls for the primitive/value types used by derived diffs.
impl_for_prim!(bool);
impl_for_prim!(isize);
impl_for_prim!(i8);
impl_for_prim!(i16);
impl_for_prim!(i32);
impl_for_prim!(i64);
impl_for_prim!(usize);
impl_for_prim!(u8);
impl_for_prim!(u16);
impl_for_prim!(u32);
impl_for_prim!(u64);
impl_for_prim!(f32);
impl_for_prim!(f64);
impl_for_prim!(char);
impl_for_prim!(String);
impl_for_prim!(chrono::NaiveDateTime);
use icfp2019::prelude::Result;
use loggerv;
use structopt::StructOpt;
// Top-level CLI options. NOTE: `//` comments only — structopt turns `///`
// doc comments into user-visible help text, which would change behavior.
#[derive(StructOpt, Debug)]
struct Opt {
    // Each `-v` occurrence raises the log verbosity by one.
    #[structopt(short = "v", parse(from_occurrences))]
    verbose: u64,
    #[structopt(subcommand)]
    cmd: Command,
}
// Subcommands of the solver binary; names are the literal CLI spellings.
#[derive(StructOpt, Debug)]
enum Command {
    // Solve a single problem; `--id` defaults to 0 at the call site.
    #[structopt(name = "run")]
    Run {
        #[structopt(long = "id")]
        id: Option<u64>,
    },
    #[structopt(name = "run-all")]
    RunAll,
    #[structopt(name = "test-run")]
    TestRun {
        #[structopt(long = "id")]
        id: Option<u64>,
    },
    #[structopt(name = "report")]
    Report,
    #[structopt(name = "update-best")]
    UpdateBest,
    // Placeholder: `main` hits `unimplemented!()` for this variant.
    #[structopt(name = "ci")]
    Ci,
}
fn main() -> Result<()> {
let opt = Opt::from_args();
loggerv::init_with_verbosity(opt.verbose).unwrap();
match opt.cmd {
Command::Run { id } => icfp2019::run::run(id.unwrap_or(0)),
Command::TestRun { id } => icfp2019::run::test_run(id.unwrap_or(0)),
Command::RunAll => icfp2019::run::run_all(),
Command::Report => icfp2019::run::report(),
Command::UpdateBest => icfp2019::run::update_best(),
Command::Ci => unimplemented!(),
}
}
|
use thiserror::Error;
#[derive(Debug, Error)]
pub enum Error {
#[error(transparent)]
KubeError(#[from] kube::error::Error),
#[error("missing object key in {0})")]
MissingObjectKey(&'static str),
}
|
use cgmath::{Vector1, Vector2, Vector3, Vector4};
use cgmath::{Matrix2, Matrix3, Matrix4};
use std::mem::transmute;
/// Conversion from a cgmath type into its plain `Copy` array representation.
pub trait ToRawMath {
    /// The raw array type produced by [`to_raw`](Self::to_raw).
    type Raw: Copy;
    /// Consume `self` and return the raw representation.
    fn to_raw(self) -> Self::Raw;
}
// Implements `ToRawMath` by transmuting the cgmath type into an f32 array.
// SAFETY assumption: $from and $to have identical size and layout for every
// pairing listed below — TODO confirm against the pinned cgmath version.
macro_rules! impl_to_raw {
    ($from:ty, $to:ty) => {
        impl ToRawMath for $from {
            type Raw = $to;
            fn to_raw(self) -> Self::Raw {
                unsafe { transmute(self) }
            }
        }
    }
}
// Vector N -> [f32; N]; Matrix N -> column-major [[f32; N]; N] arrays.
impl_to_raw!(Vector1<f32>, [f32; 1]);
impl_to_raw!(Vector2<f32>, [f32; 2]);
impl_to_raw!(Vector3<f32>, [f32; 3]);
impl_to_raw!(Vector4<f32>, [f32; 4]);
impl_to_raw!(Matrix2<f32>, [[f32; 2]; 2]);
impl_to_raw!(Matrix3<f32>, [[f32; 3]; 3]);
impl_to_raw!(Matrix4<f32>, [[f32; 4]; 4]);
|
// svd2rust-style generated register accessors. Mojibake ("8�Kbytes") in the
// generated doc strings repaired to "8 Kbytes".
#[doc = "Register `HDPEXTR` reader"]
pub type R = crate::R<HDPEXTR_SPEC>;
#[doc = "Register `HDPEXTR` writer"]
pub type W = crate::W<HDPEXTR_SPEC>;
#[doc = "Field `HDP1_EXT` reader - HDP area extension in 8 Kbytes sectors in Bank1. Extension is added after the HDP1_END sector (included)."]
pub type HDP1_EXT_R = crate::FieldReader;
#[doc = "Field `HDP1_EXT` writer - HDP area extension in 8 Kbytes sectors in Bank1. Extension is added after the HDP1_END sector (included)."]
pub type HDP1_EXT_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 7, O>;
#[doc = "Field `HDP2_EXT` reader - HDP area extension in 8 Kbytes sectors in bank 2. Extension is added after the HDP2_END sector (included)."]
pub type HDP2_EXT_R = crate::FieldReader;
#[doc = "Field `HDP2_EXT` writer - HDP area extension in 8 Kbytes sectors in bank 2. Extension is added after the HDP2_END sector (included)."]
pub type HDP2_EXT_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 7, O>;
impl R {
    #[doc = "Bits 0:6 - HDP area extension in 8 Kbytes sectors in Bank1. Extension is added after the HDP1_END sector (included)."]
    #[inline(always)]
    pub fn hdp1_ext(&self) -> HDP1_EXT_R {
        // 7-bit field at bit offset 0.
        HDP1_EXT_R::new((self.bits & 0x7f) as u8)
    }
    #[doc = "Bits 16:22 - HDP area extension in 8 Kbytes sectors in bank 2. Extension is added after the HDP2_END sector (included)."]
    #[inline(always)]
    pub fn hdp2_ext(&self) -> HDP2_EXT_R {
        // 7-bit field at bit offset 16.
        HDP2_EXT_R::new(((self.bits >> 16) & 0x7f) as u8)
    }
}
impl W {
    #[doc = "Bits 0:6 - HDP area extension in 8 Kbytes sectors in Bank1. Extension is added after the HDP1_END sector (included)."]
    #[inline(always)]
    #[must_use]
    pub fn hdp1_ext(&mut self) -> HDP1_EXT_W<HDPEXTR_SPEC, 0> {
        HDP1_EXT_W::new(self)
    }
    #[doc = "Bits 16:22 - HDP area extension in 8 Kbytes sectors in bank 2. Extension is added after the HDP2_END sector (included)."]
    #[inline(always)]
    #[must_use]
    pub fn hdp2_ext(&mut self) -> HDP2_EXT_W<HDPEXTR_SPEC, 16> {
        HDP2_EXT_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        // Unsafe: bypasses field-level masking; caller supplies raw bits.
        self.bits = bits;
        self
    }
}
#[doc = "FLASH HDP extension register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`hdpextr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`hdpextr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct HDPEXTR_SPEC;
impl crate::RegisterSpec for HDPEXTR_SPEC {
    // 32-bit register.
    type Ux = u32;
}
#[doc = "`read()` method returns [`hdpextr::R`](R) reader structure"]
impl crate::Readable for HDPEXTR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`hdpextr::W`](W) writer structure"]
impl crate::Writable for HDPEXTR_SPEC {
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets HDPEXTR to value 0"]
impl crate::Resettable for HDPEXTR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
mod counts;
mod globals;
mod list;
use std::sync::Arc;
use twilight_model::application::{
command::CommandOptionChoice,
interaction::{application_command::CommandOptionValue, ApplicationCommand},
};
use crate::{
commands::{
osu::{option_country, option_discord, option_mode, option_mods_explicit, option_name},
DoubleResultCow, MyCommand, MyCommandOption,
},
custom_client::OsuStatsListParams,
util::{
constants::common_literals::{ACC, ACCURACY, COMBO, MISSES, RANK, REVERSE, SCORE, SORT},
MessageExt,
},
BotResult, Context, Error,
};
pub use self::{counts::*, globals::*, list::*};
use super::{get_globals_count, require_link};
/// Parsed form of the three `/osustats` subcommands.
enum OsustatsCommandKind {
    Count(CountArgs),
    Players(OsuStatsListParams),
    Scores(ScoresArgs),
}
impl OsustatsCommandKind {
    /// Extract the invoked subcommand ("count" | "players" | "scores") from
    /// the slash-command payload and parse its options.
    ///
    /// Double-result convention: the outer `Err` is a hard failure, the
    /// inner `Err(content)` is a user-facing error message.
    async fn slash(ctx: &Context, command: &mut ApplicationCommand) -> DoubleResultCow<Self> {
        let option = command
            .data
            .options
            .pop()
            .ok_or(Error::InvalidCommandOptions)?;
        match option.value {
            CommandOptionValue::SubCommand(options) => match option.name.as_str() {
                "count" => match CountArgs::slash(ctx, command, options).await? {
                    Ok(args) => Ok(Ok(Self::Count(args))),
                    Err(content) => Ok(Err(content)),
                },
                "players" => match OsuStatsListParams::slash(options)? {
                    Ok(args) => Ok(Ok(Self::Players(args))),
                    Err(content) => Ok(Err(content.into())),
                },
                "scores" => match ScoresArgs::slash(ctx, command, options).await? {
                    Ok(args) => Ok(Ok(Self::Scores(args))),
                    Err(content) => Ok(Err(content)),
                },
                // Unknown subcommand name: treat as malformed input.
                _ => Err(Error::InvalidCommandOptions),
            },
            _ => Err(Error::InvalidCommandOptions),
        }
    }
}
/// Entry point for the `/osustats` slash command: parse the subcommand and
/// dispatch; user-facing parse errors are reported via `command.error`.
pub async fn slash_osustats(ctx: Arc<Context>, mut command: ApplicationCommand) -> BotResult<()> {
    match OsustatsCommandKind::slash(&ctx, &mut command).await? {
        Ok(OsustatsCommandKind::Count(args)) => _count(ctx, command.into(), args).await,
        Ok(OsustatsCommandKind::Players(args)) => _players(ctx, command.into(), args).await,
        Ok(OsustatsCommandKind::Scores(args)) => _scores(ctx, command.into(), args).await,
        Err(content) => command.error(&ctx, content).await,
    }
}
/// Optional integer option `min_rank`, constrained to 1..=100.
fn option_min_rank() -> MyCommandOption {
    MyCommandOption::builder("min_rank", "Specify a min rank between 1 and 100")
        .min_int(1)
        .max_int(100)
        .integer(Vec::new(), false)
}
/// Optional integer option `max_rank`, constrained to 1..=100.
fn option_max_rank() -> MyCommandOption {
    MyCommandOption::builder("max_rank", "Specify a max rank between 1 and 100")
        .min_int(1)
        .max_int(100)
        .integer(Vec::new(), false)
}
/// Build the `/osustats` command definition with its three subcommands.
///
/// Note: `mode`/`name`/`discord` are re-created before each subcommand
/// because the builder values are moved into each `subcommand(...)` call.
pub fn define_osustats() -> MyCommand {
    let mode = option_mode();
    let name = option_name();
    let discord = option_discord();
    let count_description =
        "Count how often a user appears on top of map leaderboards (same as `/osc`)";
    let count =
        MyCommandOption::builder("count", count_description).subcommand(vec![mode, name, discord]);
    let mode = option_mode();
    let country = option_country();
    let min_rank = option_min_rank();
    let max_rank = option_max_rank();
    let players_description = "National player leaderboard of global leaderboard counts";
    let players = MyCommandOption::builder("players", players_description)
        .help("List players of a country and how often they appear on global map leaderboards.")
        .subcommand(vec![mode, country, min_rank, max_rank]);
    let mode = option_mode();
    let name = option_name();
    // Choices for the `sort` option; display name -> stored value.
    let sort_choices = vec![
        CommandOptionChoice::String {
            name: ACCURACY.to_owned(),
            value: ACC.to_owned(),
        },
        CommandOptionChoice::String {
            name: COMBO.to_owned(),
            value: COMBO.to_owned(),
        },
        CommandOptionChoice::String {
            name: MISSES.to_owned(),
            value: MISSES.to_owned(),
        },
        CommandOptionChoice::String {
            name: "pp".to_owned(),
            value: "pp".to_owned(),
        },
        CommandOptionChoice::String {
            name: RANK.to_owned(),
            value: RANK.to_owned(),
        },
        CommandOptionChoice::String {
            name: SCORE.to_owned(),
            value: SCORE.to_owned(),
        },
        CommandOptionChoice::String {
            name: "score date".to_owned(),
            value: "date".to_owned(),
        },
    ];
    let sort_help = "Choose how the scores should be ordered.\n\
        If not specified, it orders them by score date.";
    let sort = MyCommandOption::builder(SORT, "Choose how the scores should be ordered")
        .help(sort_help)
        .string(sort_choices, false);
    let mods = option_mods_explicit();
    let min_rank = option_min_rank();
    let max_rank = option_max_rank();
    let min_acc =
        MyCommandOption::builder("min_acc", "Specify a min accuracy between 0.0 and 100.0")
            .min_num(0.0)
            .max_num(100.0)
            .number(Vec::new(), false);
    let max_acc =
        MyCommandOption::builder("max_acc", "Specify a max accuracy between 0.0 and 100.0")
            .min_num(0.0)
            .max_num(100.0)
            .number(Vec::new(), false);
    let reverse =
        MyCommandOption::builder(REVERSE, "Reverse the resulting score list").boolean(false);
    let discord = option_discord();
    let scores_description = "All scores of a player that are on a map's global leaderboard";
    // Option order here is the order shown to the user in Discord.
    let scores = MyCommandOption::builder("scores", scores_description).subcommand(vec![
        mode, name, sort, mods, min_rank, max_rank, min_acc, max_acc, reverse, discord,
    ]);
    let description = "Stats about players' appearances in maps' leaderboards";
    let help = "Stats about scores that players have on maps' global leaderboards.\n\
        All data is provided by [osustats](https://osustats.ppy.sh/).\n\
        Note that the data usually __updates once per day__.";
    MyCommand::new("osustats", description)
        .help(help)
        .options(vec![count, players, scores])
}
|
//!Combining several log systems into one.
use cluExtIO::UnionWrite;
use crate::log_core::LogStatic;
use crate::log_core::LogLockIO;
use crate::log_core::LogBase;
use crate::log_core::LogFlush;
use crate::log_core::LogExtend;
use std::io::Write;
use std::fmt::Arguments;
use std::marker::PhantomData;
use std::io;
/// A logger that fans every call out to two inner loggers `A` and `B`.
/// `PhantomData` carries the `'a` lifetime used by the `Log*` traits.
#[derive(Debug)]
pub struct LogUnion<'a, A: LogExtend<'a>, B: LogExtend<'a>>(A, B, PhantomData<&'a ()>);
impl<'a, A: LogExtend<'a>, B: LogExtend<'a>> LogUnion<'a, A, B> {
    /// Combine two loggers into one that forwards to both.
    #[inline(always)]
    pub fn new(a: A, b: B) -> Self {
        LogUnion(a, b, PhantomData)
    }
    /// Convenience: construct and box in one step.
    #[inline(always)]
    pub fn new_b(a: A, b: B) -> Box<Self> {
        Self::new(a, b).to_box()
    }
    /// Move `self` onto the heap.
    #[inline(always)]
    pub fn to_box(self) -> Box<Self> {
        Box::new(self)
    }
}
impl<'a, A: LogExtend<'a> + Clone, B: LogExtend<'a> + Clone> Clone for LogUnion<'a, A, B> {
#[inline(always)]
fn clone(&self) -> Self {
LogUnion::new(self.0.clone(), self.1.clone())
}
}
impl<'a, A: LogExtend<'a>, B: LogExtend<'a>> LogFlush<'a> for LogUnion<'a, A, B> {
    // Both flushes always run; the first logger's error (if any) takes
    // precedence, matching the original early-return logic (`Result::and`).
    #[inline(always)]
    fn flush_out(&'a self) -> io::Result<()> {
        let first = self.0.flush_out();
        let second = self.1.flush_out();
        first.and(second)
    }
    #[inline(always)]
    fn flush_err(&'a self) -> io::Result<()> {
        let first = self.0.flush_err();
        let second = self.1.flush_err();
        first.and(second)
    }
}
impl<'a, A: LogExtend<'a>, B: LogExtend<'a>> LogBase<'a> for LogUnion<'a, A, B> {
    // Every method fans out to both loggers. Both calls always execute;
    // `Result::and` then yields the first logger's error if there was one,
    // otherwise the second's result — identical to the original
    // check-then-return flow, including the `info` variant.
    #[inline(always)]
    fn warning<'l>(&'a self, args: Arguments<'l>) -> io::Result<()> {
        let first = self.0.warning(args);
        let second = self.1.warning(args);
        first.and(second)
    }
    #[inline(always)]
    fn info<'l>(&'a self, args: Arguments<'l>) -> io::Result<()> {
        let first = self.0.info(args);
        let second = self.1.info(args);
        first.and(second)
    }
    #[inline(always)]
    fn error<'l>(&'a self, args: Arguments<'l>) -> io::Result<()> {
        let first = self.0.error(args);
        let second = self.1.error(args);
        first.and(second)
    }
    #[inline(always)]
    fn panic<'l>(&'a self, args: Arguments<'l>) -> io::Result<()> {
        let first = self.0.panic(args);
        let second = self.1.panic(args);
        first.and(second)
    }
    #[inline(always)]
    fn unknown<'l>(&'a self, name: &'static str, args: Arguments<'l>) -> io::Result<()> {
        let first = self.0.unknown(name, args);
        let second = self.1.unknown(name, args);
        first.and(second)
    }
    #[inline(always)]
    fn trace<'l>(&'a self, line: u32, pos: u32, file: &'static str, args: Arguments<'l>) -> io::Result<()> {
        let first = self.0.trace(line, pos, file, args);
        let second = self.1.trace(line, pos, file, args);
        first.and(second)
    }
    #[inline(always)]
    fn print<'l>(&'a self, args: Arguments<'l>) -> io::Result<()> {
        let first = self.0.print(args);
        let second = self.1.print(args);
        first.and(second)
    }
    #[inline(always)]
    fn eprint<'l>(&'a self, args: Arguments<'l>) -> io::Result<()> {
        let first = self.0.eprint(args);
        let second = self.1.eprint(args);
        first.and(second)
    }
}
impl<'a, A: LogExtend<'a>, B: LogExtend<'a>> LogLockIO<'a> for LogUnion<'a, A, B> {
    /// Lock both stdout-side writers and combine them into one `UnionWrite`.
    #[inline(always)]
    fn raw_lock_out(&'a self) -> Box<Write + 'a> {
        Box::new(UnionWrite::new(self.0.raw_lock_out(), self.1.raw_lock_out()))
    }
    /// Same for the stderr-side writers.
    #[inline(always)]
    fn raw_lock_err(&'a self) -> Box<Write + 'a> {
        Box::new(UnionWrite::new(self.0.raw_lock_err(), self.1.raw_lock_err()))
    }
}
// Marker-trait impls: LogExtend/LogStatic add no methods here.
impl<'a, A: LogExtend<'a>, B: LogExtend<'a>> LogExtend<'a> for LogUnion<'a, A, B> {}
impl<'a, A: LogExtend<'a>, B: LogExtend<'a>> LogStatic<'a> for LogUnion<'a, A, B> {}
/*
Log_base![LogUnion< 'a + A = LogExtend<'a>, B = LogExtend<'a> >:
trace[line, pos, file, args] => {
let e = self.0.trace(line, pos, file, args);
let e2 = self.1.trace(line, pos, file, args);
if let Err(_) = e {
return e;
}
e2
};
unknown[name, args] => {
let e = self.0.unknown(name, args);
let e2 = self.1.unknown(name, args);
if let Err(_) = e {
return e;
}
e2
};
warning[args] => {
let e = self.0.warning(args);
let e2 = self.1.warning(args);
if let Err(_) = e {
return e;
}
e2
};
info[args] => {
let e = self.0.info(args);
let e2 = self.1.info(args);
if let Err(e) = e {
return Err(e);
}
e2
};
error[args] => {
let e = self.0.error(args);
let e2 = self.1.error(args);
if let Err(_) = e {
return e;
}
e2
};
panic[args] => {
let e = self.0.panic(args);
let e2 = self.1.panic(args);
if let Err(_) = e {
return e;
}
e2
};
print[args] => {
let e = self.0.print(args);
let e2 = self.1.print(args);
if let Err(_) = e {
return e;
}
e2
};
eprint[args] => {
let e = self.0.eprint(args);
let e2 = self.1.eprint(args);
if let Err(_) = e {
return e;
}
e2
};
];
*/ |
//! Crate root: exposes the `rtt` module (presumably Real-Time Transfer
//! debugging output — confirm). `no_std` because it targets bare-metal.
#![no_std]
pub mod rtt;
|
use std::mem::ManuallyDrop;
use std::slice;
use napi::*;
use crate::sk::Bitmap;
/// Native half of the JS `ImageData` object: dimensions plus a raw pointer
/// to the RGBA pixel buffer (4 bytes per pixel, owned — see `Drop`).
#[derive(Debug, Clone)]
pub struct ImageData {
    pub(crate) width: usize,
    pub(crate) height: usize,
    // Raw pointer into a buffer originally allocated as a Vec<u8>.
    pub(crate) data: *const u8,
}
impl Drop for ImageData {
    /// Reclaim the pixel buffer.
    fn drop(&mut self) {
        // RGBA: 4 bytes per pixel.
        let len = (self.width * self.height * 4) as usize;
        // SAFETY assumption: `data` came from a Vec<u8> of exactly this
        // length and capacity (the constructor's ManuallyDrop'd buffer);
        // rebuilding the Vec hands ownership back so it frees on drop.
        // NOTE(review): Clone is derived, so a cloned ImageData would share
        // this pointer and double-free — confirm clones never outlive this.
        unsafe { Vec::from_raw_parts(self.data as *mut u8, len, len) };
    }
}
impl ImageData {
    /// Register the JS `ImageData` class backed by `image_data_constructor`.
    pub fn create_js_class(env: &Env) -> Result<JsFunction> {
        env.define_class("ImageData", image_data_constructor, &[])
    }
}
/// JS constructor supporting both overloads:
/// `new ImageData(width, height)` (zero-filled buffer) and
/// `new ImageData(uint8ClampedArray, width[, height])` (copied buffer).
/// The pixel buffer is exposed to JS as a borrowed Uint8ClampedArray while
/// ownership stays with the wrapped native `ImageData` (freed in `Drop`).
#[js_function(3)]
fn image_data_constructor(ctx: CallContext) -> Result<JsUndefined> {
    let first_arg = ctx.get::<JsUnknown>(0)?;
    let first_arg_type = first_arg.get_type()?;
    // ManuallyDrop: the Vec must NOT be freed here — `ImageData::drop`
    // reconstructs and frees it later.
    let ((js_width, width), (js_height, height), arraybuffer_length, mut initial_data) =
        match first_arg_type {
            ValueType::Number => {
                let js_width = unsafe { first_arg.cast::<JsNumber>() };
                let js_height = ctx.get::<JsNumber>(1)?;
                let width = js_width.get_uint32()?;
                let height = js_height.get_uint32()?;
                // 4 bytes per RGBA pixel, zero-initialized.
                let arraybuffer_length = (width * height * 4) as usize;
                Ok((
                    (js_width, width),
                    (js_height, height),
                    arraybuffer_length,
                    ManuallyDrop::new(vec![0u8; arraybuffer_length]),
                ))
            }
            ValueType::Object => {
                let image_data_ab = unsafe { first_arg.cast::<JsTypedArray>() }.into_value()?;
                if image_data_ab.typedarray_type != TypedArrayType::Uint8Clamped {
                    return Err(Error::new(
                        Status::InvalidArg,
                        "ImageData constructor: Argument 1 does not implement interface Uint8ClampedArray."
                            .to_owned(),
                    ));
                }
                let arraybuffer_length = image_data_ab.len();
                let js_width = ctx.get::<JsNumber>(1)?;
                let width = js_width.get_uint32()?;
                let (js_height, height) = if ctx.length == 3 {
                    // Explicit height: must agree with the buffer size.
                    let js_height = ctx.get::<JsNumber>(2)?;
                    let height = js_height.get_uint32()?;
                    if height * width * 4 != arraybuffer_length as u32 {
                        return Err(Error::new(
                            Status::InvalidArg,
                            "Index or size is negative or greater than the allowed amount".to_owned(),
                        ));
                    }
                    (js_height, height)
                } else {
                    // Height derived from buffer size and width.
                    let height = arraybuffer_length as u32 / width / 4u32;
                    (ctx.env.create_uint32(height)?, height)
                };
                Ok((
                    (js_width, width),
                    (js_height, height),
                    arraybuffer_length,
                    // Copy the caller's bytes into a buffer we own.
                    ManuallyDrop::new(unsafe {
                        slice::from_raw_parts(image_data_ab.as_ptr() as *const u8, arraybuffer_length)
                            .to_owned()
                    }),
                ))
            }
            _ => Err(Error::new(
                Status::InvalidArg,
                format!(
                    "Invalid type of first argument of ImageData constructor [{:?}]",
                    first_arg_type
                ),
            )),
        }?;
    let data_ptr = initial_data.as_mut_ptr();
    let image_data = ImageData {
        width: width as usize,
        height: height as usize,
        data: data_ptr,
    };
    // Borrowed (zero-copy) view over our buffer; noop finalizer because the
    // native ImageData keeps ownership.
    let arraybuffer = unsafe {
        ctx
            .env
            .create_arraybuffer_with_borrowed_data(data_ptr, arraybuffer_length, 0, noop_finalize)
    }?;
    let typed_array =
        arraybuffer
            .into_raw()
            .into_typedarray(TypedArrayType::Uint8Clamped, arraybuffer_length, 0)?;
    let mut this = ctx.this_unchecked::<JsObject>();
    ctx.env.wrap(&mut this, image_data)?;
    this.define_properties(&[
        Property::new(&ctx.env, "data")?
            .with_value(typed_array)
            .with_property_attributes(PropertyAttributes::Enumerable),
        Property::new(&ctx.env, "width")?
            .with_value(js_width)
            .with_property_attributes(PropertyAttributes::Enumerable),
        Property::new(&ctx.env, "height")?
            .with_value(js_height)
            .with_property_attributes(PropertyAttributes::Enumerable),
    ])?;
    ctx.env.get_undefined()
}
/// Native half of the JS `Image` object.
pub struct Image {
    // Decoded pixels; `None` until `src` has been assigned.
    pub bitmap: Option<Bitmap>,
    // Set to true when `src` is assigned (see `set_src`).
    pub complete: bool,
    pub alt: String,
}
impl Image {
    /// Register the JS `Image` class with its accessor properties.
    /// width/naturalWidth and height/naturalHeight share the same getters.
    pub fn create_js_class(env: &Env) -> Result<JsFunction> {
        env.define_class(
            "Image",
            image_constructor,
            &vec![
                Property::new(&env, "width")?
                    .with_getter(get_width)
                    .with_property_attributes(PropertyAttributes::Enumerable),
                Property::new(&env, "height")?
                    .with_getter(get_height)
                    .with_property_attributes(PropertyAttributes::Enumerable),
                Property::new(&env, "naturalWidth")?
                    .with_getter(get_width)
                    .with_property_attributes(PropertyAttributes::Enumerable),
                Property::new(&env, "naturalHeight")?
                    .with_getter(get_height)
                    .with_property_attributes(PropertyAttributes::Enumerable),
                Property::new(&env, "complete")?
                    .with_getter(get_complete)
                    .with_property_attributes(PropertyAttributes::Enumerable),
                Property::new(&env, "alt")?
                    .with_setter(set_alt)
                    .with_getter(get_alt),
                Property::new(&env, "src")?
                    .with_setter(set_src)
                    .with_getter(get_src),
            ],
        )
    }
}
/// JS `new Image()` constructor: starts empty (no bitmap, not complete)
/// with `_src` initialized to `undefined`.
#[js_function]
fn image_constructor(ctx: CallContext) -> Result<JsUndefined> {
    let mut this = ctx.this_unchecked::<JsObject>();
    this.set_named_property("_src", ctx.env.get_undefined()?)?;
    ctx.env.wrap(
        &mut this,
        Image {
            complete: false,
            bitmap: None,
            alt: String::new(),
        },
    )?;
    ctx.env.get_undefined()
}
/// Getter for `width`/`naturalWidth`.
///
/// Fixed: previously `unwrap()`ed the bitmap and panicked (aborting the
/// Node process) when read before `src` was set; now mirrors
/// HTMLImageElement semantics and reports 0 until a bitmap exists.
#[js_function]
fn get_width(ctx: CallContext) -> Result<JsNumber> {
    let this = ctx.this_unchecked::<JsObject>();
    let image = ctx.env.unwrap::<Image>(&this)?;
    let width = image.bitmap.as_ref().map(|b| b.width as f64).unwrap_or(0.0);
    ctx.env.create_double(width)
}
/// Getter for `height`/`naturalHeight`.
///
/// Fixed: same as `get_width` — returns 0 instead of panicking when no
/// bitmap has been loaded yet.
#[js_function]
fn get_height(ctx: CallContext) -> Result<JsNumber> {
    let this = ctx.this_unchecked::<JsObject>();
    let image = ctx.env.unwrap::<Image>(&this)?;
    let height = image.bitmap.as_ref().map(|b| b.height as f64).unwrap_or(0.0);
    ctx.env.create_double(height)
}
/// Getter for `complete`: whether `src` has been assigned.
#[js_function]
fn get_complete(ctx: CallContext) -> Result<JsBoolean> {
    let this = ctx.this_unchecked::<JsObject>();
    let loaded = ctx.env.unwrap::<Image>(&this)?.complete;
    ctx.env.get_boolean(loaded)
}
/// Getter for `alt`: copies the stored text into a JS string.
#[js_function]
fn get_alt(ctx: CallContext) -> Result<JsString> {
    let this = ctx.this_unchecked::<JsObject>();
    let text = ctx.env.unwrap::<Image>(&this)?.alt.clone();
    ctx.env.create_string(text.as_str())
}
/// Setter for `alt`: stores the UTF-8 text on the native object.
#[js_function(1)]
fn set_alt(ctx: CallContext) -> Result<JsUndefined> {
    let this = ctx.this_unchecked::<JsObject>();
    // Fixed: `unwrap` already yields a mutable reference, so the binding
    // itself does not need `mut` (the old `let mut image` drew a warning).
    let image = ctx.env.unwrap::<Image>(&this)?;
    let arg = ctx.get::<JsString>(0)?.into_utf8()?;
    image.alt = arg.as_str()?.to_string();
    ctx.env.get_undefined()
}
/// Getter for `src`: returns whatever was stashed on the `_src` property
/// (the raw buffer set by `set_src`, or `undefined` initially).
#[js_function]
fn get_src(ctx: CallContext) -> Result<JsUnknown> {
    let this = ctx.this_unchecked::<JsObject>();
    this.get_named_property("_src")
}
/// Setter for `src`: decodes the given buffer into a bitmap, marks the
/// image complete, and stashes the raw buffer on `_src` for the getter.
#[js_function(1)]
fn set_src(ctx: CallContext) -> Result<JsUndefined> {
    let mut this = ctx.this_unchecked::<JsObject>();
    let src_arg = ctx.get::<JsBuffer>(0)?;
    let src_data = src_arg.into_value()?;
    let image = ctx.env.unwrap::<Image>(&this)?;
    let length = (&src_data).len();
    image.complete = true;
    // NOTE(review): `get_or_insert` keeps the FIRST bitmap — assigning src
    // a second time does not replace the decoded image. Confirm intended.
    image
        .bitmap
        .get_or_insert(Bitmap::from_buffer(src_data.as_ptr() as *mut u8, length));
    // Pointer is read above before the buffer is handed back to JS here.
    this.set_named_property("_src", src_data.into_raw())?;
    ctx.env.get_undefined()
}
|
use std::hash::Hash;
use std::fmt::Debug;
use crate::machine::*;
/// Several state machines driven in lockstep; `value` mirrors the current
/// state of each child machine, in the same order as `machines`.
#[derive(Debug)]
pub struct ParallelMachine<A, S, C> {
    pub id: String,
    pub machines: Vec<Machine<A, S, C>>,
    pub value: Vec<S>
}
impl<A: Copy, S: Eq + Hash + Copy, C: Debug + Copy> ParallelMachine<A, S, C> {
    /// Create a new state machine; the value vector is seeded from each
    /// child's current value.
    pub fn new(id: String, machines: Vec<Machine<A, S, C>>) -> Self {
        let value = machines.iter().map(|m| m.value).collect();
        ParallelMachine { id, machines, value }
    }
    /// Send an action to every child machine, then refresh `value`.
    pub fn transition(&mut self, action: &A) {
        for m in &mut self.machines {
            m.transition(action);
        }
        self.value = self.machines.iter().map(|m| m.value).collect();
    }
}
|
//! Manage and deduplicate ratings and interactions.
//!
//! This code consolidates rating de-duplication into a single place, so we can use the same
//! logic across data sets. We always store timestamps, dropping them at output time, because
//! the largest data sets have timestamps. Saving space for the smallest data set doesn't
//! seem worthwhile.
use anyhow::Result;
use std::path::Path;
mod actions;
mod ratings;
pub use actions::*;
pub use ratings::*;
/// Trait for an interaction (one user/item event with optional rating).
pub trait Interaction {
    fn get_user(&self) -> i32;
    fn get_item(&self) -> i32;
    /// `None` for implicit-feedback data sets without rating values.
    fn get_rating(&self) -> Option<f32>;
    fn get_timestamp(&self) -> i64;
}
/// Interface for de-duplicating interactions.
pub trait Dedup<I: Interaction> {
    /// Save an item in the deduplicator.
    fn add_interaction(&mut self, act: I) -> Result<()>;
    /// Write the de-duplicated results to a file; the `usize` is a count
    /// (presumably records written — confirm with implementors).
    fn save(&mut self, path: &Path) -> Result<usize>;
}
/// Composite (user, item) key used to detect duplicate interactions.
#[derive(Debug, Hash, Eq, PartialEq)]
struct Key {
    user: i32,
    item: i32,
}
impl Key {
    /// Build the composite (user, item) deduplication key.
    fn new(user: i32, item: i32) -> Key {
        Key { item, user }
    }
}
|
use macroquad::prelude::*;
/// Polyline the enemies walk along, as an ordered list of waypoints.
#[derive(Default)]
struct Path {
    nodes: Vec<Vec2>,
}
impl Path {
    /// Draw each consecutive pair of nodes as a red segment.
    fn debug_draw(&self) {
        for w in self.nodes.windows(2) {
            let (n1, n2) = (w[0], w[1]);
            draw_line(n1.x, n1.y, n2.x, n2.y, 2.0, RED);
        }
    }
    /// Preview the next edit: a yellow line from the last node to the mouse,
    /// or a yellow circle at the mouse while the path is still empty.
    fn project_next_at_mouse(&self) {
        let (mx, my) = mouse_position();
        match self.nodes.last() {
            Some(last) => {
                draw_line(last.x, last.y, mx, my, 2.0, YELLOW);
            }
            None => {
                draw_circle(mx, my, 8.0, YELLOW);
            }
        }
    }
}
/// An enemy walking a `Path`; `path_pos` indexes the waypoint it is
/// currently heading toward.
struct Enemy {
    pos: Vec2,
    path_pos: usize,
}
impl Enemy {
    /// Draw the enemy as a small green circle.
    pub fn debug_draw(&self) {
        draw_circle(self.pos.x, self.pos.y, 4.0, GREEN);
    }
    /// Spawn at the first waypoint. Panics if `path.nodes` is empty —
    /// the caller guards with `!path.nodes.is_empty()`.
    pub fn spawn_on_path(path: &Path) -> Self {
        Self {
            pos: path.nodes[0],
            path_pos: 0,
        }
    }
    /// Move `speed` pixels toward the current target waypoint; when within
    /// one step of it, advance to the next. Past the last node this is a
    /// no-op. (Since spawn puts `pos` on node 0 with `path_pos` 0, the
    /// first call just bumps the index.) Also draws a blue debug line to
    /// the target.
    pub fn advance(&mut self, path: &Path) {
        let next = match path.nodes.get(self.path_pos) {
            Some(next) => *next,
            None => return,
        };
        let speed = 2.4;
        let diff = next - self.pos;
        let distance_sq = diff.x.powf(2.0) + diff.y.powf(2.0);
        if distance_sq.sqrt() < speed {
            self.path_pos += 1;
            return;
        }
        let angle = diff.y.atan2(diff.x);
        let x_move = angle.cos() * speed;
        let y_move = angle.sin() * speed;
        self.pos.x += x_move;
        self.pos.y += y_move;
        draw_line(self.pos.x, self.pos.y, next.x, next.y, 2.0, BLUE);
    }
}
/// Game loop: left-click places waypoints (edit mode), E toggles editing,
/// S spawns an enemy on the path, C clears path and enemies.
#[macroquad::main("TD Project")]
async fn main() {
    let mut path = Path::default();
    let mut editing = true;
    let mut enemies = Vec::new();
    loop {
        let (mx, my) = mouse_position();
        if is_mouse_button_pressed(MouseButton::Left) && editing {
            path.nodes.push(Vec2::new(mx, my));
        }
        if is_key_pressed(KeyCode::C) {
            path.nodes.clear();
            enemies.clear();
        }
        if is_key_pressed(KeyCode::E) {
            editing = !editing;
        }
        // Spawning requires at least one node (spawn_on_path indexes [0]).
        if is_key_pressed(KeyCode::S) && !path.nodes.is_empty() {
            enemies.push(Enemy::spawn_on_path(&path));
        }
        path.debug_draw();
        for enemy in &mut enemies {
            enemy.advance(&path);
            enemy.debug_draw();
        }
        if editing {
            path.project_next_at_mouse();
        }
        if editing {
            draw_text("(E)dit mode on: Click to place nodes", 0., 24., 24., WHITE);
        } else {
            draw_text("(E)dit mode off", 0., 24., 24., WHITE);
        }
        if !path.nodes.is_empty() {
            draw_text(
                &format!("(S)pawned enemies: {}", enemies.len()),
                0.,
                48.,
                24.,
                WHITE,
            );
            draw_text("(C)lear", 0., 3. * 24., 24., WHITE);
        }
        next_frame().await
    }
}
|
// Smoke test: the aggregate-function registry must contain the "not" key.
#[test]
fn normalize_test() {
    assert!(::mongo_config::AGG_FUNCTIONS.contains_key("not"));
}
|
#![macro_use]
use middle::middle::*;
use mangle::Mangle;
use ast::name::Symbol;
use context::Context;
use stack::Stack;
use std::fmt;
use std::borrow::ToOwned;
// Print one line of NASM to stdout. The two-part form left-pads the
// instruction to 40 columns and appends "; comment"; the one-part form
// prints the instruction alone.
macro_rules! emit {
    ( $($instr: expr),+ ; $($comment: expr),+ ) => (
        println!("{:<40} ; {}", format!($($instr),+), format!($($comment),+))
    );
    ( $($instr: expr),+ ) => (
        println!($($instr),+)
    );
}
/// Emit a block: each statement in order, inside a fresh stack scope so
/// locals declared here are released when the scope closes.
pub fn emit_block<'a, 'ast>(ctx: &Context<'a, 'ast>,
                            stack: &Stack,
                            block: &TypedBlock<'a, 'ast>) {
    stack.scope(|stack| {
        use middle::middle::TypedBlockStatement_::*;
        for stmt in block.stmts.iter() {
            match stmt.node {
                LocalVariable(ref var) => emit_variable(ctx, stack, var),
                Statement(ref stmt) => emit_statement(ctx, stack, stmt),
            }
        }
    });
}
/// Emit a local-variable declaration: evaluate the initializer into `eax`,
/// push it, and record the slot under the variable's fully-qualified name.
pub fn emit_variable<'a, 'ast>(ctx: &Context<'a, 'ast>,
                               stack: &mut Stack,
                               var: &TypedLocalVariable<'a, 'ast>) {
    emit_expression(ctx, stack, &var.initializer);
    emit!("push eax" ; "variable {}", var.variable.fq_name);
    stack.add_var(var.variable.fq_name);
}
/// Emit one statement. Conditions leave their result in `eax` (0 = false);
/// control flow is built from `test eax, eax` + conditional jumps to
/// freshly allocated `.L{n}` labels.
pub fn emit_statement<'a, 'ast>(ctx: &Context<'a, 'ast>,
                                stack: &Stack,
                                stmt: &TypedStatement<'a, 'ast>) {
    use middle::middle::TypedStatement_::*;
    match stmt.node {
        Expression(ref expr) => {
            emit_expression(ctx, stack, expr);
        }
        If(ref expr, box ref ift, ref iff) => {
            emit!("" ; "> begin if statement");
            emit_expression(ctx, stack, expr);
            // Is the result zero?
            emit!("test eax, eax");
            // If it is (i.e. false), jump to `iff`.
            let false_label = ctx.label();
            emit!("jz .L{}", false_label);
            // Otherwise, execute `ift`...
            emit_statement(ctx, stack, ift);
            if let Some(box ref iff) = *iff {
                // then jump over `iff`.
                let end_label = ctx.label();
                emit!("jmp .L{}", end_label);
                emit!(".L{}:", false_label);
                emit_statement(ctx, stack, iff);
                emit!(".L{}:", end_label);
            } else {
                // and we're done.
                emit!(".L{}:", false_label);
            }
            emit!("" ; "> end if statement");
        }
        While(ref expr, box ref inner) => {
            emit!("" ; "> begin while statement");
            let top_label = ctx.label();
            emit!(".L{}:", top_label);
            emit_expression(ctx, stack, expr);
            // Is the result zero?
            emit!("test eax, eax");
            // If it is (i.e. false), jump to the end.
            let end_label = ctx.label();
            emit!("jz .L{}", end_label);
            // Otherwise, run the body...
            emit_statement(ctx, stack, inner);
            // and go back to the top.
            emit!("jmp .L{}", top_label);
            emit!(".L{}:", end_label);
            emit!("" ; "> end while statement");
        }
        For(ref init, ref test, ref update, box ref inner) => {
            // init/test/update are each optional, mirroring Java's `for`.
            emit!("" ; "> begin for statement");
            if let Some(ref init) = *init {
                emit_expression(ctx, stack, init);
            }
            let top_label = ctx.label();
            let end_label = ctx.label();
            emit!(".L{}:", top_label);
            if let Some(ref test) = *test {
                emit_expression(ctx, stack, test);
                emit!("test eax, eax");
                emit!("jz .L{}", end_label);
            }
            emit_statement(ctx, stack, inner);
            if let Some(ref update) = *update {
                emit_expression(ctx, stack, update);
            }
            emit!("jmp .L{}", top_label);
            emit!(".L{}:", end_label);
            emit!("" ; "> end for statement");
        }
        ForDecl(ref var, ref test, ref update, box ref inner) => {
            // `for` with a declaration: the loop variable lives in its own
            // stack scope that covers the whole loop.
            emit!("" ; "> begin for statement");
            stack.scope(|stack| {
                emit_variable(ctx, stack, var);
                let top_label = ctx.label();
                let end_label = ctx.label();
                emit!(".L{}:", top_label);
                if let Some(ref test) = *test {
                    emit_expression(ctx, stack, test);
                    emit!("test eax, eax");
                    emit!("jz .L{}", end_label);
                }
                emit_statement(ctx, stack, inner);
                if let Some(ref update) = *update {
                    emit_expression(ctx, stack, update);
                }
                emit!("jmp .L{}", top_label);
                emit!(".L{}:", end_label);
            });
            emit!("" ; "> end for statement");
        }
        Empty => (),
        Return(ref expr) => {
            if let Some(ref expr) = *expr {
                emit_expression(ctx, stack, expr);
            }
            // Result is already in `eax`, if any.
            // Just need to unwind the stack.
            emit!("mov esp, ebp");
            emit!("pop ebp");
            // Callee pops its own arguments (4 bytes each).
            emit!("ret 4*{}", stack.args ; "pop {} args off stack after return", stack.args);
        }
        Block(ref block) => emit_block(ctx, stack, block),
    }
}
// Ensure that `eax` is not null.
// Emits a test plus a conditional jump into the shared `__exception`
// handler when `eax` is zero.
pub fn check_null() {
    emit!("test eax, eax" ; "check null");
    emit!("jz __exception" ; "null exception");
}
/// Pairs a constant `Value` with the codegen `Context` so it can be
/// formatted as a NASM operand.
pub struct ConstantValue<'a: 'c + 'v, 'ast: 'a, 'c, 'v>(pub &'c Context<'a, 'ast>, pub &'v Value);
impl<'a, 'ast, 'c, 'v> fmt::Display for ConstantValue<'a, 'ast, 'c, 'v> {
    fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
        match *self.1 {
            Value::Int(v) => write!(f, "{}", v),
            Value::Short(v) => write!(f, "{}", v),
            Value::Char(v) => write!(f, "{}", v),
            Value::Byte(v) => write!(f, "{}", v),
            // Booleans lower to the integers 1 / 0.
            Value::Bool(v) => write!(f, "{}", if v { 1 } else { 0 }),
            // Interned strings are emitted as a label; the constant must
            // already be registered in `string_constants`.
            Value::String(ref v) => write!(f, "stringstruct#{}", self.0.string_constants.get(v).unwrap()),
        }
    }
}
// Size in bytes of a value of simple type `ty`.
// char/short are 2, byte/boolean are 1; everything else (int and
// references to user-defined types) occupies a full 4-byte slot.
pub fn sizeof_simple_ty(ty: &SimpleType) -> u32 {
match *ty {
SimpleType::Char | SimpleType::Short => 2,
SimpleType::Byte | SimpleType::Boolean => 1,
_ => 4,
}
}
// Size in bytes of a value of type `ty`.
// Simple types defer to `sizeof_simple_ty`; array references (and any
// other type) are pointer-sized (4 bytes on this 32-bit target).
pub fn sizeof_ty(ty: &Type) -> u32 {
match *ty {
Type::SimpleType(ref simple_ty) => sizeof_simple_ty(simple_ty),
_ => 4,
}
}
// Size in bytes of one element of the array type `ty`.
// Panics if `ty` is not an array type — callers must have already
// type-checked the expression as an array.
pub fn sizeof_array_element(ty: &Type) -> u32 {
match *ty {
Type::ArrayType(ref simple_ty) => sizeof_simple_ty(simple_ty),
_ => panic!("not an array type: {}", ty),
}
}
// Translate an operand size in bytes into the NASM width keyword
// (`byte`, `word`, `dword`) used to qualify memory operands.
// Panics on any size other than 1, 2 or 4.
pub fn size_name(size: u32) -> &'static str {
    if size == 4 {
        "dword"
    } else if size == 2 {
        "word"
    } else if size == 1 {
        "byte"
    } else {
        panic!("bad size {}", size)
    }
}
// Single-letter width suffix ('b', 'w', 'd') for an operand size in
// bytes. Panics on any size other than 1, 2 or 4.
pub fn short_size_name(size: u32) -> char {
    if size == 4 {
        'd'
    } else if size == 2 {
        'w'
    } else if size == 1 {
        'b'
    } else {
        panic!("bad size {}", size)
    }
}
// Return the appropriate `mov` instruction to load
// from a location of type `ty`, to a 32-bit register.
// Signed narrow types (byte, short) sign-extend; unsigned narrow types
// (boolean, char) zero-extend; full-width values (int, references)
// use a plain `mov`.
pub fn load_simple_ty(ty: &SimpleType) -> &'static str {
match *ty {
SimpleType::Byte | SimpleType::Short => "movsx",
SimpleType::Boolean | SimpleType::Char => "movzx",
SimpleType::Int | SimpleType::Other(_) => "mov"
}
}
// Load instruction for reading one element of the array type `ty`.
// Panics if `ty` is not an array type.
pub fn load_array_ty(ty: &Type) -> &'static str {
match *ty {
Type::ArrayType(ref simple_ty) => load_simple_ty(simple_ty),
_ => panic!("non-array type")
}
}
// Load instruction for reading a value of type `ty` into a 32-bit
// register. Array references are pointer-sized (`mov`); Void/Null/
// Unknown can never be loaded and indicate a front-end bug.
pub fn load_ty(ty: &Type) -> &'static str {
match *ty {
Type::SimpleType(ref simple_ty) => load_simple_ty(simple_ty),
Type::ArrayType(_) => "mov",
Type::Void | Type::Null | Type::Unknown => panic!("non-concrete type")
}
}
// Return the sub-register of `eax` holding the low `size` bytes:
// 4 -> eax, 2 -> ax, 1 -> al. Panics on any other size.
pub fn eax_lo(size: u32) -> &'static str {
    if size == 4 {
        "eax"
    } else if size == 2 {
        "ax"
    } else if size == 1 {
        "al"
    } else {
        panic!("bad size {}", size)
    }
}
// Return the assembly label of the runtime type descriptor for `ty`.
// Primitive types map to fixed descriptor labels; user-defined types use
// `DESC` followed by the type's mangled name.
pub fn desc(ty: &SimpleType) -> String {
    use middle::middle::SimpleType::*;
    match *ty {
        // `format!` with a no-argument literal is just an allocation in
        // disguise (clippy::useless_format); `to_owned` states the intent.
        Boolean => "BOOLEANDESC".to_owned(),
        Int => "INTDESC".to_owned(),
        Short => "SHORTDESC".to_owned(),
        Char => "CHARDESC".to_owned(),
        Byte => "BYTEDESC".to_owned(),
        Other(ref tydef) => format!("DESC{}", tydef.mangle()),
    }
}
// Emit 32-bit x86 (NASM syntax) code that evaluates `expr`, leaving the
// result (if any) in `eax`.
//
// `ctx` supplies fresh labels, method-index lookup and interned string
// constants; `stack` maps local variables and `this` to `ebp`-relative
// slots. Several arms clobber ebx/ecx/edx/edi as scratch registers.
// Intermediate values are spilled via push/pop around nested
// sub-expression evaluation.
pub fn emit_expression<'a, 'ast>(ctx: &Context<'a, 'ast>,
stack: &Stack,
expr: &TypedExpression<'a, 'ast>) {
use middle::middle::TypedExpression_::*;
// Dispatch on the typed-AST expression kind.
match expr.node {
Constant(ref val) => emit!("mov eax, {}", ConstantValue(ctx, val)),
Null => emit!("xor eax, eax"), // eax = 0
This => emit!("mov eax, [ebp+4*{}]", stack.this_index()),
// Allocation + constructor call. `this` is reserved below the
// arguments so the constructor's calling convention sees it last.
NewStaticClass(tydef, ref constructor, ref args) => {
emit!("" ; "Begin allocate {}", tydef.fq_name);
emit!("push dword 0" ; "reserve space to store `this`");
// Generate argument code.
for arg in args.iter() {
emit_expression(ctx, stack, arg);
emit!("push eax");
}
emit!("call ALLOC{}", tydef.mangle());
emit!("mov [esp+4*{}], eax", args.len() ; "store `this` into reserved space");
emit!("call {}", constructor.mangle());
emit!("pop eax" ; "recover `this`");
emit!("" ; "End allocate {}", tydef.fq_name);
}
NewArray(ref ty, box ref expr) => {
emit!(""; "Begin allocate array of type {}[]", ty);
emit_expression(ctx, stack, expr);
emit!("push eax" ; "save the length of the register");
emit!("lea eax, [{}*eax + ARRAYLAYOUT.elements]", sizeof_simple_ty(ty));
emit!("call __malloc");
emit!("mov dword [eax+VPTR], ARRAYDESC");
emit!("mov dword [eax+ARRAYLAYOUT.tydesc], {}", desc(ty));
emit!("pop ebx");
emit!("mov [eax+ARRAYLAYOUT.len], ebx" ; "store length of array");
emit!(""; "End allocate array of type {}[]", ty);
}
Variable(var) => emit!("mov eax, [ebp+4*{}]", stack.var_index(var.fq_name)
; "variable {}", var.fq_name),
StaticFieldAccess(field) => {
emit!("{} eax, {} [{}]",
load_ty(&field.ty),
size_name(sizeof_ty(&field.ty)), field.mangle());
}
FieldAccess(box ref expr, field) => {
emit_expression(ctx, stack, expr);
check_null();
emit!("{} eax, {} [eax+{}]",
load_ty(&field.ty),
size_name(sizeof_ty(&field.ty)), field.mangle()
; "access field {}", field.fq_name);
}
ThisFieldAccess(field) => {
emit!("mov eax, [ebp+4*{}]", stack.this_index() ; "this");
emit!("{} eax, {} [eax+{}]",
load_ty(&field.ty),
size_name(sizeof_ty(&field.ty)), field.mangle()
; "access field {}", field.fq_name);
}
// Assignment arms are matched on the specific lvalue shape; the RHS
// value is always left in `eax` as the expression's result.
Assignment(box expr!(Variable(var)), box ref rhs) => {
emit_expression(ctx, stack, rhs);
emit!("mov [ebp+4*{}], eax", stack.var_index(var.fq_name));
}
Assignment(box expr!(StaticFieldAccess(field)), box ref rhs) => {
emit_expression(ctx, stack, rhs);
let field_size = sizeof_ty(&field.ty);
emit!("mov {} [{}], {}",
size_name(field_size), field.mangle(),
eax_lo(field_size));
}
Assignment(box expr!(FieldAccess(box ref expr, field)), box ref rhs) => {
emit_expression(ctx, stack, expr);
// conceptually, the field reference is evaluated here
// (in reality, we do it later)
check_null(); // null check before evaluating RHS
emit!("push eax");
emit_expression(ctx, stack, rhs);
emit!("pop ebx");
let field_size = sizeof_ty(&field.ty);
emit!("mov {} [ebx + {}], {}",
size_name(field_size), field.mangle(),
eax_lo(field_size)
; "set field {}", field.fq_name);
}
Assignment(box expr!(ThisFieldAccess(field)), box ref rhs) => {
emit_expression(ctx, stack, rhs);
emit!("mov ebx, [ebp+4*{}]", stack.this_index() ; "emit");
let field_size = sizeof_ty(&field.ty);
emit!("mov {} [ebx + {}], {}",
size_name(field_size), field.mangle(),
eax_lo(field_size)
; "set field {}", field.fq_name);
}
Assignment(box expr!(ArrayAccess(box ref array_expr, box ref index_expr)), box ref rhs) => {
// NOTE: This is perhaps a bit surprisng. The JLS specifies special handling
// for assignment of arrays. In particular, the RHS must be evaluated before
// the null check and out of bounds access check.
emit_expression(ctx, stack, array_expr);
emit!("push eax");
emit_expression(ctx, stack, index_expr);
emit!("push eax");
emit_expression(ctx, stack, rhs);
emit!("pop edi"); // array index
emit!("pop ecx"); // array location
emit!("test ecx, ecx" ; "check null");
emit!("jz __exception");
emit!("cmp edi, [ecx+ARRAYLAYOUT.len]" ; "check for array out of bounds");
// UNSIGNED compare (if eax is negative, then it will also fail)
emit!("jae __exception");
// check type compatibility
match array_expr.ty {
Type::ArrayType(SimpleType::Other(_)) => {
// save a reference to the object
emit!("test eax, eax");
let skip = ctx.label();
emit!("jz .L{}", skip ; "null is ok");
emit!("mov edx, eax");
emit!("mov ebx, [ecx+ARRAYLAYOUT.tydesc]" ; "get array's runtime type");
emit!("call __instanceof");
emit!("test eax, eax");
emit!("jz __exception");
emit!("mov eax, edx");
emit!(".L{}:", skip);
}
Type::ArrayType(_) => {
// primitive type: no compatibility check required
}
_ => panic!("type of array is not array type"),
}
let size = sizeof_array_element(&array_expr.ty);
emit!("mov [ecx + ARRAYLAYOUT.elements + {} * edi], {}",
size,
eax_lo(size));
}
Assignment(..) => panic!("non-lvalue in assignment"),
ArrayLength(box ref expr) => {
emit_expression(ctx, stack, expr);
check_null();
emit!("mov eax, [eax+ARRAYLAYOUT.len]");
}
// Static calls go direct; instance calls dispatch through the vtable
// slot found via the receiver's type descriptor.
MethodInvocation(ref receiver, ref sig, method, ref args) => {
if method.is_static {
assert!(receiver.is_none());
} else {
if let Some(box ref expr) = *receiver {
emit_expression(ctx, stack, expr);
check_null();
} else {
// implicitly `this`
emit!("mov eax, [ebp+4*{}]", stack.this_index());
}
emit!("push eax");
}
for arg in args.iter() {
emit_expression(ctx, stack, arg);
emit!("push eax");
}
if method.is_static {
// No dynamic dispatch: just call the impl.
if let Concrete(method_impl) = method.impled {
emit!("call {}", method_impl.mangle());
} else {
panic!("no impl for static method");
}
} else {
// Grab the reference to the receiver...
// (`args.len()` slots up the stack)
emit!("mov eax, [esp+4*{}]", args.len());
// Look up the type descriptor (first slot).
emit!("mov eax, [eax+VPTR]");
// Now call the method.
// Skip three slots, then look up by method index
emit!("call [eax+TYDESC.methods+4*{}]", ctx.method_index(sig)
; "method {}", sig);
}
// Callee pops the stack, nothing to do here.
}
ArrayAccess(box ref array, box ref ix) => {
emit_expression(ctx, stack, array);
emit!("push eax");
emit_expression(ctx, stack, ix);
emit!("pop ebx");
emit!("test ebx, ebx" ; "check null");
emit!("jz __exception");
// array (not null) in `ebx`
// check index in bounds?
emit!("cmp eax, [ebx+ARRAYLAYOUT.len]");
// UNSIGNED compare (if eax is negative, then it will also fail)
emit!("jae __exception");
// index OK, look up element
let size = sizeof_array_element(&array.ty);
emit!("{} eax, {} [ebx+ARRAYLAYOUT.elements+{}*eax]",
load_array_ty(&array.ty),
size_name(size), size);
}
Prefix(op, box ref expr) => {
use ast::PrefixOperator::*;
emit_expression(ctx, stack, expr);
match op {
Not => {
// always a boolean
emit!("xor eax, 1");
}
Minus => {
emit!("neg eax");
}
}
}
Infix(op, box ref l, box ref r) => {
use ast::InfixOperator::*;
match op {
// Short-circuit operators must not evaluate `r` eagerly, so
// they are handled before the generic two-operand scheme.
LazyOr | LazyAnd => {
emit_expression(ctx, stack, l);
emit!("test eax, eax");
let skip = ctx.label();
match op {
LazyOr => emit!("jnz .L{}", skip),
LazyAnd => emit!("jz .L{}", skip),
_ => unreachable!(),
}
emit_expression(ctx, stack, r);
emit!(".L{}:", skip);
}
_ => {
// Generic scheme: l in ebx (via push/pop), r in eax.
emit_expression(ctx, stack, l);
emit!("push eax");
emit_expression(ctx, stack, r);
emit!("pop ebx");
match op {
LazyOr | LazyAnd => unreachable!(),
Xor => emit!("xor eax, ebx"),
EagerOr => emit!("or eax, ebx"),
EagerAnd => emit!("and eax, ebx"),
Equals | NotEquals
| LessThan | GreaterThan
| LessEqual | GreaterEqual => {
emit!("cmp ebx, eax");
emit!("set{} al", match op {
// Equality is also fine for pointers
Equals => "e",
NotEquals => "ne",
// Numeric comparisons only happen for numbers.
// Use signed comparison.
LessThan => "l",
GreaterThan => "g",
LessEqual => "le",
GreaterEqual => "ge",
_ => unreachable!(),
});
emit!("movzx eax, al");
}
// These operations are commutative.
Plus => emit!("add eax, ebx"),
Mult => emit!("imul ebx"),
// These are not. `eax` and `ebx` are in the wrong order
Minus | Div | Modulo => {
emit!("xchg eax, ebx");
match op {
Minus => emit!("sub eax, ebx"),
Div | Modulo => {
emit!("test ebx, ebx");
emit!("jz __exception" ; "division by zero");
// Special case: (-2^31) / (-1) produces a division error,
// but should instead return (-2^31).
// Meanwhile, (-2^31) % (-1) should return 0.
let skip = ctx.label();
emit!("lea ecx, [2*eax]" ; "ecx = 0 iff eax = -2^31 or 0");
emit!("lea edx, [ebx+1]" ; "edx = 0 iff ebx = -1");
emit!("or ecx, edx" ; "ecx = 0 iff both the above hold");
emit!("jz .L{}", skip ; "in this case, skip the division");
// If the division is skipped, then -eax = eax, while ebx = -1.
// Hence `eax` is the correct result of eax / ebx, while edx =
// 0 is the correct result of eax % ebx.
// Otherwise, do the division properly.
emit!("cdq"); // clear out edx
emit!("idiv ebx");
emit!(".L{}:", skip);
if let Modulo = op {
// remainder in edx
emit!("mov eax, edx");
} // otherwise, quotient in eax
}
_ => unreachable!(),
}
}
}
}
}
}
// String concatenation: both operands are already strings (or null,
// which is replaced by the interned "null" constant via cmovz).
Concat(box ref expr1, box ref expr2) => {
emit!("" ; "> begin string concat operation");
emit_expression(ctx, stack, expr1);
// null -> "null"
let null = ctx.string_constants["null"];
emit!("test eax, eax");
emit!("mov ebx, stringstruct#{}", null);
emit!("cmovz eax, ebx");
emit!("push eax");
emit_expression(ctx, stack, expr2);
emit!("test eax, eax");
emit!("mov ebx, stringstruct#{}", null);
emit!("cmovz eax, ebx");
emit!("push eax");
// TODO: Fragile if we change naming scheme. Consider revising.
emit!("call METHODjava.lang.String.concat#java.lang.String");
emit!("" ; "> end string concat operation");
}
InstanceOf(box ref expr, ref ty) => {
emit_expression(ctx, stack, expr);
match *ty {
Type::SimpleType(ref ty @ SimpleType::Other(&TypeDefinition {
kind: TypeKind::Interface, ..
})) => {
emit!("mov ebx, {}", desc(ty));
emit!("call __instanceof_interface");
}
Type::SimpleType(ref ty) => {
emit!("mov ebx, {}", desc(ty));
emit!("call __instanceof");
}
Type::ArrayType(ref ty @ SimpleType::Other(&TypeDefinition {
kind: TypeKind::Interface, ..
})) => {
emit!("mov ebx, {}", desc(ty));
emit!("call __instanceof_array_interface");
}
Type::ArrayType(ref ty) => {
emit!("mov ebx, {}", desc(ty));
emit!("call __instanceof_array");
}
_ => panic!("bad type in instanceof")
}
}
// Checked cast to a reference type: throws at runtime (via
// __exception) when the value is non-null and not an instance.
RefDowncast(box ref inner_expr) => {
emit_expression(ctx, stack, inner_expr);
emit!("" ; "check reference downcast");
let end = ctx.label();
emit!("test eax, eax");
emit!("jz .L{}", end ; "null is always fine");
match expr.ty {
Type::SimpleType(SimpleType::Other(tydef)) => {
// object must be a subtype of `tydef`
emit!("push eax");
emit!("mov ebx, {}", desc(&SimpleType::Other(tydef)));
match tydef.kind {
TypeKind::Class => emit!("call __instanceof"),
TypeKind::Interface => emit!("call __instanceof_interface"),
}
emit!("test eax, eax");
emit!("jz __exception");
emit!("pop eax");
}
Type::ArrayType(SimpleType::Other(tydef)) => {
emit!("cmp [eax+VPTR], dword ARRAYDESC" ; "check array");
emit!("jne __exception");
emit!("push eax");
emit!("mov eax, [eax+ARRAYLAYOUT.tydesc]");
emit!("mov ebx, {}", desc(&SimpleType::Other(tydef)));
// array element type must be a subtype of `tydef`
match tydef.kind {
TypeKind::Class => emit!("call __instanceof_tydesc"),
TypeKind::Interface => emit!("call __instanceof_tydesc_interface"),
}
emit!("test eax, eax");
emit!("jz __exception");
emit!("pop eax");
// TODO: Handle arrays of interface types :(
// (Need to make interface descriptors recognizable,
// generalize __instanceof)
}
Type::ArrayType(ref elem_ty) => {
// Cast to a primitive array type.
emit!("cmp [eax+VPTR], dword ARRAYDESC" ; "check array");
emit!("jne __exception");
emit!("mov ebx, [eax+ARRAYLAYOUT.tydesc]");
emit!("cmp ebx, {}", desc(elem_ty) ; "primitive array type: check exact match");
emit!("jne __exception");
}
_ => panic!("bad RefDowncast to type {}", expr.ty),
}
emit!(".L{}:", end ; "cast OK");
}
// Narrowing primitive cast: truncate then re-extend in `eax`.
PrimDowncast(box ref inner_expr) => {
emit_expression(ctx, stack, inner_expr);
match expr.ty {
Type::SimpleType(SimpleType::Byte) => emit!("movsx eax, al" ; "cast to byte"),
Type::SimpleType(SimpleType::Char) => emit!("movzx eax, ax" ; "cast to char"),
Type::SimpleType(SimpleType::Short) => emit!("movsx eax, ax" ; "cast to short"),
Type::SimpleType(SimpleType::Int) => emit!("" ; "(cast to int)"),
_ => panic!("bad PrimDowncast to type {}", expr.ty),
}
}
Widen(box ref expr) => {
emit_expression(ctx, stack, expr);
// no operation: reference types all have the same representation,
// while primitive types are already extended to 32 bits
}
// Implicit toString conversion: reference types dispatch through the
// vtable (null becomes the literal "null"); primitives are boxed
// into the matching wrapper type first, then toString is invoked.
ToString(box ref expr) => {
use middle::middle::SimpleType::*;
let tostring_signature = MethodSignature {
name: Symbol::from_str("toString"),
args: vec![],
};
emit!(""; "Begin conversion to string");
match expr.ty {
// reference type
Type::ArrayType(_) | Type::SimpleType(Other(_)) => {
emit_expression(ctx, stack, expr);
// eax contains reference type
emit!("test eax, eax" ; "check null");
let not_null_label = ctx.label();
let end_label = ctx.label();
emit!("jnz .L{}", not_null_label
; "use string \"null\" if reference type is null");
emit!("mov eax, {}",
ConstantValue(ctx, &Value::String("null".to_owned())));
emit!("jmp .L{}", end_label);
emit!(".L{}:", not_null_label);
emit!("push eax");
// Look up the type descriptor (first slot).
emit!("mov eax, [eax+VPTR]");
// Now call the method.
emit!("call [eax+TYDESC.methods+4*{}]", ctx.method_index(&tostring_signature)
; "call toString");
emit!(".L{}:", end_label);
}
// primitive type
Type::SimpleType(ref ty) => {
emit!("" ; " create reference type for primitive conversion to string");
emit!("push dword 0" ; "reserve space to store `this`");
emit_expression(ctx, stack, expr);
emit!("push eax");
let (boxed_type, use_type) = match *ty {
Boolean => (ctx.lang_items.boolean, Boolean),
Byte | Short | Int => (ctx.lang_items.integer, Int),
Char => (ctx.lang_items.character, Char),
Other(..) => panic!("case should have been previously covered"),
};
let arg_type = Type::SimpleType(use_type);
let constructor = boxed_type.constructors.get(&vec![arg_type]).unwrap();
emit!("call ALLOC{}", boxed_type.mangle());
emit!("mov [esp+4], eax" ; "store `this` into reserved space");
emit!("call {}", constructor.mangle());
emit!("pop eax" ; "recover `this`");
check_null();
emit!("push eax");
// Look up the type descriptor (first slot).
emit!("mov eax, [eax+VPTR]");
// Now call the method.
emit!("call [eax+TYDESC.methods+4*{}]", ctx.method_index(&tostring_signature)
; "call toString");
}
Type::Null => {
emit!("mov eax, {}",
ConstantValue(ctx, &Value::String("null".to_owned())));
}
Type::Void | Type:: Unknown =>
panic!("should not be able to print Void or Unknown"),
}
emit!(""; "End conversion to string");
}
}
}
|
use serde::{Deserialize, Serialize};
use std::error::Error;
#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
// Bot configuration deserialized from `./config.json` by
// `Config::from_config`; JSON keys are camelCase per the serde attribute.
pub struct Config {
// Authentication token for the bot.
pub token: String,
// NOTE(review): presumably the Discord user id of the bot admin —
// confirm against where this is checked.
pub admin_user_id: i64,
// Scripted commands, keyed by command name.
pub commands: std::collections::HashMap<String, Command>
}
impl Config {
    /// Load the configuration from `./config.json` in the working directory.
    ///
    /// # Errors
    /// Returns an error when the file cannot be opened or when its JSON
    /// does not match the `Config` shape. (Previously the open was
    /// `.unwrap()`ed, panicking instead of returning the declared `Result`.)
    pub fn from_config() -> Result<Config, Box<Error>> {
        let file = std::fs::File::open("./config.json")?;
        let reader = std::io::BufReader::new(file);
        Ok(serde_json::from_reader(reader)?)
    }
}
#[derive(Debug, Serialize, Deserialize)]
// A configured command. NOTE(review): semantics inferred from field
// names — `script` appears to be the payload run when `name` is invoked;
// confirm against the command dispatcher.
pub struct Command {
pub name: String,
pub script: String,
}
use serenity::builder::CreateEmbed;
use serenity::framework::standard::CommandError;
use serenity::model::channel::Message;
use serenity::prelude::Context;
use crate::db::*;
use crate::model::{GameServer, GameServerState};
// Discord command: list every open lobby with its registered/total player
// count. Replies with a plain message when there are no lobbies, otherwise
// sends an embed built by `lobbies_helper`.
pub fn lobbies(context: &mut Context, message: &Message) -> Result<(), CommandError> {
// Serenity shares state through a locked data map; the DB handle is
// stored under `DbConnectionKey` at startup.
let data = context.data.lock();
let db_conn = data
.get::<DbConnectionKey>()
.ok_or_else(|| CommandError("No db connection".to_string()))?;
let lobbies_and_player_count = db_conn.select_lobbies()?;
if lobbies_and_player_count.is_empty() {
message.reply(&"No available lobbies")?;
} else {
let embed = lobbies_helper(lobbies_and_player_count)?;
message.channel_id.send_message(|m| m.embed(|_| embed))?;
}
Ok(())
}
// Build the "Lobbies" embed: two inline columns, one row per lobby —
// the alias column and the "registered/capacity" player-count column.
fn lobbies_helper(
    lobbies_and_player_count: Vec<(GameServer, i32)>,
) -> Result<CreateEmbed, CommandError> {
    let mut aliases = String::new();
    let mut player_counts = String::new();
    for (lobby, registered_count) in lobbies_and_player_count {
        aliases.push_str(&format!("{}\n", lobby.alias));
        if let GameServerState::Lobby(state) = lobby.state {
            player_counts.push_str(&format!("{}/{}\n", registered_count, state.player_count));
        } else {
            // Bug fix: this marker previously had no trailing newline, so
            // every subsequent row was appended onto the same line and the
            // two embed columns fell out of alignment.
            player_counts.push_str("ERROR\n");
        }
    }
    let embed = CreateEmbed::default()
        .title("Lobbies")
        .field("Alias", aliases, true)
        .field("Players", player_counts, true);
    Ok(embed)
}
|
use crate::input::DbValue;
pub type Result<T> = std::result::Result<T, Error>;
#[derive(Debug)]
// All error conditions surfaced by this crate. The first four variants
// carry a human-readable description; `Rusqlite`, `Chrono` and `Io` wrap
// the underlying library errors (lifted via the `From` impls in this file).
pub enum Error {
ModelNotFound(String),
FieldNotFound(String),
InvalidValue(String),
InvalidJson(String),
Rusqlite(rusqlite::Error),
Chrono(chrono::ParseError),
Io(std::io::Error),
}
impl Error {
pub fn invalid_value<T>(expected: &str, field: &T, value: &DbValue) -> Self
where
T: crate::field::Field,
{
Error::InvalidValue(format!(
"Expected {} in {}, got {:?}",
expected,
field.name(),
value
))
}
pub fn invalid_json<T>(expected: &str, field: &T, value: &serde_json::Value) -> Self
where
T: crate::field::Field,
{
Error::InvalidJson(format!(
"Expected {} in {}, got {:?}",
expected,
field.name(),
value
))
}
}
impl std::fmt::Display for Error {
    // User-facing text is the same as the derived Debug representation;
    // delegate directly instead of re-formatting through `write!`.
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        std::fmt::Debug::fmt(self, f)
    }
}
impl std::error::Error for Error {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match self {
Error::Rusqlite(err) => Some(err),
_ => None,
}
}
}
// Conversions that let `?` lift library errors into this crate's `Error`.
impl From<rusqlite::Error> for Error {
fn from(err: rusqlite::Error) -> Error {
Error::Rusqlite(err)
}
}
impl From<chrono::ParseError> for Error {
fn from(err: chrono::ParseError) -> Error {
Error::Chrono(err)
}
}
impl From<std::io::Error> for Error {
fn from(err: std::io::Error) -> Error {
Error::Io(err)
}
}
|
//! The crate `nathru` (NUmber THeory in RUst) implements a few simple number theory functions.
use std::cmp;
#[cfg(test)]
// Unit tests for the number-theory helpers below. Note that `gcd`
// follows the sign of the Euclidean remainders (test1b expects -15).
mod tests {
use super::*;
#[test] fn test1a() { assert_eq!(gcd(945, 165), 15); }
#[test] fn test1b() { assert_eq!(gcd(945, -165), -15); }
#[test] fn test2a() { assert_eq!(power_mod(2, 1000, 331), 31); }
#[test] fn test2b() { assert_eq!(power_mod(200, 10000, 541), 80); }
#[test] fn test2c() { assert_eq!(power_mod(12345, 165, 331), 330); }
#[test] fn test3a() { assert_eq!(legendre_symbol(12345, 331), -1); }
#[test] fn test4a() { assert_eq!(gcd(45261354, 45680756), 2); }
#[test] fn test4b() { assert_eq!(gcd(4512261354, 45680127564), 6); }
}
/// This function complements the % operator, as it behaves like the normal mathematical mod operator even for negative inputs.
///
/// Assumes `m > 0`; the result is always in `0..m`.
pub fn modulo(n: i64, m: i64) -> i64 {
    // Rust's `%` truncates toward zero, so for negative `n` it yields a
    // value in (-m, 0]; shifting that case up by `m` gives the mathematical
    // residue. The previous `- (-n) % m` parsed as `-((-n) % m)`, which is
    // identical to `n % m` and therefore still negative for n < 0,
    // contradicting this function's documented purpose.
    let r = n % m;
    if r < 0 { r + m } else { r }
}
/// Computes powers modulo m
///
/// Returns `b^exp mod m` in the range `0..m` (assumes `m > 0`).
///
/// # Panics
/// Calls `unimplemented!()` for negative exponents.
pub fn power_mod(b: i64, exp: i64, m: i64) -> i64 {
    if exp < 0 { unimplemented!() }
    // Normalize the base so every intermediate stays non-negative,
    // giving a canonical result even for negative `b`.
    let mut bm = b % m;
    if bm < 0 { bm += m }
    let mut res: i64 = 1;
    let mut e = exp;
    // Square-and-multiply. Products are taken in i128: `res * bm` and
    // `bm * bm` can exceed i64 for moduli near i64::MAX, which previously
    // overflowed (a panic in debug builds, wrong answers in release).
    while e > 0 {
        if e & 1 != 0 {
            res = ((res as i128 * bm as i128) % m as i128) as i64;
        }
        bm = ((bm as i128 * bm as i128) % m as i128) as i64;
        e >>= 1
    }
    // `res` is already in 0..m except for the untouched initial 1 when
    // m == 1; a final reduction covers that edge case.
    res % m
}
/// The greatest common divisor of a and b
///
/// Iterative Euclidean algorithm; the sign of the result follows the
/// last non-zero remainder, exactly as the recursive formulation did.
pub fn gcd(a: i64, b: i64) -> i64 {
    let (mut x, mut y) = (a, b);
    while y != 0 {
        let r = x % y;
        x = y;
        y = r;
    }
    x
}
/// The integer square root: `Some(r)` with `r*r == n`, or `None` when
/// `n` is negative or not a perfect square.
pub fn int_sqrt(n: i64) -> Option<i64> {
    if n < 0 {
        return None;
    }
    // f64 has only 53 mantissa bits, so for large n the rounded estimate
    // can be off by one in either direction; the old code only probed
    // `t` and `t+1`, missing the case where the estimate is one too high.
    let t = (n as f64).sqrt() as i64;
    for c in (t - 1).max(0)..=t + 1 {
        // checked_mul: near i64::MAX, `(t+1)*(t+1)` overflows i64.
        if let Some(sq) = c.checked_mul(c) {
            if sq == n {
                return Some(c);
            }
        }
    }
    None
}
/// Legendre symbol of `a` modulo the odd prime `p`: 1 for a (nonzero)
/// quadratic residue, -1 for a non-residue, 0 when p divides a.
/// See https://en.wikipedia.org/wiki/Legendre_symbol.
pub fn legendre_symbol(a: i64, p: i64) -> i64 {
    // Euler's criterion: a^((p-1)/2) mod p is 1, p-1 (i.e. -1), or 0.
    match power_mod(a, (p - 1) / 2, p) {
        r if r == p - 1 => -1,
        r => r,
    }
}
/// The Tonelli–Shanks algorithm finds solutions to x^2 = n mod p, where p is an odd prime.
// We are following the notation in https://en.wikipedia.org/wiki/Tonelli–Shanks_algorithm (WP)
// Returns `(r, p - r, true)` — the two square roots — when a solution
// exists, and `(0, 0, false)` when `n` is not a quadratic residue mod p.
pub fn tonelli_shanks(n: i64, p: i64) -> (i64, i64, bool) {
if legendre_symbol(n, p) != 1 { return (0, 0, false) }
// WP step 1, factor out powers two.
// variables Q, S named as at WP.
let mut q = p - 1;
let mut s = 0;
while q & 1 == 0 {
s += 1;
q >>= 1
}
// WP step 1, direct solution
// (p ≡ 3 mod 4: the root is n^((p+1)/4) directly.)
if s == 1 {
let r1 = power_mod(n, (p+1)/4, p);
return (r1, p - r1, true)
}
// WP step 2, select z, assign c
// (z is the smallest quadratic non-residue mod p.)
let mut z = 2;
while legendre_symbol(z, p) != -1 { z += 1 }
let mut c = power_mod(z, q, p);
// WP step 3, assign R, t, M
let mut r = power_mod(n, (q+1)/2, p);
let mut t = power_mod(n, q, p);
let mut m = s;
// WP step 4, loop
loop {
// WP step 4.1, termination condition
if t == 1 { return (r, p - r, true) }
// WP step 4.2, find lowest i...
// (...such that t^(2^i) = 1; this `z` shadows the one above.)
let mut i = 0;
let mut z = t;
while z != 1 && i < m-1 {
z = z * z % p;
i += 1
}
// WP step 4.3, using a variable b, assign new values of R, t, c, M
let mut b = c;
let mut e = m - i - 1;
while e > 0 {
b = b * b % p;
e -= 1
}
r = r * b % p;
c = b * b % p;
t = t * c % p;
m = i;
}
}
/// Finds integer solution to x^2 + y^2 = p. See https://en.wikipedia.org/wiki/Cornacchia's_algorithm.
///
/// # Panics
/// Panics with a descriptive message when `p > 2` and `p % 4 != 1`
/// (no representation as a sum of two squares exists for such primes),
/// or when the final residue is not a perfect square.
pub fn cornacchia(p: i64) -> (i64, i64) {
    if p == 1 { return (1, 0) }
    if p == 2 { return (1, 1) }
    // An odd prime is a sum of two squares iff p ≡ 1 (mod 4).
    // The previous code panicked with an empty message here, which made
    // failures impossible to diagnose from the backtrace alone.
    if p % 4 != 1 {
        panic!("cornacchia: {} % 4 != 1, no x^2 + y^2 representation", p);
    }
    // A square root of -1 mod p seeds the truncated Euclidean algorithm.
    let res = tonelli_shanks(p - 1, p);
    let mut a = p;
    let mut b = cmp::max(res.0, res.1);
    let l = (p as f64).sqrt() as i64;
    // Run the Euclidean algorithm until the remainder drops below sqrt(p);
    // that remainder is x.
    while b > l {
        let r = a % b;
        a = b;
        b = r;
    }
    let c = p - b * b;
    (b, int_sqrt(c).expect("cornacchia: residue p - x^2 is not a perfect square"))
}
|
#[cfg_attr(not(feature = "verbose"), allow(unused_variables))]
#[cfg(test)]
// Stress tests for the bbqueue SPSC ring buffer. Each test splits a
// static `BBBuffer` into producer/consumer halves, runs them on separate
// threads, and fails on timeout (`TIMEOUT_NODATA` since last progress) or
// on any data mismatch. Progress is printed every `RPT_IVAL` items when
// the "verbose" feature is enabled.
mod tests {
use bbqueue::{BBBuffer, Error};
use rand::prelude::*;
use std::thread::spawn;
use std::time::{Duration, Instant};
#[cfg(feature = "travisci")]
const ITERS: usize = 10_000;
#[cfg(not(feature = "travisci"))]
const ITERS: usize = 10_000_000;
const RPT_IVAL: usize = ITERS / 100;
const QUEUE_SIZE: usize = 1024;
const TIMEOUT_NODATA: Duration = Duration::from_millis(10_000);
// Producer writes randomly-sized chunks via `grant_exact` (retrying with
// smaller sizes on failure); consumer reads one byte at a time and
// compares against the generated data.
#[test]
fn randomize_tx() {
#[cfg(feature = "travisci")]
#[cfg(feature = "verbose")]
println!("Hello Travis!");
#[cfg(feature = "verbose")]
println!("RTX: Generating Test Data...");
let gen_start = Instant::now();
let mut data = Vec::with_capacity(ITERS);
(0..ITERS).for_each(|_| data.push(rand::random::<u8>()));
let mut data_rx = data.clone();
let mut trng = thread_rng();
let mut chunks = vec![];
while !data.is_empty() {
let chunk_sz = trng.gen_range(1, (1024 - 1) / 2);
if chunk_sz > data.len() {
continue;
}
// Note: This gives back data in chunks in reverse order.
// We later .rev()` this to fix it
chunks.push(data.split_off(data.len() - chunk_sz));
}
#[cfg(feature = "verbose")]
println!("RTX: Generation complete: {:?}", gen_start.elapsed());
#[cfg(feature = "verbose")]
println!("RTX: Running test...");
static BB: BBBuffer<QUEUE_SIZE> = BBBuffer::new();
let (mut tx, mut rx) = BB.try_split().unwrap();
let mut last_tx = Instant::now();
let mut last_rx = last_tx.clone();
let start_time = last_tx.clone();
let tx_thr = spawn(move || {
let mut txd_ct = 0;
let mut txd_ivl = 0;
for (i, ch) in chunks.iter().rev().enumerate() {
let mut semichunk = ch.to_owned();
// #[cfg(feature = "verbose")] println!("semi: {:?}", semichunk);
while !semichunk.is_empty() {
if last_tx.elapsed() > TIMEOUT_NODATA {
panic!("tx timeout, iter {}", i);
}
// Try the largest grant that fits the remaining chunk, backing
// off one byte at a time until the queue accepts it.
'sizer: for sz in (1..(semichunk.len() + 1)).rev() {
if let Ok(mut gr) = tx.grant_exact(sz) {
// how do you do this idiomatically?
(0..sz).for_each(|idx| {
gr[idx] = semichunk.remove(0);
});
gr.commit(sz);
// Update tracking
last_tx = Instant::now();
txd_ct += sz;
if (txd_ct / RPT_IVAL) > txd_ivl {
txd_ivl = txd_ct / RPT_IVAL;
#[cfg(feature = "verbose")]
println!("{:?} - rtxtx: {}", start_time.elapsed(), txd_ct);
}
break 'sizer;
}
}
}
}
});
let rx_thr = spawn(move || {
let mut rxd_ct = 0;
let mut rxd_ivl = 0;
for (_idx, i) in data_rx.drain(..).enumerate() {
'inner: loop {
if last_rx.elapsed() > TIMEOUT_NODATA {
panic!("rx timeout, iter {}", i);
}
let gr = match rx.read() {
Ok(gr) => gr,
Err(Error::InsufficientSize) => continue 'inner,
Err(_) => panic!(),
};
let act = gr[0] as u8;
let exp = i;
if act != exp {
#[cfg(feature = "verbose")]
println!("act: {:?}, exp: {:?}", act, exp);
#[cfg(feature = "verbose")]
println!("len: {:?}", gr.len());
#[cfg(feature = "verbose")]
println!("{:?}", gr);
panic!("RX Iter: {}, mod: {}", i, i % 6);
}
gr.release(1);
// Update tracking
last_rx = Instant::now();
rxd_ct += 1;
if (rxd_ct / RPT_IVAL) > rxd_ivl {
rxd_ivl = rxd_ct / RPT_IVAL;
#[cfg(feature = "verbose")]
println!("{:?} - rtxrx: {}", start_time.elapsed(), rxd_ct);
}
break 'inner;
}
}
});
tx_thr.join().unwrap();
rx_thr.join().unwrap();
}
// Baseline test: single-byte grants on the producer side, batched reads
// on the consumer side, with a deterministic (i & 0xFF) payload.
#[test]
fn sanity_check() {
static BB: BBBuffer<QUEUE_SIZE> = BBBuffer::new();
let (mut tx, mut rx) = BB.try_split().unwrap();
let mut last_tx = Instant::now();
let mut last_rx = last_tx.clone();
let start_time = last_tx.clone();
let tx_thr = spawn(move || {
let mut txd_ct = 0;
let mut txd_ivl = 0;
for i in 0..ITERS {
'inner: loop {
if last_tx.elapsed() > TIMEOUT_NODATA {
panic!("tx timeout, iter {}", i);
}
match tx.grant_exact(1) {
Ok(mut gr) => {
gr[0] = (i & 0xFF) as u8;
gr.commit(1);
// Update tracking
last_tx = Instant::now();
txd_ct += 1;
if (txd_ct / RPT_IVAL) > txd_ivl {
txd_ivl = txd_ct / RPT_IVAL;
#[cfg(feature = "verbose")]
println!("{:?} - sctx: {}", start_time.elapsed(), txd_ct);
}
break 'inner;
}
Err(_) => {}
}
}
}
});
let rx_thr = spawn(move || {
let mut rxd_ct = 0;
let mut rxd_ivl = 0;
let mut i = 0;
while i < ITERS {
if last_rx.elapsed() > TIMEOUT_NODATA {
panic!("rx timeout, iter {}", i);
}
let gr = match rx.read() {
Ok(gr) => gr,
Err(Error::InsufficientSize) => continue,
Err(_) => panic!(),
};
// Consume the whole readable region in one pass.
for data in &*gr {
let act = *data;
let exp = (i & 0xFF) as u8;
if act != exp {
// #[cfg(feature = "verbose")] println!("baseptr: {}", panny);
#[cfg(feature = "verbose")]
println!("offendr: {:p}", &gr[0]);
#[cfg(feature = "verbose")]
println!("act: {:?}, exp: {:?}", act, exp);
#[cfg(feature = "verbose")]
println!("len: {:?}", gr.len());
#[cfg(feature = "verbose")]
println!("{:?}", &gr);
panic!("RX Iter: {}, mod: {}", i, i % 6);
}
i += 1;
}
let len = gr.len();
rxd_ct += len;
gr.release(len);
// Update tracking
last_rx = Instant::now();
if (rxd_ct / RPT_IVAL) > rxd_ivl {
rxd_ivl = rxd_ct / RPT_IVAL;
#[cfg(feature = "verbose")]
println!("{:?} - scrx: {}", start_time.elapsed(), rxd_ct);
}
}
});
tx_thr.join().unwrap();
rx_thr.join().unwrap();
}
// Exercises `grant_max_remaining` with randomly-sized requests; the
// producer may receive (and commit) a smaller grant than requested.
#[test]
fn sanity_check_grant_max() {
static BB: BBBuffer<QUEUE_SIZE> = BBBuffer::new();
let (mut tx, mut rx) = BB.try_split().unwrap();
#[cfg(feature = "verbose")]
println!("SCGM: Generating Test Data...");
let gen_start = Instant::now();
let mut data_tx = (0..ITERS).map(|i| (i & 0xFF) as u8).collect::<Vec<_>>();
let mut data_rx = data_tx.clone();
#[cfg(feature = "verbose")]
println!("SCGM: Generated Test Data in: {:?}", gen_start.elapsed());
#[cfg(feature = "verbose")]
println!("SCGM: Starting Test...");
let mut last_tx = Instant::now();
let mut last_rx = last_tx.clone();
let start_time = last_tx.clone();
let tx_thr = spawn(move || {
let mut txd_ct = 0;
let mut txd_ivl = 0;
let mut trng = thread_rng();
while !data_tx.is_empty() {
'inner: loop {
if last_tx.elapsed() > TIMEOUT_NODATA {
panic!("tx timeout");
}
match tx
.grant_max_remaining(trng.gen_range(QUEUE_SIZE / 3, (2 * QUEUE_SIZE) / 3))
{
Ok(mut gr) => {
let sz = ::std::cmp::min(data_tx.len(), gr.len());
// Both sides pop from the back of their Vec, so the
// streams compare element-for-element.
for i in 0..sz {
gr[i] = data_tx.pop().unwrap();
}
// Update tracking
last_tx = Instant::now();
txd_ct += sz;
if (txd_ct / RPT_IVAL) > txd_ivl {
txd_ivl = txd_ct / RPT_IVAL;
#[cfg(feature = "verbose")]
println!("{:?} - scgmtx: {}", start_time.elapsed(), txd_ct);
}
let len = gr.len();
gr.commit(len);
break 'inner;
}
Err(_) => {}
}
}
}
});
let rx_thr = spawn(move || {
let mut rxd_ct = 0;
let mut rxd_ivl = 0;
while !data_rx.is_empty() {
'inner: loop {
if last_rx.elapsed() > TIMEOUT_NODATA {
panic!("rx timeout");
}
let gr = match rx.read() {
Ok(gr) => gr,
Err(Error::InsufficientSize) => continue 'inner,
Err(_) => panic!(),
};
let act = gr[0];
let exp = data_rx.pop().unwrap();
if act != exp {
#[cfg(feature = "verbose")]
println!("offendr: {:p}", &gr[0]);
#[cfg(feature = "verbose")]
println!("act: {:?}, exp: {:?}", act, exp);
#[cfg(feature = "verbose")]
println!("len: {:?}", gr.len());
#[cfg(feature = "verbose")]
println!("{:?}", gr);
panic!("RX Iter: {}", rxd_ct);
}
gr.release(1);
// Update tracking
last_rx = Instant::now();
rxd_ct += 1;
if (rxd_ct / RPT_IVAL) > rxd_ivl {
rxd_ivl = rxd_ct / RPT_IVAL;
#[cfg(feature = "verbose")]
println!("{:?} - scgmrx: {}", start_time.elapsed(), rxd_ct);
}
break 'inner;
}
}
});
tx_thr.join().unwrap();
rx_thr.join().unwrap();
}
}
|
use super::*;
/*
fn lobby_helper(
db_conn: &DbConnection,
era: Era,
player_count: i32,
alias: &String,
author_id: UserId,
) -> Result<(), CommandError> {
*/
#[test]
// Creating one lobby should add exactly one game-server row and one
// lobby-state row, measured relative to the test DB's initial counts.
fn add_lobby() {
let db_conn = DbConnection::test();
let initial_server_count = db_conn.count_servers();
let initial_lobby_state_count = db_conn.count_lobby_state();
lobby_helper(&db_conn, Era::Early, 5, "foo", UserId(0)).unwrap();
assert_eq!(db_conn.count_servers(), initial_server_count + 1);
assert_eq!(db_conn.count_lobby_state(), initial_lobby_state_count + 1);
}
#[test]
// Two lobbies with distinct aliases owned by the same user should both
// insert successfully (aliases are UNIQUE; owners are not).
fn add_two_lobbies() {
let db_conn = DbConnection::test();
let initial_server_count = db_conn.count_servers();
let initial_lobby_state_count = db_conn.count_lobby_state();
lobby_helper(&db_conn, Era::Early, 5, "foo", UserId(4)).unwrap();
lobby_helper(&db_conn, Era::Early, 5, "bar", UserId(4)).unwrap();
assert_eq!(db_conn.count_servers(), initial_server_count + 2);
assert_eq!(db_conn.count_lobby_state(), initial_lobby_state_count + 2);
}
/*
sqlite> .schema
CREATE TABLE server_players (
server_id int NOT NULL REFERENCES game_servers(id),
player_id int NOT NULL REFERENCES players(id),
nation_id int NOT NULL,
CONSTRAINT server_nation_unique UNIQUE (server_id, nation_id)
);
CREATE TABLE started_servers (
id INTEGER NOT NULL PRIMARY KEY,
address VARCHAR(255) NOT NULL,
last_seen_turn int NOT NULL,
CONSTRAINT server_address_unique UNIQUE (address)
);
CREATE TABLE lobbies (
id INTEGER NOT NULL PRIMARY KEY,
owner_id int NOT NULL REFERENCES players(id),
player_count int NOT NULL,
era int NOT NULL
, description TEXT);
CREATE TABLE game_servers (
id INTEGER NOT NULL PRIMARY KEY,
alias VARCHAR(255) NOT NULL,
started_server_id int REFERENCES started_servers(id),
lobby_id int REFERENCES lobbies(id),
CONSTRAINT server_alias_unique UNIQUE (alias)
);
CREATE TABLE players (
id INTEGER NOT NULL PRIMARY KEY,
discord_user_id int NOT NULL,
turn_notifications BOOLEAN NOT NULL,
CONSTRAINT discord_user_id_unique UNIQUE(discord_user_id)
);
CREATE TABLE __migrant_migrations(tag text unique);
*/
|
use druid::shell::{runloop, WindowBuilder};
use druid::widget::{ActionWrapper, Button, Column, Flex, Label, Padding, Row, TextBox};
use druid::{BoxedWidget, Data, UiMain, UiState, WidgetPod};
use druid::kurbo::{Rect, Size};
use druid::{
Action, BaseState, BoxConstraints, Env, Event, EventCtx, LayoutCtx, Lens, LensWrap, PaintCtx,
UpdateCtx, Widget,
};
use std::collections::BTreeMap;
/// A single todo-list entry.
#[derive(Clone, Default, PartialEq, Debug)]
struct TodoItem {
    /// User-visible text of the todo.
    title: String,
    /// True once the item has been checked off.
    complete: bool,
}
impl TodoItem {
fn new(title: impl Into<String>) -> Self {
Self {
title: title.into(),
..Default::default()
}
}
}
impl Data for TodoItem {
    /// druid change-detection hook: two items are "the same" iff they are
    /// structurally equal (delegates to the derived PartialEq).
    fn same(&self, other: &Self) -> bool {
        self == other
    }
}
/// Which subset of todos the list currently displays.
#[derive(Clone, PartialEq, Debug)]
enum TodoFilter {
    /// Show every todo regardless of completion state.
    All,
    /// Show only todos that are not yet complete.
    Active,
    /// Show only completed todos.
    Completed,
}
impl Default for TodoFilter {
    /// A fresh UI starts with no filtering applied.
    fn default() -> Self {
        TodoFilter::All
    }
}
/// Application state for the todo UI.
#[derive(Clone, Default, Debug)]
struct TodoState {
    /// All todos keyed by their id; BTreeMap keeps display order stable.
    todos: BTreeMap<usize, TodoItem>,
    /// Currently selected display filter.
    filter: TodoFilter,
    /// Text of the in-progress (not yet added) todo, bound to the textbox.
    next_todo: String,
    /// Monotonically increasing id counter; incremented before each insert,
    /// so ids are never reused.
    next_todo_id: usize,
}
impl Data for TodoState {
    /// druid change-detection hook: states are "the same" when they hold the
    /// same todos (compared pairwise via `Data::same`), the same filter, and
    /// the same pending-todo text.
    ///
    /// NOTE(review): `next_todo_id` is intentionally(?) excluded from the
    /// comparison — a bump of the counter alone does not trigger a repaint.
    /// Confirm this is deliberate.
    fn same(&self, other: &Self) -> bool {
        // `all` short-circuits on the first mismatch; the original
        // `fold(true, ..)` walked every pair regardless (clippy: unnecessary_fold).
        self.todos.len() == other.todos.len()
            && self
                .todos
                .values()
                .zip(other.todos.values())
                .all(|(a, b)| a.same(b))
            && self.filter == other.filter
            && self.next_todo == other.next_todo
    }
}
impl ListData<TodoItem> for TodoState {
    /// Stores `item` under a fresh id (the counter is bumped first, so ids
    /// start at 1 and are never reused).
    fn push(&mut self, item: TodoItem) {
        self.next_todo_id += 1;
        self.todos.insert(self.next_todo_id, item);
    }
    /// Returns a copy of the todos that pass the currently selected filter.
    fn items(&self) -> BTreeMap<usize, TodoItem> {
        // One predicate instead of three near-identical match arms; for
        // `All` this clones the whole map, just like `self.todos.clone()`.
        let visible = |done: bool| match self.filter {
            TodoFilter::All => true,
            TodoFilter::Active => !done,
            TodoFilter::Completed => done,
        };
        self.todos
            .iter()
            .filter(|(_, todo)| visible(todo.complete))
            .map(|(&id, todo)| (id, todo.clone()))
            .collect()
    }
    /// Deletes the todo with the given id; a missing id is a no-op.
    fn remove(&mut self, id: usize) {
        self.todos.remove(&id);
    }
}
/// Builds and runs the todo window: an "add" form on top, the filtered list
/// in the middle, and the three filter buttons at the bottom.
fn main() {
    druid_shell::init();
    let mut run_loop = runloop::RunLoop::new();
    let mut builder = WindowBuilder::new();
    builder.set_title("Todo");
    let mut col = Column::new();
    // --- "add new todo" form: textbox (lensed to next_todo) + Add button ---
    let textbox = TextBox::new(200.);
    let add_button = ActionWrapper::new(
        Button::new("Add New"),
        move |state: &mut TodoState, _env| {
            // Move the pending text into a new item and clear the textbox.
            state.push(TodoItem::new(state.next_todo.clone()));
            state.next_todo = "".into();
        },
    );
    let mut new_todo_form = Row::new();
    new_todo_form.add_child(
        Padding::uniform(5.0, LensWrap::new(textbox, NextTodoLens)),
        4.0,
    );
    new_todo_form.add_child(Padding::uniform(5.0, add_button), 1.0);
    col.add_child(new_todo_form, 1.0);
    // --- the list itself; this closure builds one row per visible todo id ---
    let list = List::new(move |data: &TodoState, id: usize| {
        let mut row = Row::new();
        // Toggle button: shows a check mark when complete, "O" otherwise.
        row.add_child(
            ActionWrapper::new(
                Button::new(if data.todos[&id].complete {
                    "✓".to_string()
                } else {
                    "O".to_string()
                }),
                move |state: &mut TodoState, _env| {
                    // get_mut: the id may have been removed since the row was built.
                    state
                        .todos
                        .get_mut(&id)
                        .map(|item| item.complete = !item.complete);
                },
            ),
            1.0,
        );
        row.add_child(
            Label::new(format!("{}", data.todos[&id].title.clone())),
            9.0,
        );
        // Delete button lives in the ListRow's hover-only slot.
        let delete_button =
            ActionWrapper::new(Button::new("D"), move |state: &mut TodoState, _env| {
                state.remove(id);
            });
        ListRow::new(
            WidgetPod::new(row).boxed(),
            WidgetPod::new(delete_button).boxed(),
        )
    });
    col.add_child(list, 5.0);
    // --- filter buttons: All / Active / Completed ---
    let mut filter_row = Row::new();
    let all_button = ActionWrapper::new(Button::new("All"), move |state: &mut TodoState, _env| {
        state.filter = TodoFilter::All;
    });
    filter_row.add_child(Padding::uniform(5.0, all_button), 1.0);
    let active_button =
        ActionWrapper::new(Button::new("Active"), move |state: &mut TodoState, _env| {
            state.filter = TodoFilter::Active;
        });
    filter_row.add_child(Padding::uniform(5.0, active_button), 1.0);
    let completed_button = ActionWrapper::new(
        Button::new("Completed"),
        move |state: &mut TodoState, _env| {
            state.filter = TodoFilter::Completed;
        },
    );
    filter_row.add_child(Padding::uniform(5.0, completed_button), 1.0);
    col.add_child(filter_row, 1.0);
    // --- hand the widget tree and initial state to druid and run ---
    let state = TodoState::default();
    let state = UiState::new(col, state);
    builder.set_handler(Box::new(UiMain::new(state)));
    builder.build().unwrap().show();
    run_loop.run();
}
// Lenses
/// Lens exposing `TodoState::next_todo` to the textbox widget.
struct NextTodoLens;
impl Lens<TodoState, String> for NextTodoLens {
    /// Borrows the pending-todo text for reading.
    fn get<'a>(&self, data: &'a TodoState) -> &'a String {
        &data.next_todo
    }
    /// Runs `f` with mutable access to the pending-todo text.
    fn with_mut<V, F: FnOnce(&mut String) -> V>(&self, data: &mut TodoState, f: F) -> V {
        f(&mut data.next_todo)
    }
}
// List widgets
/// Contract the `List` widget needs from its backing data: an id-keyed
/// collection that can be appended to, filtered/snapshotted, and pruned.
trait ListData<T: Data> {
    /// Appends `item` under a freshly allocated id.
    fn push(&mut self, item: T);
    /// Returns a (possibly filtered) snapshot of the items, keyed by id.
    fn items(&self) -> BTreeMap<usize, T>;
    /// Removes the item with the given id.
    fn remove(&mut self, index: usize);
}
/// One row of the list: the main content plus a delete button that is only
/// painted while the pointer hovers over the row.
struct ListRow<T: Data> {
    /// Main row content (toggle button + label).
    child: BoxedWidget<T>,
    /// Hover-only delete button, laid out in a strip on the right edge.
    delete_button: BoxedWidget<T>,
    /// Whether the pointer is currently over this row.
    hot: bool,
}
impl<T: Data> ListRow<T> {
    /// Wraps the two pre-boxed widgets; rows start in the non-hovered state.
    fn new(child: BoxedWidget<T>, delete_button: BoxedWidget<T>) -> Self {
        Self {
            child,
            delete_button,
            hot: false,
        }
    }
}
impl<T: Data> Widget<T> for ListRow<T> {
    /// Paints the row content; the delete button is drawn only while the
    /// pointer hovers over the row.
    fn paint(&mut self, paint_ctx: &mut PaintCtx, _base_state: &BaseState, data: &T, env: &Env) {
        self.child.paint_with_offset(paint_ctx, data, env);
        if self.hot {
            self.delete_button.paint_with_offset(paint_ctx, data, env);
        }
    }
    /// Gives the child everything except a 30px strip on the right, which is
    /// reserved for the delete button.
    ///
    /// NOTE(review): `delete_button.layout()` is never invoked, only its
    /// layout rect is set — confirm the button sizes itself without it.
    fn layout(
        &mut self,
        layout_ctx: &mut LayoutCtx,
        bc: &BoxConstraints,
        data: &T,
        env: &Env,
    ) -> Size {
        // carve out 30px for the delete button
        let child_rect = Rect::new(0.0, 0.0, bc.max().width - 30.0, bc.max().height);
        self.child.set_layout_rect(child_rect);
        self.child.layout(
            layout_ctx,
            &BoxConstraints::tight(child_rect.size()),
            data,
            env,
        );
        self.delete_button.set_layout_rect(Rect::new(
            bc.max().width - 30.0,
            0.0,
            bc.max().width,
            bc.max().height,
        ));
        bc.max()
    }
    /// Tracks hover state, then forwards the event to both children; the
    /// delete button's action (if any) is the row's action.
    fn event(
        &mut self,
        event: &Event,
        ctx: &mut EventCtx,
        data: &mut T,
        env: &Env,
    ) -> Option<Action> {
        // Single-variant match replaced by `if let` (clippy: single_match).
        if let Event::HotChanged(hot) = event {
            self.hot = *hot;
            ctx.invalidate();
        }
        self.child.event(event, ctx, data, env);
        self.delete_button.event(event, ctx, data, env)
    }
    /// Propagates data updates to both children.
    fn update(&mut self, ctx: &mut UpdateCtx, _old_data: Option<&T>, data: &T, env: &Env) {
        self.child.update(ctx, data, env);
        self.delete_button.update(ctx, data, env);
    }
}
/// Widget that renders one child row per item of a `ListData` collection.
struct List<T: Data, L: ListData<T> + Data, W: Widget<L>> {
    /// Current row widgets; rebuilt from scratch on every data update.
    /// (`Column` is presumably an alias for a vertical `Flex` — field type
    /// is `Flex<L>` but it is constructed via `Column::new()`.)
    children: Flex<L>,
    /// Factory producing the row widget for a given item id.
    child_creator: Box<dyn Fn(&L, usize) -> W>,
    /// Marks the otherwise-unused item type parameter `T`.
    phantom: std::marker::PhantomData<T>,
}
impl<T: Data, L: ListData<T> + Data, W: Widget<L>> List<T, L, W> {
    /// Creates an empty list whose rows will be built by `child_creator`.
    fn new(child_creator: impl Fn(&L, usize) -> W + 'static) -> Self {
        Self {
            children: Column::new(),
            child_creator: Box::new(child_creator),
            phantom: Default::default(),
        }
    }
}
impl<T: Data + 'static, L: ListData<T> + Data + 'static, W: Widget<L> + 'static> Widget<L>
    for List<T, L, W>
{
    /// Delegates painting to the inner column of rows.
    fn paint(&mut self, paint_ctx: &mut PaintCtx, base_state: &BaseState, data: &L, env: &Env) {
        self.children.paint(paint_ctx, base_state, data, env)
    }
    /// Delegates layout to the inner column of rows.
    fn layout(
        &mut self,
        layout_ctx: &mut LayoutCtx,
        bc: &BoxConstraints,
        data: &L,
        env: &Env,
    ) -> Size {
        self.children.layout(layout_ctx, bc, data, env)
    }
    /// Delegates events to the inner column of rows.
    fn event(
        &mut self,
        event: &Event,
        ctx: &mut EventCtx,
        data: &mut L,
        env: &Env,
    ) -> Option<Action> {
        self.children.event(event, ctx, data, env)
    }
    /// Rebuilds every row from the current (filtered) item set on each
    /// update, then repaints. Simple but O(items) per update; rows carry no
    /// widget state that must survive, so a full rebuild is acceptable here.
    fn update(&mut self, ctx: &mut UpdateCtx, old_data: Option<&L>, data: &L, env: &Env) {
        self.children = Column::new();
        for (id, _) in data.items() {
            self.children.add_child((self.child_creator)(data, id), 1.0);
        }
        ctx.invalidate();
        self.children.update(ctx, old_data, data, env);
    }
}
|
use std::time::Duration;
use color_eyre::Result;
use futures::Future;
use prost::bytes::Buf;
use prost::Message;
/// Convenience conversions between protobuf messages and raw bytes.
pub trait MessageExt<M> {
    /// Decodes a message of type `M` from a byte slice.
    fn try_parse<B: AsRef<[u8]>>(b: B) -> color_eyre::Result<M>;
    /// Encodes `self` into a freshly allocated byte buffer.
    fn to_bytes(&self) -> Vec<u8>;
}
/// Decoding directly from any `prost` byte buffer.
pub trait BufExt {
    /// Consumes the buffer and decodes a message of type `M` from it.
    fn parse_into<M: Message + Default>(self) -> Result<M>;
}
impl<B> BufExt for B
where
    B: Buf,
{
    fn parse_into<M: Message + Default>(self) -> Result<M> {
        // prost's DecodeError is converted into the crate-wide error type.
        M::decode(self).map_err(Into::into)
    }
}
/// "Log and forget" handling for results whose value is not needed.
pub trait ResultExt: Sized {
    /// What `log_err` evaluates to (unit for the blanket impl below).
    type Output;
    /// Consumes the result, logging the error case instead of propagating it.
    fn log_err(self) -> Self::Output;
}
impl<T, E> ResultExt for Result<T, E>
where
E: std::fmt::Display,
{
type Output = ();
fn log_err(self) {
match self {
Ok(_) => {}
Err(err) => log::error!("Error occurred: {}", err),
}
}
}
/// Method-call sugar for `tokio::time::timeout`.
pub trait FutureExt: Sized {
    /// Wraps the future so it errors out if it does not complete within `duration`.
    fn timeout(self, duration: Duration) -> tokio::time::Timeout<Self>;
}
impl<F> FutureExt for F
where
    F: Future,
{
    fn timeout(self, duration: Duration) -> tokio::time::Timeout<Self> {
        tokio::time::timeout(duration, self)
    }
}
impl<M: Message + Default> MessageExt<M> for M {
    /// Decodes an `M` from raw bytes, converting prost's decode error into
    /// the crate-wide error type.
    fn try_parse<B: AsRef<[u8]>>(buf: B) -> color_eyre::Result<M> {
        M::decode(buf.as_ref()).map_err(Into::into)
    }
    /// Encodes `self` into a pre-sized byte buffer.
    fn to_bytes(&self) -> Vec<u8> {
        let mut buf = Vec::with_capacity(self.encoded_len());
        // Encoding into a Vec cannot run out of capacity (Vec grows on
        // demand), so a failure here would be a prost bug — state that
        // invariant instead of a bare unwrap.
        self.encode(&mut buf)
            .expect("encoding a prost message into a Vec must not fail");
        buf
    }
}
|
//
// RaphPdf
//
// @copyright Copyright (c) 2020-2020 Grégory Muller
// @license https://www.apache.org/licenses/LICENSE-2.0
// @link https://github.com/debitux/raphpdf
// @since 0.1.0
//
extern crate pdf_form_ids;
pub mod iris;
pub mod symag;
use iris::{fill_pdf_iris, read_file_iris, Iris};
use symag::{fill_pdf_sygma, read_file_sygma, Symag};
use pdf_form_ids::Form;
use std::error::Error;
use std::fs;
use std::io::{stderr, Result, Write};
use std::path::{Path, PathBuf};
/// Prints an error and its entire `source()` chain to stderr.
/// Write failures are deliberately ignored (best-effort diagnostics).
fn print_error(err: &dyn Error) {
    let _ = writeln!(stderr(), "erreur : {}", err);
    let mut current = err;
    while let Some(cause) = current.source() {
        let _ = writeln!(stderr(), "cause : {}", cause);
        current = cause;
    }
}
/// Debug helper: prints every form-field name of a PDF template together
/// with its index. Kept with a leading underscore because it is only
/// enabled by hand (see the commented calls in `main`).
fn _affiche_champ(rep: &str) {
    let path = Path::new(&rep);
    let form = Form::load(&path).expect("error don't load the gabarit");
    let field_names = form.get_all_names();
    // `enumerate` replaces the manual `i = i + 1` counter.
    for (i, ele) in field_names.into_iter().enumerate() {
        println!("{}: {:?}", i, ele);
    }
}
/// Scans the `original/` directory and dispatches each entry into the SYMAG
/// or IRIS work list based on its file name. Entries matching neither
/// pattern are silently ignored.
///
/// # Errors
/// Propagates any I/O error from reading the directory or its entries.
fn recup(symag_liste: &mut Vec<PathBuf>, iris_liste: &mut Vec<PathBuf>) -> Result<()> {
    for entry in fs::read_dir("original/")? {
        let path = entry?.path();
        // `to_string_lossy` performs the same lossy UTF-8 conversion as the
        // original `as_path().display().to_string()` detour, without the
        // intermediate Display adapter.
        let name = path.to_string_lossy().into_owned();
        if name.contains("symag") {
            symag_liste.push(path);
        } else if name.contains("iris") {
            iris_liste.push(path);
        }
    }
    Ok(())
}
/// Batch driver: collects input files from `original/`, then fills the SYMAG
/// and IRIS PDF templates from each file in turn. Any failure prints the
/// error chain and aborts the whole run with exit code 1.
fn main() {
    //_affiche_champ("GabaritSymag.pdf");
    //_affiche_champ("GabaritIris.pdf");
    let mut sym = Symag::new();
    let mut iri = Iris::new();
    let mut symag_liste = Vec::new();
    let mut iris_liste = Vec::new();
    // Partition the files in original/ into the two work lists.
    if let Err(err) = recup(&mut symag_liste, &mut iris_liste) {
        print_error(&err);
        std::process::exit(1);
    }
    // SYMAG pass: read each input, fill the PDF, then reset the shared
    // accumulator so the next file starts clean.
    for entry in symag_liste {
        if let Err(err) = read_file_sygma(&mut sym, &entry) {
            print_error(&err);
            std::process::exit(1);
        }
        if let Err(err) = fill_pdf_sygma(&mut sym) {
            print_error(&err);
            std::process::exit(1);
        }
        symag::clear(&mut sym);
    }
    // IRIS pass: same read / fill / reset cycle with the IRIS template.
    for entry in iris_liste {
        if let Err(err) = read_file_iris(&mut iri, &entry) {
            print_error(&err);
            std::process::exit(1);
        }
        if let Err(err) = fill_pdf_iris(&mut iri) {
            print_error(&err);
            std::process::exit(1);
        }
        iris::clear(&mut iri);
    }
}
|
use super::TreeNode;
use std::cell::RefCell;
use std::rc::Rc;
/// Binary-tree codec using a depth-first, pre-order text encoding.
pub struct CodecDFS;
impl CodecDFS {
    pub fn new() -> Self {
        CodecDFS {}
    }
    /// Depth-first, pre-order serialization.
    ///
    /// Grammar: `T -> 'None' | val,T,T | (val)`
    /// `(val)` marks a leaf (node without children); `None` marks an absent
    /// node. Leaves are parenthesized so the decoder knows not to expect two
    /// child productions after them.
    fn serialize(&self, root: Option<Rc<RefCell<TreeNode>>>) -> String {
        let node = match root {
            None => return String::from("None"),
            Some(node) => node,
        };
        let n = node.borrow();
        if n.left.is_none() && n.right.is_none() {
            return format!("({})", n.val);
        }
        // `Self::serialize(&self, ..)` replaced by a plain method call.
        format!(
            "{},{},{}",
            n.val,
            self.serialize(n.left.clone()),
            self.serialize(n.right.clone())
        )
    }
    /// Recursive-descent parser over the comma-separated token list.
    /// `cur` is the index of the next unconsumed token.
    /// (`&[&str]` instead of `&Vec<&str>`; the method is private.)
    fn deserialize_impl(&self, data: &[&str], cur: &mut usize) -> Option<Rc<RefCell<TreeNode>>> {
        // Out of tokens: treat as an absent subtree.
        if *cur == data.len() {
            return None;
        }
        let token = data[*cur];
        if token == "None" {
            *cur += 1;
            return None;
        } else if token.starts_with('(') {
            // Leaf: parse the digits between the parentheses, honoring an
            // optional leading '-'.
            let bytes = token.as_bytes();
            let mut ans = 0;
            let mut sign = 1;
            for c in &bytes[1..bytes.len() - 1] {
                if *c == b'-' {
                    sign = -1;
                    continue;
                }
                ans = ans * 10 + (*c - b'0') as i32;
            }
            *cur += 1;
            return Some(Rc::new(RefCell::new(TreeNode::new(ans * sign))));
        }
        // Interior node: parse its value, then recursively parse the two
        // child productions that must follow it in pre-order.
        let mut val = 0;
        let mut sign = 1;
        for c in token.as_bytes() {
            if *c == b'-' {
                sign = -1;
                continue;
            }
            val = val * 10 + (*c - b'0') as i32;
        }
        let node = Rc::new(RefCell::new(TreeNode::new(val * sign)));
        *cur += 1;
        let left = self.deserialize_impl(data, cur);
        node.borrow_mut().left = left;
        let right = self.deserialize_impl(data, cur);
        node.borrow_mut().right = right;
        Some(node)
    }
    /// Tokenizes on ',' and hands off to the recursive parser.
    fn deserialize(&self, data: String) -> Option<Rc<RefCell<TreeNode>>> {
        // char pattern instead of a one-character &str (clippy: single_char_pattern)
        let vals: Vec<_> = data.split(',').collect();
        let mut cur = 0;
        self.deserialize_impl(&vals, &mut cur)
    }
}
impl super::TreeCodec for CodecDFS {
    /// Trait adapter: forwards to the inherent pre-order serializer
    /// (inherent methods take precedence over the trait method here).
    fn serialize(&self, root: Option<Rc<RefCell<TreeNode>>>) -> String {
        self.serialize(root)
    }
    /// Trait adapter: forwards to the inherent recursive-descent parser.
    fn deserialize(&self, data: String) -> Option<Rc<RefCell<TreeNode>>> {
        self.deserialize(data)
    }
}
|
use libp2p::mdns::{Mdns, MdnsEvent};
use libp2p::swarm::NetworkBehaviourEventProcess;
use libp2p::NetworkBehaviour;
use tokio::prelude::{AsyncRead, AsyncWrite};
use crate::behavior::{Pbft, PbftEvent};
/// Aggregate libp2p network behaviour: mDNS peer discovery combined with the
/// PBFT consensus behaviour. The derive wires both sub-behaviours' events to
/// the `NetworkBehaviourEventProcess` impls below.
#[derive(NetworkBehaviour)]
pub struct NetworkBehaviourComposer<TSubstream: AsyncRead + AsyncWrite> {
    /// Local-network peer discovery via multicast DNS.
    mdns: Mdns<TSubstream>,
    /// Consensus behaviour; public so callers can drive it directly.
    pub pbft: Pbft<TSubstream>,
}
impl<TSubstream: AsyncRead + AsyncWrite> NetworkBehaviourComposer<TSubstream> {
    /// Combines pre-built mDNS and PBFT behaviours into one composer.
    pub fn new(mdns: Mdns<TSubstream>, pbft: Pbft<TSubstream>) -> Self {
        Self {
            mdns,
            pbft,
        }
    }
}
impl<TSubstream: AsyncRead + AsyncWrite> NetworkBehaviourEventProcess<MdnsEvent> for NetworkBehaviourComposer<TSubstream>
{
    /// Handles mDNS results: newly discovered peers are registered with the
    /// PBFT behaviour; expired peers are currently only logged (removal is
    /// still a TODO).
    fn inject_event(&mut self, event: MdnsEvent) {
        match event {
            MdnsEvent::Discovered(list) => {
                for (peer_id, address) in list {
                    // Register only peers we have not seen before.
                    if !self.pbft.has_peer(&peer_id) {
                        println!("[NetworkBehaviourComposer::inject_event] [MdnsEvent::Discovered] The node has been discovered: {:?}", address);
                        self.pbft.add_peer(&peer_id, &address);
                    }
                }
            },
            MdnsEvent::Expired(list) => {
                for (peer_id, addr) in list {
                    if self.pbft.has_peer(&peer_id) {
                        println!("[NetworkBehaviourComposer::inject_event] [MdnsEvent::Expired] The node has been expired: {:?}", addr);
                        // TODO
                    }
                }
            }
        }
    }
}
impl<TSubstream: AsyncRead + AsyncWrite> NetworkBehaviourEventProcess<PbftEvent> for NetworkBehaviourComposer<TSubstream>
{
    /// PBFT events are only traced for now; no reaction is implemented.
    fn inject_event(&mut self, event: PbftEvent) {
        println!("inject_event : PbftEvent: {:?}", event);
    }
}
|
use serde::{Deserialize, Serialize};
/// A user comment as exchanged with the backend (camelCase JSON keys).
///
/// The container-level `rename_all` replaces the three per-field renames;
/// the wire format is unchanged (`id`, `content`, `user` are identical in
/// camelCase, the snake_case fields map to `parentId`/`createdAt`/`editedAt`
/// exactly as before).
#[derive(Debug, Default, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct Comment {
    /// Unique comment id.
    pub id: i64,
    /// Id of the parent comment (threading).
    pub parent_id: i64,
    /// Comment body text.
    pub content: String,
    /// Creation timestamp string — format set by the server; confirm with the API.
    pub created_at: String,
    /// Last-edit timestamp string — same caveat as `created_at`.
    pub edited_at: String,
    /// Author of the comment.
    pub user: super::User,
}
|
pub const WORDLIST: &'static [&'static str] = &[
"абажур",
"абзац",
"абонент",
"абрикос",
"абсурд",
"авангард",
"август",
"авиация",
"авоська",
"автор",
"агат",
"агент",
"агитатор",
"агнец",
"агония",
"агрегат",
"адвокат",
"адмирал",
"адрес",
"ажиотаж",
"азарт",
"азбука",
"азот",
"аист",
"айсберг",
"академия",
"аквариум",
"аккорд",
"акробат",
"аксиома",
"актер",
"акула",
"акция",
"алгоритм",
"алебарда",
"аллея",
"алмаз",
"алтарь",
"алфавит",
"алхимик",
"алый",
"альбом",
"алюминий",
"амбар",
"аметист",
"амнезия",
"ампула",
"амфора",
"анализ",
"ангел",
"анекдот",
"анимация",
"анкета",
"аномалия",
"ансамбль",
"антенна",
"апатия",
"апельсин",
"апофеоз",
"аппарат",
"апрель",
"аптека",
"арабский",
"арбуз",
"аргумент",
"арест",
"ария",
"арка",
"армия",
"аромат",
"арсенал",
"артист",
"архив",
"аршин",
"асбест",
"аскетизм",
"аспект",
"ассорти",
"астроном",
"асфальт",
"атака",
"ателье",
"атлас",
"атом",
"атрибут",
"аудитор",
"аукцион",
"аура",
"афера",
"афиша",
"ахинея",
"ацетон",
"аэропорт",
"бабушка",
"багаж",
"бадья",
"база",
"баклажан",
"балкон",
"бампер",
"банк",
"барон",
"бассейн",
"батарея",
"бахрома",
"башня",
"баян",
"бегство",
"бедро",
"бездна",
"бекон",
"белый",
"бензин",
"берег",
"беседа",
"бетонный",
"биатлон",
"библия",
"бивень",
"бигуди",
"бидон",
"бизнес",
"бикини",
"билет",
"бинокль",
"биология",
"биржа",
"бисер",
"битва",
"бицепс",
"благо",
"бледный",
"близкий",
"блок",
"блуждать",
"блюдо",
"бляха",
"бобер",
"богатый",
"бодрый",
"боевой",
"бокал",
"большой",
"борьба",
"босой",
"ботинок",
"боцман",
"бочка",
"боярин",
"брать",
"бревно",
"бригада",
"бросать",
"брызги",
"брюки",
"бублик",
"бугор",
"будущее",
"буква",
"бульвар",
"бумага",
"бунт",
"бурный",
"бусы",
"бутылка",
"буфет",
"бухта",
"бушлат",
"бывалый",
"быль",
"быстрый",
"быть",
"бюджет",
"бюро",
"бюст",
"вагон",
"важный",
"ваза",
"вакцина",
"валюта",
"вампир",
"ванная",
"вариант",
"вассал",
"вата",
"вафля",
"вахта",
"вдова",
"вдыхать",
"ведущий",
"веер",
"вежливый",
"везти",
"веко",
"великий",
"вена",
"верить",
"веселый",
"ветер",
"вечер",
"вешать",
"вещь",
"веяние",
"взаимный",
"взбучка",
"взвод",
"взгляд",
"вздыхать",
"взлетать",
"взмах",
"взнос",
"взор",
"взрыв",
"взывать",
"взятка",
"вибрация",
"визит",
"вилка",
"вино",
"вирус",
"висеть",
"витрина",
"вихрь",
"вишневый",
"включать",
"вкус",
"власть",
"влечь",
"влияние",
"влюблять",
"внешний",
"внимание",
"внук",
"внятный",
"вода",
"воевать",
"вождь",
"воздух",
"войти",
"вокзал",
"волос",
"вопрос",
"ворота",
"восток",
"впадать",
"впускать",
"врач",
"время",
"вручать",
"всадник",
"всеобщий",
"вспышка",
"встреча",
"вторник",
"вулкан",
"вурдалак",
"входить",
"въезд",
"выбор",
"вывод",
"выгодный",
"выделять",
"выезжать",
"выживать",
"вызывать",
"выигрыш",
"вылезать",
"выносить",
"выпивать",
"высокий",
"выходить",
"вычет",
"вышка",
"выяснять",
"вязать",
"вялый",
"гавань",
"гадать",
"газета",
"гаишник",
"галстук",
"гамма",
"гарантия",
"гастроли",
"гвардия",
"гвоздь",
"гектар",
"гель",
"генерал",
"геолог",
"герой",
"гешефт",
"гибель",
"гигант",
"гильза",
"гимн",
"гипотеза",
"гитара",
"глаз",
"глина",
"глоток",
"глубокий",
"глыба",
"глядеть",
"гнать",
"гнев",
"гнить",
"гном",
"гнуть",
"говорить",
"годовой",
"голова",
"гонка",
"город",
"гость",
"готовый",
"граница",
"грех",
"гриб",
"громкий",
"группа",
"грызть",
"грязный",
"губа",
"гудеть",
"гулять",
"гуманный",
"густой",
"гуща",
"давать",
"далекий",
"дама",
"данные",
"дарить",
"дать",
"дача",
"дверь",
"движение",
"двор",
"дебют",
"девушка",
"дедушка",
"дежурный",
"дезертир",
"действие",
"декабрь",
"дело",
"демократ",
"день",
"депутат",
"держать",
"десяток",
"детский",
"дефицит",
"дешевый",
"деятель",
"джаз",
"джинсы",
"джунгли",
"диалог",
"диван",
"диета",
"дизайн",
"дикий",
"динамика",
"диплом",
"директор",
"диск",
"дитя",
"дичь",
"длинный",
"дневник",
"добрый",
"доверие",
"договор",
"дождь",
"доза",
"документ",
"должен",
"домашний",
"допрос",
"дорога",
"доход",
"доцент",
"дочь",
"дощатый",
"драка",
"древний",
"дрожать",
"друг",
"дрянь",
"дубовый",
"дуга",
"дудка",
"дукат",
"дуло",
"думать",
"дупло",
"дурак",
"дуть",
"духи",
"душа",
"дуэт",
"дымить",
"дыня",
"дыра",
"дыханье",
"дышать",
"дьявол",
"дюжина",
"дюйм",
"дюна",
"дядя",
"дятел",
"егерь",
"единый",
"едкий",
"ежевика",
"ежик",
"езда",
"елка",
"емкость",
"ерунда",
"ехать",
"жадный",
"жажда",
"жалеть",
"жанр",
"жара",
"жать",
"жгучий",
"ждать",
"жевать",
"желание",
"жемчуг",
"женщина",
"жертва",
"жесткий",
"жечь",
"живой",
"жидкость",
"жизнь",
"жилье",
"жирный",
"житель",
"журнал",
"жюри",
"забывать",
"завод",
"загадка",
"задача",
"зажечь",
"зайти",
"закон",
"замечать",
"занимать",
"западный",
"зарплата",
"засыпать",
"затрата",
"захват",
"зацепка",
"зачет",
"защита",
"заявка",
"звать",
"звезда",
"звонить",
"звук",
"здание",
"здешний",
"здоровье",
"зебра",
"зевать",
"зеленый",
"земля",
"зенит",
"зеркало",
"зефир",
"зигзаг",
"зима",
"зиять",
"злак",
"злой",
"змея",
"знать",
"зной",
"зодчий",
"золотой",
"зомби",
"зона",
"зоопарк",
"зоркий",
"зрачок",
"зрение",
"зритель",
"зубной",
"зыбкий",
"зять",
"игла",
"иголка",
"играть",
"идея",
"идиот",
"идол",
"идти",
"иерархия",
"избрать",
"известие",
"изгонять",
"издание",
"излагать",
"изменять",
"износ",
"изоляция",
"изрядный",
"изучать",
"изымать",
"изящный",
"икона",
"икра",
"иллюзия",
"имбирь",
"иметь",
"имидж",
"иммунный",
"империя",
"инвестор",
"индивид",
"инерция",
"инженер",
"иномарка",
"институт",
"интерес",
"инфекция",
"инцидент",
"ипподром",
"ирис",
"ирония",
"искать",
"история",
"исходить",
"исчезать",
"итог",
"июль",
"июнь",
"кабинет",
"кавалер",
"кадр",
"казарма",
"кайф",
"кактус",
"калитка",
"камень",
"канал",
"капитан",
"картина",
"касса",
"катер",
"кафе",
"качество",
"каша",
"каюта",
"квартира",
"квинтет",
"квота",
"кедр",
"кекс",
"кенгуру",
"кепка",
"керосин",
"кетчуп",
"кефир",
"кибитка",
"кивнуть",
"кидать",
"километр",
"кино",
"киоск",
"кипеть",
"кирпич",
"кисть",
"китаец",
"класс",
"клетка",
"клиент",
"клоун",
"клуб",
"клык",
"ключ",
"клятва",
"книга",
"кнопка",
"кнут",
"князь",
"кобура",
"ковер",
"коготь",
"кодекс",
"кожа",
"козел",
"койка",
"коктейль",
"колено",
"компания",
"конец",
"копейка",
"короткий",
"костюм",
"котел",
"кофе",
"кошка",
"красный",
"кресло",
"кричать",
"кровь",
"крупный",
"крыша",
"крючок",
"кубок",
"кувшин",
"кудрявый",
"кузов",
"кукла",
"культура",
"кумир",
"купить",
"курс",
"кусок",
"кухня",
"куча",
"кушать",
"кювет",
"лабиринт",
"лавка",
"лагерь",
"ладонь",
"лазерный",
"лайнер",
"лакей",
"лампа",
"ландшафт",
"лапа",
"ларек",
"ласковый",
"лауреат",
"лачуга",
"лаять",
"лгать",
"лебедь",
"левый",
"легкий",
"ледяной",
"лежать",
"лекция",
"лента",
"лепесток",
"лесной",
"лето",
"лечь",
"леший",
"лживый",
"либерал",
"ливень",
"лига",
"лидер",
"ликовать",
"лиловый",
"лимон",
"линия",
"липа",
"лирика",
"лист",
"литр",
"лифт",
"лихой",
"лицо",
"личный",
"лишний",
"лобовой",
"ловить",
"логика",
"лодка",
"ложка",
"лозунг",
"локоть",
"ломать",
"лоно",
"лопата",
"лорд",
"лось",
"лоток",
"лохматый",
"лошадь",
"лужа",
"лукавый",
"луна",
"лупить",
"лучший",
"лыжный",
"лысый",
"львиный",
"льгота",
"льдина",
"любить",
"людской",
"люстра",
"лютый",
"лягушка",
"магазин",
"мадам",
"мазать",
"майор",
"максимум",
"мальчик",
"манера",
"март",
"масса",
"мать",
"мафия",
"махать",
"мачта",
"машина",
"маэстро",
"маяк",
"мгла",
"мебель",
"медведь",
"мелкий",
"мемуары",
"менять",
"мера",
"место",
"метод",
"механизм",
"мечтать",
"мешать",
"миграция",
"мизинец",
"микрофон",
"миллион",
"минута",
"мировой",
"миссия",
"митинг",
"мишень",
"младший",
"мнение",
"мнимый",
"могила",
"модель",
"мозг",
"мойка",
"мокрый",
"молодой",
"момент",
"монах",
"море",
"мост",
"мотор",
"мохнатый",
"мочь",
"мошенник",
"мощный",
"мрачный",
"мстить",
"мудрый",
"мужчина",
"музыка",
"мука",
"мумия",
"мундир",
"муравей",
"мусор",
"мутный",
"муфта",
"муха",
"мучить",
"мушкетер",
"мыло",
"мысль",
"мыть",
"мычать",
"мышь",
"мэтр",
"мюзикл",
"мягкий",
"мякиш",
"мясо",
"мятый",
"мячик",
"набор",
"навык",
"нагрузка",
"надежда",
"наемный",
"нажать",
"называть",
"наивный",
"накрыть",
"налог",
"намерен",
"наносить",
"написать",
"народ",
"натура",
"наука",
"нация",
"начать",
"небо",
"невеста",
"негодяй",
"неделя",
"нежный",
"незнание",
"нелепый",
"немалый",
"неправда",
"нервный",
"нести",
"нефть",
"нехватка",
"нечистый",
"неясный",
"нива",
"нижний",
"низкий",
"никель",
"нирвана",
"нить",
"ничья",
"ниша",
"нищий",
"новый",
"нога",
"ножницы",
"ноздря",
"ноль",
"номер",
"норма",
"нота",
"ночь",
"ноша",
"ноябрь",
"нрав",
"нужный",
"нутро",
"нынешний",
"нырнуть",
"ныть",
"нюанс",
"нюхать",
"няня",
"оазис",
"обаяние",
"обвинять",
"обгонять",
"обещать",
"обжигать",
"обзор",
"обида",
"область",
"обмен",
"обнимать",
"оборона",
"образ",
"обучение",
"обходить",
"обширный",
"общий",
"объект",
"обычный",
"обязать",
"овальный",
"овес",
"овощи",
"овраг",
"овца",
"овчарка",
"огненный",
"огонь",
"огромный",
"огурец",
"одежда",
"одинокий",
"одобрить",
"ожидать",
"ожог",
"озарение",
"озеро",
"означать",
"оказать",
"океан",
"оклад",
"окно",
"округ",
"октябрь",
"окурок",
"олень",
"опасный",
"операция",
"описать",
"оплата",
"опора",
"оппонент",
"опрос",
"оптимизм",
"опускать",
"опыт",
"орать",
"орбита",
"орган",
"орден",
"орел",
"оригинал",
"оркестр",
"орнамент",
"оружие",
"осадок",
"освещать",
"осень",
"осина",
"осколок",
"осмотр",
"основной",
"особый",
"осуждать",
"отбор",
"отвечать",
"отдать",
"отец",
"отзыв",
"открытие",
"отмечать",
"относить",
"отпуск",
"отрасль",
"отставка",
"оттенок",
"отходить",
"отчет",
"отъезд",
"офицер",
"охапка",
"охота",
"охрана",
"оценка",
"очаг",
"очередь",
"очищать",
"очки",
"ошейник",
"ошибка",
"ощущение",
"павильон",
"падать",
"паек",
"пакет",
"палец",
"память",
"панель",
"папка",
"партия",
"паспорт",
"патрон",
"пауза",
"пафос",
"пахнуть",
"пациент",
"пачка",
"пашня",
"певец",
"педагог",
"пейзаж",
"пельмень",
"пенсия",
"пепел",
"период",
"песня",
"петля",
"пехота",
"печать",
"пешеход",
"пещера",
"пианист",
"пиво",
"пиджак",
"пиковый",
"пилот",
"пионер",
"пирог",
"писать",
"пить",
"пицца",
"пишущий",
"пища",
"план",
"плечо",
"плита",
"плохой",
"плыть",
"плюс",
"пляж",
"победа",
"повод",
"погода",
"подумать",
"поехать",
"пожимать",
"позиция",
"поиск",
"покой",
"получать",
"помнить",
"пони",
"поощрять",
"попадать",
"порядок",
"пост",
"поток",
"похожий",
"поцелуй",
"почва",
"пощечина",
"поэт",
"пояснить",
"право",
"предмет",
"проблема",
"пруд",
"прыгать",
"прямой",
"психолог",
"птица",
"публика",
"пугать",
"пудра",
"пузырь",
"пуля",
"пункт",
"пурга",
"пустой",
"путь",
"пухлый",
"пучок",
"пушистый",
"пчела",
"пшеница",
"пыль",
"пытка",
"пыхтеть",
"пышный",
"пьеса",
"пьяный",
"пятно",
"работа",
"равный",
"радость",
"развитие",
"район",
"ракета",
"рамка",
"ранний",
"рапорт",
"рассказ",
"раунд",
"рация",
"рвать",
"реальный",
"ребенок",
"реветь",
"регион",
"редакция",
"реестр",
"режим",
"резкий",
"рейтинг",
"река",
"религия",
"ремонт",
"рента",
"реплика",
"ресурс",
"реформа",
"рецепт",
"речь",
"решение",
"ржавый",
"рисунок",
"ритм",
"рифма",
"робкий",
"ровный",
"рогатый",
"родитель",
"рождение",
"розовый",
"роковой",
"роль",
"роман",
"ронять",
"рост",
"рота",
"роща",
"рояль",
"рубль",
"ругать",
"руда",
"ружье",
"руины",
"рука",
"руль",
"румяный",
"русский",
"ручка",
"рыба",
"рывок",
"рыдать",
"рыжий",
"рынок",
"рысь",
"рыть",
"рыхлый",
"рыцарь",
"рычаг",
"рюкзак",
"рюмка",
"рябой",
"рядовой",
"сабля",
"садовый",
"сажать",
"салон",
"самолет",
"сани",
"сапог",
"сарай",
"сатира",
"сауна",
"сахар",
"сбегать",
"сбивать",
"сбор",
"сбыт",
"свадьба",
"свет",
"свидание",
"свобода",
"связь",
"сгорать",
"сдвигать",
"сеанс",
"северный",
"сегмент",
"седой",
"сезон",
"сейф",
"секунда",
"сельский",
"семья",
"сентябрь",
"сердце",
"сеть",
"сечение",
"сеять",
"сигнал",
"сидеть",
"сизый",
"сила",
"символ",
"синий",
"сирота",
"система",
"ситуация",
"сиять",
"сказать",
"скважина",
"скелет",
"скидка",
"склад",
"скорый",
"скрывать",
"скучный",
"слава",
"слеза",
"слияние",
"слово",
"случай",
"слышать",
"слюна",
"смех",
"смирение",
"смотреть",
"смутный",
"смысл",
"смятение",
"снаряд",
"снег",
"снижение",
"сносить",
"снять",
"событие",
"совет",
"согласие",
"сожалеть",
"сойти",
"сокол",
"солнце",
"сомнение",
"сонный",
"сообщать",
"соперник",
"сорт",
"состав",
"сотня",
"соус",
"социолог",
"сочинять",
"союз",
"спать",
"спешить",
"спина",
"сплошной",
"способ",
"спутник",
"средство",
"срок",
"срывать",
"стать",
"ствол",
"стена",
"стихи",
"сторона",
"страна",
"студент",
"стыд",
"субъект",
"сувенир",
"сугроб",
"судьба",
"суета",
"суждение",
"сукно",
"сулить",
"сумма",
"сунуть",
"супруг",
"суровый",
"сустав",
"суть",
"сухой",
"суша",
"существо",
"сфера",
"схема",
"сцена",
"счастье",
"счет",
"считать",
"сшивать",
"съезд",
"сынок",
"сыпать",
"сырье",
"сытый",
"сыщик",
"сюжет",
"сюрприз",
"таблица",
"таежный",
"таинство",
"тайна",
"такси",
"талант",
"таможня",
"танец",
"тарелка",
"таскать",
"тахта",
"тачка",
"таять",
"тварь",
"твердый",
"творить",
"театр",
"тезис",
"текст",
"тело",
"тема",
"тень",
"теория",
"теплый",
"терять",
"тесный",
"тетя",
"техника",
"течение",
"тигр",
"типичный",
"тираж",
"титул",
"тихий",
"тишина",
"ткань",
"товарищ",
"толпа",
"тонкий",
"топливо",
"торговля",
"тоска",
"точка",
"тощий",
"традиция",
"тревога",
"трибуна",
"трогать",
"труд",
"трюк",
"тряпка",
"туалет",
"тугой",
"туловище",
"туман",
"тундра",
"тупой",
"турнир",
"тусклый",
"туфля",
"туча",
"туша",
"тыкать",
"тысяча",
"тьма",
"тюльпан",
"тюрьма",
"тяга",
"тяжелый",
"тянуть",
"убеждать",
"убирать",
"убогий",
"убыток",
"уважение",
"уверять",
"увлекать",
"угнать",
"угол",
"угроза",
"удар",
"удивлять",
"удобный",
"уезд",
"ужас",
"ужин",
"узел",
"узкий",
"узнавать",
"узор",
"уйма",
"уклон",
"укол",
"уксус",
"улетать",
"улица",
"улучшать",
"улыбка",
"уметь",
"умиление",
"умный",
"умолять",
"умысел",
"унижать",
"уносить",
"уныние",
"упасть",
"уплата",
"упор",
"упрекать",
"упускать",
"уран",
"урна",
"уровень",
"усадьба",
"усердие",
"усилие",
"ускорять",
"условие",
"усмешка",
"уснуть",
"успеть",
"усыпать",
"утешать",
"утка",
"уточнять",
"утро",
"утюг",
"уходить",
"уцелеть",
"участие",
"ученый",
"учитель",
"ушко",
"ущерб",
"уютный",
"уяснять",
"фабрика",
"фаворит",
"фаза",
"файл",
"факт",
"фамилия",
"фантазия",
"фара",
"фасад",
"февраль",
"фельдшер",
"феномен",
"ферма",
"фигура",
"физика",
"фильм",
"финал",
"фирма",
"фишка",
"флаг",
"флейта",
"флот",
"фокус",
"фольклор",
"фонд",
"форма",
"фото",
"фраза",
"фреска",
"фронт",
"фрукт",
"функция",
"фуражка",
"футбол",
"фыркать",
"халат",
"хамство",
"хаос",
"характер",
"хата",
"хватать",
"хвост",
"хижина",
"хилый",
"химия",
"хирург",
"хитрый",
"хищник",
"хлам",
"хлеб",
"хлопать",
"хмурый",
"ходить",
"хозяин",
"хоккей",
"холодный",
"хороший",
"хотеть",
"хохотать",
"храм",
"хрен",
"хриплый",
"хроника",
"хрупкий",
"художник",
"хулиган",
"хутор",
"царь",
"цвет",
"цель",
"цемент",
"центр",
"цепь",
"церковь",
"цикл",
"цилиндр",
"циничный",
"цирк",
"цистерна",
"цитата",
"цифра",
"цыпленок",
"чадо",
"чайник",
"часть",
"чашка",
"человек",
"чемодан",
"чепуха",
"черный",
"честь",
"четкий",
"чехол",
"чиновник",
"число",
"читать",
"членство",
"чреватый",
"чтение",
"чувство",
"чугунный",
"чудо",
"чужой",
"чукча",
"чулок",
"чума",
"чуткий",
"чучело",
"чушь",
"шаблон",
"шагать",
"шайка",
"шакал",
"шалаш",
"шампунь",
"шанс",
"шапка",
"шарик",
"шасси",
"шатер",
"шахта",
"шашлык",
"швейный",
"швырять",
"шевелить",
"шедевр",
"шейка",
"шелковый",
"шептать",
"шерсть",
"шестерка",
"шикарный",
"шинель",
"шипеть",
"широкий",
"шить",
"шишка",
"шкаф",
"школа",
"шкура",
"шланг",
"шлем",
"шлюпка",
"шляпа",
"шнур",
"шоколад",
"шорох",
"шоссе",
"шофер",
"шпага",
"шпион",
"шприц",
"шрам",
"шрифт",
"штаб",
"штора",
"штраф",
"штука",
"штык",
"шуба",
"шуметь",
"шуршать",
"шутка",
"щадить",
"щедрый",
"щека",
"щель",
"щенок",
"щепка",
"щетка",
"щука",
"эволюция",
"эгоизм",
"экзамен",
"экипаж",
"экономия",
"экран",
"эксперт",
"элемент",
"элита",
"эмблема",
"эмигрант",
"эмоция",
"энергия",
"эпизод",
"эпоха",
"эскиз",
"эссе",
"эстрада",
"этап",
"этика",
"этюд",
"эфир",
"эффект",
"эшелон",
"юбилей",
"юбка",
"южный",
"юмор",
"юноша",
"юрист",
"яблоко",
"явление",
"ягода",
"ядерный",
"ядовитый",
"ядро",
"язва",
"язык",
"яйцо",
"якорь",
"январь",
"японец",
"яркий",
"ярмарка",
"ярость",
"ярус",
"ясный",
"яхта",
"ячейка",
"ящик",
];
|
use aoc2020::aoc::load_data;
use nom::{
bits::complete::*, branch::*, bytes::complete::*, character::complete::*, combinator::*,
multi::*, sequence::*, IResult,
};
use regex::Regex;
use std::collections::HashMap;
use std::collections::HashSet;
use std::fmt::Debug;
use std::hash::Hash;
use std::io::BufRead;
/// A group is the set of people whose answers appeared between blank lines.
type Group = Vec<Person>;
/// One person's "yes" answers, one char per question.
type Person = Vec<char>;
/// Splits a line iterator into blank-line-separated groups (AoC 2020 day 6
/// input format). The final group is pushed even without a trailing blank
/// line; a trailing blank line therefore yields a final empty group, exactly
/// as before.
fn groups<I>(v: I) -> std::io::Result<Vec<Group>>
where
    I: Iterator<Item = std::io::Result<String>>,
{
    let mut result: Vec<Group> = Vec::with_capacity(32);
    let mut pending: Group = Vec::new();
    for line in v {
        let text = line?;
        if text.is_empty() {
            // Blank line closes the current group; take() leaves a fresh one.
            result.push(std::mem::take(&mut pending));
        } else {
            pending.push(text.chars().collect());
        }
    }
    result.push(pending);
    Ok(result)
}
/// Collects the iterator into a vector with duplicates removed.
///
/// The result's order is unspecified (it comes out of a `HashSet`);
/// callers here only use the length.
fn distinct<I, A>(v: I) -> Vec<A>
where
    I: Iterator<Item = A>,
    A: Hash + Eq,
{
    // Deduplicate via a HashSet. Collecting twice replaces the original's
    // redundant `drain().into_iter()` chain — `drain()` is already an
    // iterator, and consuming the set with `into_iter` avoids the extra
    // mutable binding entirely.
    let set: HashSet<A> = v.collect();
    set.into_iter().collect()
}
/// For each group, counts the questions that *every* person in the group
/// answered (AoC 2020 day 6, part 2).
fn everyone<I>(groups: I) -> Vec<usize>
where
    I: Iterator<Item = Group>,
{
    let mut counts = Vec::new();
    for group in groups {
        // Number of people; the original counted this manually in the loop.
        let people = group.len();
        // Tally how many people gave each answer.
        let mut map: HashMap<char, usize> = HashMap::new();
        for person in &group {
            for ans in person {
                *map.entry(*ans).or_default() += 1;
            }
        }
        // An answer qualifies only if everyone in the group gave it.
        // (The leftover debug `println!` from the original is removed, and
        // the redundant `drain().into_iter()` is replaced by `values()`.)
        let unanimous = map.values().filter(|&&c| c == people).count();
        counts.push(unanimous);
    }
    counts
}
fn main() -> std::result::Result<(), Box<dyn std::error::Error>> {
let data = load_data("examples/data/day6.txt")?;
let example = load_data("examples/data/day6-example.txt")?;
let mut ex_groups = groups(data.lines())?;
let counts_distinct: Vec<usize> = ex_groups
.iter()
.map(|g| distinct(g.iter().flatten()).len())
.collect();
let counts_everyone: Vec<usize> = everyone(ex_groups.drain(0..ex_groups.len()));
println!("{:?}", ex_groups);
println!("{:?}", counts_distinct);
println!("{:?}", counts_everyone.iter().sum::<usize>());
let set: HashSet<&str> = vec!["a", "a"].into_iter().collect();
println!("{:?}", set);
Ok(())
}
|
/// The device tree: the set of busses discovered on the system.
pub struct DevTree {
    // Trait objects so heterogeneous bus implementations can coexist.
    busses: Vec<Box<dyn Bus>>,
}
/// Identity of a device, used to match drivers against hardware.
#[derive(Debug, Copy, Clone)]
pub struct DeviceIdent {
    pub bustype: twz::device::BusType,
    // Vendor/device pair; participates in matching only when non-zero
    // (see `is_match`).
    pub vendor_id: u64,
    pub device_id: u64,
    // Class/subclass pair; participates in matching only when non-zero.
    pub class: u64,
    pub subclass: u64,
}
impl DeviceIdent {
    /// Returns true if `other` matches this ident: bus types must agree,
    /// and either the (class, subclass) pair or the (vendor, device) pair
    /// must match. Each pair only counts when it is non-zero on both
    /// sides, so zero effectively disables that half of the comparison.
    pub fn is_match(&self, other: &DeviceIdent) -> bool {
        other.bustype == self.bustype
            && ((other.class == self.class && other.subclass == self.subclass && self.class > 0 && other.class > 0)
                || (other.vendor_id == self.vendor_id && other.device_id == self.device_id && self.device_id > 0 && other.device_id > 0))
    }
    /// Builds a `DeviceIdent` from any integer-convertible id parts.
    pub fn new<T: Into<u64>>(bustype: twz::device::BusType, vendor: T, device: T, class: T, sc: T) -> DeviceIdent {
        DeviceIdent {
            bustype: bustype.into(),
            vendor_id: vendor.into(),
            device_id: device.into(),
            class: class.into(),
            subclass: sc.into(),
        }
    }
}
use crate::bus::Bus;
use crate::busses::create_bus;
use crate::devidentstring;
use crate::drivers::RegisteredDrivers;
use twz::device::DeviceData;
use twz::kso::{KSODirAttachments, KSOType, KSO};
use twz::obj::ProtFlags;
impl DevTree {
    /// Walks the children of `root` in the KSO directory, turning every
    /// readable device entry into a bus via `create_bus`.
    ///
    /// Entries that are not device KSOs, or that `create_bus` rejects, are
    /// silently skipped.
    pub fn enumerate_busses(root: &KSO<KSODirAttachments>) -> Result<DevTree, twz::TwzErr> {
        let mut vec = vec![];
        // NOTE(review): this `unwrap` panics if `root` has no directory —
        // confirm callers guarantee one exists.
        let dir = root.get_dir().unwrap();
        for chattach in dir {
            let chkso = chattach.into_kso::<DeviceData, { KSOType::Device }>(ProtFlags::READ);
            if let Some(chkso) = chkso {
                let dev = chkso.into_device();
                let bus = create_bus(dev);
                if let Some(bus) = bus {
                    vec.push(bus);
                }
            }
        }
        Ok(DevTree { busses: vec })
    }
    /// Initializes every discovered bus; per-bus init failures are ignored.
    pub fn init_busses(&mut self) {
        for bus in &mut self.busses {
            let _res = bus.init();
        }
    }
    /// Enumerates the devices on every bus; each device that the bus can
    /// identify is announced and handed to the driver registry so a
    /// matching driver can be started.
    pub fn init_devices(&mut self, drivers: &mut RegisteredDrivers) {
        for bus in &mut self.busses {
            bus.enumerate(&mut |mut dev| {
                let ident = bus.identify(&mut dev);
                if let Some(ident) = ident {
                    println!("[devmgr] found device {}", ident.human_readable_string());
                    drivers.start_driver(bus, dev, ident);
                }
                Ok(())
            });
        }
    }
}
|
use std::error::Error;
/// Axis-aligned rectangle with integer dimensions.
struct Rect {
    width: u32,
    height: u32,
}
impl Rect {
    /// True when `other` fits strictly inside `self` in both dimensions.
    fn can_hold(&self, other: &Rect) -> bool {
        let wider = self.width > other.width;
        let taller = self.height > other.height;
        wider && taller
    }
}
/// Newton's-method square root starting from an initial guess of 1.0.
///
/// Returns `Err` for negative input and (defensively) when 10 000
/// iterations do not bring `guess * guess` within `epsilon` of `val`;
/// zero is returned as-is. Operations are kept in the exact original
/// order so results are bit-identical.
fn sqrt(val: f64) -> Result<f64, String> {
    match val {
        v if v < 0.0 => return Err(format!("negative value - {}", v)),
        v if v == 0.0 => return Ok(v),
        _ => {}
    }
    let epsilon: f64 = 0.001;
    let mut guess: f64 = 1.0;
    let mut remaining = 10_000;
    while remaining > 0 {
        let err = val - guess * guess;
        if err.abs() < epsilon {
            return Ok(guess);
        }
        // Newton step: average the guess with val/guess.
        guess = (val / guess + guess) / 2.0;
        remaining -= 1;
    }
    Err(format!("can't find sqrt for {}", val))
}
#[cfg(test)]
mod tests {
    use super::*;
    // Containment must be strict in *both* dimensions.
    #[test]
    fn can_hold() {
        let r1 = Rect {
            width: 10,
            height: 20,
        };
        let r2 = Rect {
            width: 5,
            height: 17,
        };
        assert!(r1.can_hold(&r2));
        assert!(!r2.can_hold(&r1));
    }
    // Exact float equality is deliberate: Newton's method from guess 1.0
    // is deterministic, so sqrt(2.0) always converges to this exact value.
    #[test]
    fn check_sqrt() {
        assert_eq!(1.4142156862745097, sqrt(2.0).unwrap());
        assert_eq!(0.0, sqrt(0.0).unwrap());
    }
}
|
/// Pass/fail classification for a numeric score.
#[derive(Debug)]
enum Grade {
    Passed,
    NotPassed
}
/// Scores of 60.0 and above pass; everything below fails.
fn is_passed(score: f64) -> Grade {
    if score < 60.0 {
        Grade::NotPassed
    } else {
        Grade::Passed
    }
}
/// Demonstrates grading across the pass boundary, including one
/// fractional score just below it.
fn main() {
    for score in 58..62 {
        println!("{} is {:?}", score, is_passed(score as f64));
    }
    // SC is already an f64; the original's `SC as f64` cast was redundant.
    const SC: f64 = 59.5;
    println!("{} is {:?}", SC, is_passed(SC));
}
|
use std::io;
use std::marker::PhantomData;
use std::net::SocketAddr;
use std::time::Duration;
use crate::backlog::Backlog;
use crate::client::{SocketClient, SocketStatus};
use bincode::Result;
use serde::{de::DeserializeOwned, Serialize};
use socket2::{Domain, Socket, Type};
/// Non-blocking socket server that decodes `Req` messages from clients
/// and answers each with a `Res`.
pub struct SocketServer<Req, Res>
where
    Req: DeserializeOwned,
    Res: Serialize,
{
    // Currently-connected clients. Note the client's type parameters are
    // reversed: from the server's side it sends `Res` and receives `Req`.
    streams: Vec<SocketClient<Res, Req>>,
    listener: Socket,
    // NOTE(review): these markers look redundant — `streams` already
    // mentions both `Req` and `Res` — confirm before removing.
    _request: PhantomData<Req>,
    _response: PhantomData<Res>,
}
impl<Req, Res> SocketServer<Req, Res>
where
    Req: DeserializeOwned,
    Res: Serialize,
{
    /// Binds a non-blocking TCP listener on `addr` with the given accept
    /// backlog and returns a server with no connected clients.
    ///
    /// # Errors
    /// Propagates any socket-creation, bind, listen, or non-blocking
    /// configuration failure.
    pub fn try_new(addr: SocketAddr, backlog: Backlog) -> io::Result<Self> {
        // Pick the socket domain matching the address family.
        let domain = match addr {
            SocketAddr::V4(_) => Domain::ipv4(),
            SocketAddr::V6(_) => Domain::ipv6(),
        };
        let socket = Socket::new(domain, Type::stream(), None)?;
        socket.bind(&addr.into())?;
        socket.listen(backlog.into())?;
        // Non-blocking so `accept` can be polled from the serve loop.
        socket.set_nonblocking(true)?;
        Ok(Self {
            streams: vec![],
            listener: socket,
            // `PhantomData` is a unit struct; constructing it directly is
            // clearer than the original `PhantomData::default()`.
            _request: PhantomData,
            _response: PhantomData,
        })
    }
}
impl<Req, Res> SocketServer<Req, Res>
where
    Req: DeserializeOwned,
    Res: Serialize,
{
    /// Serve loop: accepts at most one new client per pass, lets every
    /// connected client answer via `handler`, prunes closed connections,
    /// then asks `post` what to do next (sleep / yield / continue / stop).
    ///
    /// The `?` on `accept`/`response` converts any `io::Error` into the
    /// `bincode` error type this function returns (via `From`).
    pub fn run<H, P>(mut self, mut handler: H, post: P) -> Result<()>
    where
        H: FnMut(Req) -> Res,
        P: Fn(&mut Self) -> PostServing,
    {
        loop {
            if let Some(server_client) = self.accept()? {
                self.streams.push(server_client);
            }
            // Iterate in reverse so `remove(idx)` cannot shift indices we
            // have not visited yet.
            for idx in (0..self.streams.len()).rev() {
                let client = &mut self.streams[idx];
                if let SocketStatus::Closed = client.response(|req| handler(req))? {
                    self.streams.remove(idx);
                }
            }
            match post(&mut self) {
                PostServing::Wait(time) => std::thread::sleep(time),
                PostServing::Yield => std::thread::yield_now(),
                PostServing::Continue => continue,
                PostServing::Stop => break Ok(()),
            }
        }
    }
    /// True while at least one client connection is open.
    pub fn has_connections(&self) -> bool {
        !self.streams.is_empty()
    }
    /// Number of currently-open client connections.
    pub fn num_connections(&self) -> usize {
        self.streams.len()
    }
    /// Polls the non-blocking listener once: `Ok(Some)` for a new client,
    /// `Ok(None)` when no connection is pending (`WouldBlock`), `Err` for
    /// any other accept failure.
    fn accept(&mut self) -> io::Result<Option<SocketClient<Res, Req>>> {
        match self.listener.accept() {
            Ok((stream, _)) => Ok(Some(SocketClient::try_from_stream(stream)?)),
            Err(ref e) if e.kind() == io::ErrorKind::WouldBlock => Ok(None),
            Err(error) => Err(error),
        }
    }
}
/// What the serve loop should do after one pass over all clients.
pub enum PostServing {
    /// Sleep for the given duration before the next pass.
    Wait(Duration),
    /// Yield the thread's timeslice, then continue.
    Yield,
    /// Immediately start the next pass.
    Continue,
    /// Shut the serve loop down, returning `Ok(())`.
    Stop,
}
|
use pcap::*;
/// Lists every capture device pcap can see, pretty-printing each one;
/// panics (with the debug-formatted error) if enumeration fails.
fn main() {
    let devices = match pcap_findalldevs() {
        Ok(devices) => devices,
        Err(err) => panic!("{:?}", err),
    };
    for device in devices.iter() {
        println!("{:#?}", device);
    }
}
|
// Copyright (c) Facebook, Inc. and its affiliates.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the root directory of this source tree.
use math::StarkField;
// FRI OPTIONS
// ================================================================================================
/// FRI protocol config options for proof generation and verification.
/// FRI protocol config options for proof generation and verification.
#[derive(Clone, PartialEq, Eq)]
pub struct FriOptions {
    // Degree-reduction factor per FRI layer; must be 4, 8, or 16.
    folding_factor: usize,
    // Folding stops once the domain is no larger than this.
    max_remainder_size: usize,
    // Ratio of evaluation-domain size to (degree + 1); a power of two.
    blowup_factor: usize,
}
impl FriOptions {
    /// Returns a new [FriOptions] struct instantiated with the specified parameters.
    ///
    /// # Panics
    /// Panics if:
    /// * `blowup_factor` is not a power of two.
    /// * `folding_factor` is not 4, 8, or 16.
    /// * `max_remainder_size` is not at least twice the size of the `folding_factor`.
    //   (The original doc said `blowup_factor` here, contradicting the
    //   assertion below, which checks `folding_factor * 2`.)
    pub fn new(blowup_factor: usize, folding_factor: usize, max_remainder_size: usize) -> Self {
        // TODO: change panics to errors
        assert!(
            blowup_factor.is_power_of_two(),
            "blowup factor must be a power of two, but was {}",
            blowup_factor
        );
        assert!(
            folding_factor == 4 || folding_factor == 8 || folding_factor == 16,
            "folding factor {} is not supported",
            folding_factor
        );
        assert!(
            max_remainder_size >= folding_factor * 2,
            "expected max remainder size to be at least {}, but was {}",
            folding_factor * 2,
            max_remainder_size
        );
        FriOptions {
            folding_factor,
            max_remainder_size,
            blowup_factor,
        }
    }
    /// Returns the offset by which the evaluation domain is shifted.
    ///
    /// The domain is shifted by multiplying every element in the domain by this offset.
    ///
    /// Currently, the offset is hard-coded to be the primitive element in the field specified by
    /// type parameter `B`.
    pub fn domain_offset<B: StarkField>(&self) -> B {
        B::GENERATOR
    }
    /// Returns the factor by which the degree of a polynomial is reduced with each FRI layer.
    ///
    /// In combination with `max_remainder_size` this property defines how many FRI layers are
    /// needed for an evaluation domain of a given size.
    pub fn folding_factor(&self) -> usize {
        self.folding_factor
    }
    /// Returns maximum allowed remainder (last FRI layer) size.
    ///
    /// In combination with `folding_factor` this property defines how many FRI layers are needed
    /// for an evaluation domain of a given size.
    pub fn max_remainder_size(&self) -> usize {
        self.max_remainder_size
    }
    /// Returns a blowup factor of the evaluation domain.
    ///
    /// Specifically, if the polynomial for which the FRI protocol is executed is of degree `d`
    /// where `d` is one less than a power of two, then the evaluation domain size will be
    /// equal to `(d + 1) * blowup_factor`.
    pub fn blowup_factor(&self) -> usize {
        self.blowup_factor
    }
    /// Computes and return the number of FRI layers required for a domain of the specified size.
    ///
    /// The remainder layer (the last FRI layer) is not included in the returned value.
    ///
    /// The number of layers for a given domain size is defined by the `folding_factor` and
    /// `max_remainder_size` settings.
    pub fn num_fri_layers(&self, mut domain_size: usize) -> usize {
        let mut result = 0;
        // Each layer divides the domain by the folding factor until it
        // fits within the allowed remainder size.
        while domain_size > self.max_remainder_size {
            domain_size /= self.folding_factor;
            result += 1;
        }
        result
    }
    /// Computes and returns the size of the remainder layer (the last FRI layer) for a domain of
    /// the specified size.
    ///
    /// The size of the remainder layer for a given domain size is defined by the `folding_factor`
    /// and `max_remainder_size` settings.
    pub fn fri_remainder_size(&self, mut domain_size: usize) -> usize {
        while domain_size > self.max_remainder_size {
            domain_size /= self.folding_factor;
        }
        domain_size
    }
}
|
use anyhow::Result;
use std::{collections::HashMap, fs};
/// Iterator over the "memory game" sequence of AoC 2020 day 15: after the
/// seed numbers, each number is the gap since the previous number was last
/// spoken (0 if it is new).
struct Day15 {
    starting: Vec<usize>,
    numbers: HashMap<usize, usize>,
    last: Option<usize>,
    index: usize,
}
impl Iterator for Day15 {
    type Item = usize;
    fn next(&mut self) -> Option<Self::Item> {
        // While still inside the seed list, emit it verbatim; afterwards
        // emit the distance from the previous number's prior occurrence.
        let spoken = if self.index < self.starting.len() {
            self.starting[self.index]
        } else {
            let prev = self.last.unwrap();
            self.numbers
                .get(&prev)
                .map_or(0, |seen_at| (self.index - 1) - seen_at)
        };
        // Only now record where the previous number was spoken, so the
        // lookup above saw its *earlier* position.
        if let Some(prev) = self.last {
            self.numbers.insert(prev, self.index - 1);
        }
        self.last = Some(spoken);
        self.index += 1;
        Some(spoken)
    }
}
impl From<Vec<usize>> for Day15 {
    fn from(starting: Vec<usize>) -> Self {
        Day15 {
            starting,
            numbers: HashMap::new(),
            last: None,
            index: 0,
        }
    }
}
/// Parses a comma-separated seed list and returns the `nth` spoken number
/// (1-based, matching the puzzle statement).
fn do_the_thing(input: &str, nth: usize) -> usize {
    let seeds: Vec<usize> = input
        .trim()
        .split(',')
        .map(|l| l.parse().unwrap())
        .collect();
    Day15::from(seeds).nth(nth - 1).unwrap()
}
/// Runs part 2 (the 30-millionth spoken number) against the puzzle input.
fn main() -> Result<()> {
    let input = fs::read_to_string("input.txt")?;
    println!("{:?}", do_the_thing(&input, 30000000));
    Ok(())
}
#[cfg(test)]
mod tests {
    use super::*;
    use test_case::test_case;
    // Known answers from the puzzle statement for both parts: the 2020th
    // and the 30-millionth spoken number of each seed list.
    #[test_case("0,3,6", 2020 => 436)]
    #[test_case("1,3,2", 2020 => 1)]
    #[test_case("2,1,3", 2020 => 10)]
    #[test_case("1,2,3", 2020 => 27)]
    #[test_case("2,3,1", 2020 => 78)]
    #[test_case("3,2,1", 2020 => 438)]
    #[test_case("3,1,2", 2020 => 1836)]
    #[test_case("0,3,6", 30000000 => 175594)]
    #[test_case("1,3,2", 30000000 => 2578)]
    #[test_case("2,1,3", 30000000 => 3544142)]
    #[test_case("1,2,3", 30000000 => 261214)]
    #[test_case("2,3,1", 30000000 => 6895259)]
    #[test_case("3,2,1", 30000000 => 18)]
    #[test_case("3,1,2", 30000000 => 362)]
    fn second(input: &str, nth: usize) -> usize {
        do_the_thing(&input, nth)
    }
    // First ten numbers of the 0,3,6 game, straight from the puzzle text.
    #[test]
    fn test_iterator() {
        let iter: Day15 = vec![0, 3, 6].into();
        assert_eq!(
            iter.take(10).collect::<Vec<_>>(),
            vec![0, 3, 6, 0, 3, 3, 1, 0, 4, 0]
        );
    }
}
|
use super::HyperLTL::*;
use super::Op::*;
use super::QuantKind::*;
use super::*;
use std::collections::HashSet;
impl HyperLTL {
/// Sanity check: every operator application must carry exactly as many
/// operands as the operator's declared arity, when it has a fixed one.
/// Panics (via `assert_eq!`) on a violation.
fn check_arity(&self) {
    match self {
        Quant(_, _, scope) => scope.check_arity(),
        Appl(op, inner) => {
            for ele in inner {
                ele.check_arity();
            }
            if let Some(arity) = op.arity() {
                assert_eq!(arity, inner.len());
            }
        }
        Prop(_, _) => {}
    }
}
/// Checks if a formula is plain LTL, i.e., contains no trace quantifier.
pub fn is_ltl(&self) -> bool {
    self.is_quantifier_free()
}
/// Checks if a formula contains no quantifier, i.e., is LTL
pub fn is_quantifier_free(&self) -> bool {
match self {
Quant(_, _, _) => false,
Appl(_, inner) => inner.iter().all(|ele| ele.is_quantifier_free()),
Prop(_, _) => true,
}
}
/// Checks if a formula contains a quantifier prefix, followed by LTL body
pub fn is_hyperltl(&self) -> bool {
    if let Quant(_, _, scope) = self {
        scope.is_hyperltl() || scope.is_quantifier_free()
    } else {
        false
    }
}
/// Returns the set of Props contained in the formula
pub fn get_propositions(&self) -> HashSet<&str> {
    match self {
        Quant(_, _, inner) => inner.get_propositions(),
        // Accumulate into a single set instead of rebuilding a fresh set
        // per child — the original's `union(..).map(|e| *e).collect()`
        // allocated a new HashSet for every operand.
        Appl(_, inner) => inner.iter().fold(HashSet::new(), |mut set, ele| {
            set.extend(ele.get_propositions());
            set
        }),
        Prop(prop, _) => {
            let mut singleton = HashSet::new();
            singleton.insert(prop.as_ref());
            singleton
        }
    }
}
/// Returns the set of proposition occurrences rendered via `Display`
/// (so path annotations are included in the strings).
pub fn get_occurrences(&self) -> HashSet<String> {
    match self {
        Quant(_, _, inner) => inner.get_occurrences(),
        // `extend` into one accumulator replaces the original's
        // per-child `union(..).map(|e| e.clone()).collect()` rebuild.
        Appl(_, inner) => inner.iter().fold(HashSet::new(), |mut set, ele| {
            set.extend(ele.get_occurrences());
            set
        }),
        Prop(_, _) => {
            let mut singleton = HashSet::new();
            singleton.insert(format!("{}", self));
            singleton
        }
    }
}
/// Strips the quantifier prefix, returning the innermost body.
pub fn get_body(&self) -> &HyperLTL {
    let mut body = self;
    while let Quant(_, _, inner) = body {
        body = inner.as_ref();
    }
    body
}
/// Collects the quantifier prefix as (kind, variables) pairs, outermost first.
pub fn get_quantifier(&self) -> Vec<(QuantKind, Vec<String>)> {
    let mut res = Vec::new();
    self.quantifier(&mut res);
    res
}
/// Walks the quantifier prefix iteratively, appending each
/// (kind, parameters) pair to `quant`; stops at the first non-quantifier.
fn quantifier(&self, quant: &mut Vec<(QuantKind, Vec<String>)>) {
    let mut cur = self;
    while let Quant(kind, param, inner) = cur {
        quant.push((*kind, param.clone()));
        cur = inner.as_ref();
    }
}
/// Brings formula to negation normal form (NNF) and collapses consecutive quantifier of the same type
// NOTE(review): `push_next` and `simplify` assume a quantifier-free
// formula (they hit `unreachable!`/`assert!(is_ltl)` on `Quant`) —
// confirm `normalize` is only called on LTL bodies.
pub fn normalize(mut self) -> Self {
    // Arity check is a pure assertion pass; `remove_derived` rewrites
    // `->` and `W` into dual-friendly operators before NNF conversion.
    self.check_arity();
    self.remove_derived();
    self.push_next().to_nnf(false).simplify().flatten()
}
/// Removes all operators that do not have a dual operation, i.e., `Implication` and `WeakUntil`
fn remove_derived(&mut self) {
    match self {
        // The original bound `kind`/`vars` here without using them,
        // producing unused-variable warnings; underscores fix that.
        Quant(_, _, scope) => scope.remove_derived(),
        Appl(op, inner) => {
            inner.iter_mut().for_each(|subf| subf.remove_derived());
            match op {
                Implication => {
                    // equivalent to `!lhs || rhs`
                    let lhs = inner.remove(0);
                    inner.insert(0, Appl(Negation, vec![lhs]));
                    *op = Disjunction;
                }
                WeakUntil => {
                    // equivalent to `G lhs || lhs U rhs`; the dummy lets us
                    // move the rewritten subformula out of `self`.
                    let lhs = inner[0].clone();
                    let dummy = Appl(Op::True, vec![]);
                    *op = Op::Until;
                    let old = std::mem::replace(self, dummy);
                    *self = Appl(Op::Disjunction, vec![Appl(Op::Globally, vec![lhs]), old])
                }
                _ => {}
            }
        }
        Prop(_, _) => {}
    }
}
/// Pushes the Next operator inward over `G` and `F`, using the
/// equivalences `X G p = G X p` and `X F p = F X p`; anything else under
/// `X` is left untouched. Quantifiers are not expected here (they hit the
/// `unreachable!`).
fn push_next(self) -> Self {
    match self {
        Appl(Op::Next, mut inner) => {
            // Next is unary: take its single operand.
            let inner = inner.pop().unwrap();
            match inner {
                Appl(Op::Globally, inner) => Appl(
                    Op::Globally,
                    inner
                        .into_iter()
                        .map(|subf| Appl(Op::Next, vec![subf]).push_next())
                        .collect(),
                ),
                Appl(Op::Finally, inner) => Appl(
                    Op::Finally,
                    inner
                        .into_iter()
                        .map(|subf| Appl(Op::Next, vec![subf]).push_next())
                        .collect(),
                ),
                t => Appl(Op::Next, vec![t]),
            }
        }
        Appl(op, inner) => Appl(op, inner.into_iter().map(|subf| subf.push_next()).collect()),
        Prop(_, _) => self,
        _ => unreachable!(),
    }
}
/// Converts to negation normal form: `negated` says whether an odd number
/// of negations is pending above this node. Negations are consumed at
/// `Appl(Negation, ..)` nodes and re-emitted only directly in front of
/// propositions; quantifiers and operators are dualized on the way down.
fn to_nnf(self, negated: bool) -> HyperLTL {
    match self {
        Quant(mut qtype, params, scope) => {
            if negated {
                qtype.negate()
            }
            Quant(qtype, params, scope.to_nnf(negated).into())
        }
        Appl(Negation, expr) => {
            // Negation is unary; flip the pending-negation flag.
            assert_eq!(expr.len(), 1);
            expr.into_iter().next().unwrap().to_nnf(!negated)
        }
        Appl(mut op, mut inner) => {
            if negated {
                op.negate();
            }
            // For <-> and xor, negating the operator alone already yields
            // the dual, so the negation must NOT also recurse into the
            // operands.
            if op != Op::Equivalence && op != Op::Exclusion {
                inner = inner.into_iter().map(|subf| subf.to_nnf(negated)).collect();
            }
            Appl(op, inner)
        }
        Prop(name, path) => {
            if negated {
                Appl(Negation, vec![Prop(name, path)])
            } else {
                Prop(name, path)
            }
        }
    }
}
/// Collapses directly-nested quantifiers of the same kind into one and
/// merges nested applications of the same chainable operator (e.g.
/// `(a && b) && c` becomes one conjunction over three operands).
fn flatten(self) -> HyperLTL {
    match self {
        Quant(qtype, mut params, mut scope) => {
            scope = match *scope {
                Quant(other_qtype, other_params, other_scope) => {
                    if qtype == other_qtype {
                        // quantifiers can be collapsed
                        params.extend(other_params);
                        let new_quant = Quant(qtype, params, other_scope);
                        return new_quant.flatten();
                    } else {
                        Quant(other_qtype, other_params, other_scope)
                            .flatten()
                            .into()
                    }
                }
                _ => scope.flatten().into(),
            };
            Quant(qtype, params, scope)
        }
        Appl(op, mut inner) => {
            // Flatten children first, then splice same-operator children
            // into this node's operand list when the operator allows it.
            inner = inner.into_iter().map(|subf| subf.flatten()).collect();
            let mut new_inner = Vec::new();
            for subf in inner.into_iter() {
                match subf {
                    Appl(other_op, other_inner) => {
                        if other_op == op && op.is_chainable() {
                            new_inner.extend(other_inner)
                        } else {
                            new_inner.push(Appl(other_op, other_inner))
                        }
                    }
                    subf => new_inner.push(subf),
                }
            }
            Appl(op, new_inner)
        }
        Prop(name, path) => Prop(name, path),
    }
}
/// Constant-folds `true`/`false` out of conjunctions and disjunctions:
/// an absorbing constant short-circuits the whole node, neutral constants
/// are dropped, and empty or single-operand nodes collapse. Only valid
/// for quantifier-free (LTL) formulas.
fn simplify(self) -> HyperLTL {
    assert!(self.is_ltl());
    match self {
        Appl(op, mut inner) => {
            inner = inner.into_iter().map(|subf| subf.simplify()).collect();
            match op {
                Op::Conjunction => {
                    // `false` absorbs; `true` is neutral and removed.
                    if inner.contains(&HyperLTL::constant_false()) {
                        return HyperLTL::constant_false();
                    }
                    inner.retain(|subf| subf != &HyperLTL::constant_true());
                    if inner.is_empty() {
                        return HyperLTL::constant_true();
                    } else if inner.len() == 1 {
                        return inner.pop().unwrap();
                    }
                    HyperLTL::Appl(Op::Conjunction, inner)
                }
                Op::Disjunction => {
                    // `true` absorbs; `false` is neutral and removed.
                    if inner.contains(&HyperLTL::constant_true()) {
                        return HyperLTL::constant_true();
                    }
                    inner.retain(|subf| subf != &HyperLTL::constant_false());
                    if inner.is_empty() {
                        return HyperLTL::constant_false();
                    } else if inner.len() == 1 {
                        return inner.pop().unwrap();
                    }
                    HyperLTL::Appl(Op::Disjunction, inner)
                }
                op => HyperLTL::Appl(op, inner),
            }
        }
        Prop(name, path) => Prop(name, path),
        _ => unreachable!(),
    }
}
/// checks whether an LTL formula is in the syntactic safety fragment
/// (every operator in the formula must itself be a safety operator,
/// as decided by `Op::is_safety`)
fn is_syntactic_safe(&self) -> bool {
    assert!(self.is_ltl());
    match self {
        Appl(op, inner) => op.is_safety() && inner.iter().all(|subf| subf.is_syntactic_safe()),
        Prop(_, _) => true,
        _ => unreachable!(),
    }
}
/// checks whether an LTL formula is an invariant, i.e., of the form `G (propositional)`
fn is_invariant(&self) -> bool {
    assert!(self.is_ltl());
    match self {
        // Only the first operand of G is inspected (G is unary).
        Appl(Globally, inner) => inner[0].is_propositional(),
        _ => false,
    }
}
/// checks whether an LTL formula is propositional, i.e., does not contain temporal operators
fn is_propositional(&self) -> bool {
    assert!(self.is_ltl());
    match self {
        Appl(op, inner) => {
            op.is_propositional() && inner.iter().all(|subf| subf.is_propositional())
        }
        Prop(_, _) => true,
        _ => unreachable!(),
    }
}
/// checks whether an LTL formula is invariant up to one Next operator, e.g., `G (a <-> X a)`
fn is_prime_invariant(&self) -> bool {
    assert!(self.is_ltl());
    match self {
        // `false` = no X seen yet on this path.
        Appl(Globally, inner) => inner[0].is_nearly_propositional(false),
        _ => false,
    }
}
/// checks whether an LTL formula is nearly propositional, i.e., the only temporal operator is `X` with nesting depth 1
/// (`seen_x` records whether an `X` already occurred on the path from
/// the root to this node — a second nested `X` disqualifies the formula)
fn is_nearly_propositional(&self, mut seen_x: bool) -> bool {
    assert!(self.is_ltl());
    match self {
        Appl(op, inner) => {
            if *op == Next {
                if seen_x {
                    return false;
                }
                seen_x = true;
            } else if !op.is_propositional() {
                return false;
            }
            inner
                .iter()
                .all(|subf| subf.is_nearly_propositional(seen_x))
        }
        Prop(_, _) => true,
        _ => unreachable!(),
    }
}
/// checks whether an LTL formula is a recurrence, i.e., of the form `G F (propositional)`
// (The original doc comment was copy-pasted from `is_invariant` and
// claimed `G (propositional)`, contradicting the G-F match below. The
// `reccurrence` spelling is kept — it is used consistently file-wide.)
fn is_reccurrence(&self) -> bool {
    assert!(self.is_ltl());
    match self {
        Appl(Globally, inner) => match &inner[0] {
            Appl(Finally, inner) => inner[0].is_propositional(),
            _ => false,
        },
        _ => false,
    }
}
/// Partitions an LTL formula (expected in NNF, typically an
/// assumptions-imply-guarantees disjunction) into assumption and
/// guarantee classes: propositional presets, invariants, prime
/// invariants, other syntactic safety, recurrence, and general liveness.
pub fn partition(self) -> LTLPartitioning {
    assert!(self.is_ltl());
    let mut assumptions: Vec<HyperLTL> = Vec::new();
    let mut guarantee: Option<HyperLTL> = None;
    match self {
        Appl(Disjunction, inner) => {
            // may be of form assumptions => guarantees: in NNF that is a
            // disjunction of negated assumptions plus one conjunction of
            // guarantees.
            for subf in inner.into_iter() {
                match subf {
                    Appl(Conjunction, inner) => {
                        match guarantee {
                            None => guarantee = Some(Appl(Conjunction, inner)),
                            Some(Appl(Conjunction, other)) => {
                                // Two candidate guarantee conjunctions:
                                // heuristically keep the one with more
                                // conjuncts, the other becomes an assumption.
                                if inner.len() > other.len() {
                                    // switch
                                    guarantee = Some(Appl(Conjunction, inner));
                                    assumptions.push(Appl(Conjunction, other));
                                } else {
                                    guarantee = Some(Appl(Conjunction, other));
                                    assumptions.push(Appl(Conjunction, inner));
                                }
                            }
                            _ => unreachable!(),
                        }
                    }
                    _ => {
                        assumptions.push(subf);
                    }
                }
            }
        }
        Appl(Conjunction, inner) => {
            // no (global) assumptions
            guarantee = Some(Appl(Conjunction, inner));
        }
        Quant(_, _, _) => unreachable!(),
        f => guarantee = Some(f),
    }
    assert!(guarantee.is_some() || !assumptions.is_empty());
    let guarantees = if let Some(guarantee) = guarantee {
        if let Appl(Conjunction, inner) = guarantee {
            inner
        } else {
            // Single non-conjunctive guarantee: classify it wholesale as
            // liveness and return immediately.
            return LTLPartitioning {
                preset_assumptions: Vec::new(),
                preset_guarantees: Vec::new(),
                invariant_assumptions: Vec::new(),
                invariant_guarantees: Vec::new(),
                prime_invariant_assumptions: Vec::new(),
                prime_invariant_guarantees: Vec::new(),
                safety_assumptions: Vec::new(),
                safety_guarantees: Vec::new(),
                reccurrence_assumptions: Vec::new(),
                reccurrence_guarantees: Vec::new(),
                liveness_assumptions: Vec::new(),
                liveness_guarantees: vec![guarantee],
            };
        }
    } else {
        // No guarantee at all: fold the (still negated) assumptions back
        // into one disjunction and treat it as a single liveness guarantee.
        return LTLPartitioning {
            preset_assumptions: Vec::new(),
            preset_guarantees: Vec::new(),
            invariant_assumptions: Vec::new(),
            invariant_guarantees: Vec::new(),
            prime_invariant_assumptions: Vec::new(),
            prime_invariant_guarantees: Vec::new(),
            safety_assumptions: Vec::new(),
            safety_guarantees: Vec::new(),
            reccurrence_assumptions: Vec::new(),
            reccurrence_guarantees: Vec::new(),
            liveness_assumptions: Vec::new(),
            liveness_guarantees: vec![assumptions
                .into_iter()
                .fold(HyperLTL::constant_false(), |val, ass| {
                    HyperLTL::new_binary(Op::Disjunction, val, ass)
                })],
        };
    };
    // negate assumptions (they appear negated inside the implication's
    // disjunctive form; `to_nnf(true)` restores their positive form)
    assumptions = assumptions
        .into_iter()
        .map(|subf| subf.to_nnf(true))
        .collect();
    // filter: successively peel off the most specific classes first.
    let (safety_assumptions, liveness_assumptions): (Vec<HyperLTL>, Vec<HyperLTL>) =
        assumptions
            .into_iter()
            .partition(|subf| subf.is_syntactic_safe());
    let (invariant_assumptions, safety_assumptions): (Vec<HyperLTL>, Vec<HyperLTL>) =
        safety_assumptions
            .into_iter()
            .partition(|subf| subf.is_invariant());
    let (preset_assumptions, safety_assumptions): (Vec<HyperLTL>, Vec<HyperLTL>) =
        safety_assumptions
            .into_iter()
            .partition(|subf| subf.is_propositional());
    let (prime_invariant_assumptions, safety_assumptions): (Vec<HyperLTL>, Vec<HyperLTL>) =
        safety_assumptions
            .into_iter()
            .partition(|subf| subf.is_prime_invariant());
    let (reccurrence_assumptions, liveness_assumptions): (Vec<HyperLTL>, Vec<HyperLTL>) =
        liveness_assumptions
            .into_iter()
            .partition(|subf| subf.is_reccurrence());
    let (safety_guarantees, liveness_guarantees): (Vec<HyperLTL>, Vec<HyperLTL>) = guarantees
        .into_iter()
        .partition(|subf| subf.is_syntactic_safe());
    let (invariant_guarantees, safety_guarantees): (Vec<HyperLTL>, Vec<HyperLTL>) =
        safety_guarantees
            .into_iter()
            .partition(|subf| subf.is_invariant());
    let (preset_guarantees, safety_guarantees): (Vec<HyperLTL>, Vec<HyperLTL>) =
        safety_guarantees
            .into_iter()
            .partition(|subf| subf.is_propositional());
    let (prime_invariant_guarantees, safety_guarantees): (Vec<HyperLTL>, Vec<HyperLTL>) =
        safety_guarantees
            .into_iter()
            .partition(|subf| subf.is_prime_invariant());
    let (reccurrence_guarantees, liveness_guarantees): (Vec<HyperLTL>, Vec<HyperLTL>) =
        liveness_guarantees
            .into_iter()
            .partition(|subf| subf.is_reccurrence());
    LTLPartitioning {
        preset_assumptions,
        preset_guarantees,
        invariant_assumptions,
        invariant_guarantees,
        prime_invariant_assumptions,
        prime_invariant_guarantees,
        safety_assumptions,
        safety_guarantees,
        reccurrence_assumptions,
        reccurrence_guarantees,
        liveness_assumptions,
        liveness_guarantees,
    }
}
/// Returns the quantified trace variables that also occur as proposition
/// names somewhere in the formula (a likely specification mistake).
pub fn contains_propositional_quantifier(&self) -> Vec<String> {
    assert!(self.is_hyperltl());
    // The original also computed `self.get_body()` into a binding that was
    // never used; it has been dropped, along with the unused `kind`.
    let props = self.get_propositions();
    let mut ret = Vec::new();
    for (_, quantifiers) in self.get_quantifier() {
        for q in quantifiers {
            if props.contains(q.as_str()) {
                ret.push(q);
            }
        }
    }
    ret
}
/// Rewrites every unannotated proposition whose name is in `quants` into
/// a proposition named "dummy" annotated with that name as its trace
/// variable; propositions that already carry a path are left alone.
pub fn replace_propositional_quantifier(&mut self, quants: &[String]) {
    match self {
        Quant(_, _, inner) => inner.replace_propositional_quantifier(quants),
        Appl(_, ops) => ops
            .iter_mut()
            .for_each(|f| f.replace_propositional_quantifier(quants)),
        Prop(name, path) => {
            // Already trace-annotated: nothing to rewrite.
            if path.is_some() {
                return;
            }
            if quants.contains(name) {
                *path = Some(name.to_string());
                *name = "dummy".to_string();
            }
        }
    }
}
}
/// Result of `HyperLTL::partition`: assumptions and guarantees bucketed
/// into increasingly general classes — propositional presets, invariants
/// (`G p`), prime invariants (`G` over at most one nested `X`), other
/// syntactic safety, recurrence (`G F p`), and general liveness.
#[derive(Debug, Default)]
pub struct LTLPartitioning {
    pub preset_assumptions: Vec<HyperLTL>,
    pub preset_guarantees: Vec<HyperLTL>,
    pub invariant_assumptions: Vec<HyperLTL>,
    pub invariant_guarantees: Vec<HyperLTL>,
    pub prime_invariant_assumptions: Vec<HyperLTL>,
    pub prime_invariant_guarantees: Vec<HyperLTL>,
    pub safety_assumptions: Vec<HyperLTL>,
    pub safety_guarantees: Vec<HyperLTL>,
    // Field names keep the file-wide `reccurrence` spelling.
    pub reccurrence_assumptions: Vec<HyperLTL>,
    pub reccurrence_guarantees: Vec<HyperLTL>,
    pub liveness_assumptions: Vec<HyperLTL>,
    pub liveness_guarantees: Vec<HyperLTL>,
}
impl std::fmt::Display for LTLPartitioning {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
writeln!(f, "preset assumptions ({}):", self.preset_assumptions.len())?;
for subf in &self.preset_assumptions {
writeln!(f, "\t{}", subf)?;
}
writeln!(f, "preset guarantees ({}):", self.preset_guarantees.len())?;
for subf in &self.preset_guarantees {
writeln!(f, "\t{}", subf)?;
}
writeln!(
f,
"invariant assumptions ({}):",
self.invariant_assumptions.len()
)?;
for subf in &self.invariant_assumptions {
writeln!(f, "\t{}", subf)?;
}
writeln!(
f,
"invariant guarantees ({}):",
self.invariant_guarantees.len()
)?;
for subf in &self.invariant_guarantees {
writeln!(f, "\t{}", subf)?;
}
writeln!(
f,
"prime invariant assumptions ({}):",
self.prime_invariant_assumptions.len()
)?;
for subf in &self.prime_invariant_assumptions {
writeln!(f, "\t{}", subf)?;
}
writeln!(
f,
"prime invariant guarantees ({}):",
self.prime_invariant_guarantees.len()
)?;
for subf in &self.prime_invariant_guarantees {
writeln!(f, "\t{}", subf)?;
}
writeln!(f, "safety assumptions ({}):", self.safety_assumptions.len())?;
for subf in &self.safety_assumptions {
writeln!(f, "\t{}", subf)?;
}
writeln!(f, "safety guarantees ({}):", self.safety_guarantees.len())?;
for subf in &self.safety_guarantees {
writeln!(f, "\t{}", subf)?;
}
writeln!(
f,
"reccurrence assumptions ({}):",
self.reccurrence_assumptions.len()
)?;
for subf in &self.reccurrence_assumptions {
writeln!(f, "\t{}", subf)?;
}
writeln!(
f,
"reccurrence guarantees ({}):",
self.reccurrence_guarantees.len()
)?;
for subf in &self.reccurrence_guarantees {
writeln!(f, "\t{}", subf)?;
}
writeln!(
f,
"liveness assumptions ({}):",
self.liveness_assumptions.len()
)?;
for subf in &self.liveness_assumptions {
writeln!(f, "\t{}", subf)?;
}
writeln!(
f,
"liveness guarantees ({}):",
self.liveness_guarantees.len()
)?;
for subf in &self.liveness_guarantees {
writeln!(f, "\t{}", subf)?;
}
Ok(())
}
}
impl QuantKind {
    /// Flips the quantifier to its dual, used when a negation passes
    /// through it during NNF conversion (not-forall = exists-not).
    fn negate(&mut self) {
        *self = match self {
            Exists => Forall,
            Forall => Exists,
        }
    }
}
impl Op {
    /// Replaces the operator by its dual, used when a negation is pushed
    /// through it during NNF conversion. `Next` is self-dual. Operators
    /// without a listed dual are unreachable here; in particular
    /// `Implication`/`WeakUntil` are rewritten away by `remove_derived`
    /// before negation is pushed.
    fn negate(&mut self) {
        *self = match self {
            Next => Next,
            Finally => Globally,
            Globally => Finally,
            Conjunction => Disjunction,
            Disjunction => Conjunction,
            Exclusion => Equivalence,
            Equivalence => Exclusion,
            Until => Release,
            Release => Until,
            True => False,
            False => True,
            _ => unreachable!(),
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // A quantifier-free binary formula is LTL but not HyperLTL.
    #[test]
    fn example_binary() {
        let expr = Appl(
            Disjunction,
            vec![
                Appl(Negation, vec![Prop("a".into(), None)]),
                Prop("b".into(), None),
            ],
        );
        assert!(expr.is_quantifier_free());
        assert!(!expr.is_hyperltl());
    }
    #[test]
    fn example_unary() {
        let expr = Appl(Negation, vec![Appl(Globally, vec![Prop("a".into(), None)])]);
        assert!(expr.is_quantifier_free());
        assert!(!expr.is_hyperltl());
    }
    // A quantifier prefix over an LTL body is HyperLTL.
    #[test]
    fn example_quantifier() {
        let expr = Quant(
            Forall,
            vec!["pi".into()],
            Box::new(Prop("a".into(), Some("pi".into()))),
        );
        assert!(!expr.is_quantifier_free());
        assert!(expr.is_hyperltl());
    }
    // (Function name keeps its original "propositionns" typo: renaming a
    // test is harmless but out of scope for a comment-only pass.)
    #[test]
    fn example_get_propositionns() {
        let expr = Appl(
            Conjunction,
            vec![
                Appl(Negation, vec![Prop("a".into(), None)]),
                Appl(
                    Disjunction,
                    vec![Prop("b".into(), None), Prop("c".into(), None)],
                ),
            ],
        );
        let props = expr.get_propositions();
        assert!(props.contains("a"));
        assert!(props.contains("b"));
        assert!(props.contains("c"));
    }
    // a -> b rewrites to !a || b.
    #[test]
    fn test_removed_derived_implication() {
        let mut before = Appl(
            Implication,
            vec![Prop("a".into(), None), Prop("b".into(), None)],
        );
        let after = Appl(
            Disjunction,
            vec![
                Appl(Negation, vec![Prop("a".into(), None)]),
                Prop("b".into(), None),
            ],
        );
        before.remove_derived();
        assert_eq!(before, after);
    }
    // a W b rewrites to G a || (a U b).
    #[test]
    fn test_removed_derived_weak_until() {
        let mut before = Appl(
            WeakUntil,
            vec![Prop("a".into(), None), Prop("b".into(), None)],
        );
        let after = Appl(
            Disjunction,
            vec![
                Appl(Globally, vec![Prop("a".into(), None)]),
                Appl(Until, vec![Prop("a".into(), None), Prop("b".into(), None)]),
            ],
        );
        before.remove_derived();
        assert_eq!(before, after);
    }
    // !(G a || (a U b)) in NNF: F !a && (!a R !b).
    #[test]
    fn test_nnf_transformation() {
        let a = Prop("a".into(), None);
        let b = Prop("b".into(), None);
        let before = Appl(
            Negation,
            vec![Appl(
                Disjunction,
                vec![
                    Appl(Globally, vec![a.clone()]),
                    Appl(Until, vec![a.clone(), b.clone()]),
                ],
            )],
        );
        let after = HyperLTL::new_binary(
            Conjunction,
            HyperLTL::new_unary(Finally, HyperLTL::new_unary(Negation, a.clone())),
            HyperLTL::new_binary(
                Release,
                HyperLTL::new_unary(Negation, a.clone()),
                HyperLTL::new_unary(Negation, b.clone()),
            ),
        );
        assert_eq!(before.to_nnf(false), after);
    }
    // Nested same-operator applications merge into one operand list.
    #[test]
    fn test_flatten() {
        let a = Prop("a".into(), None);
        let b = Prop("b".into(), None);
        let c = Prop("c".into(), None);
        let before = HyperLTL::new_binary(
            Conjunction,
            HyperLTL::new_binary(
                Conjunction,
                a.clone(),
                HyperLTL::new_binary(Conjunction, b.clone(), c.clone()),
            ),
            HyperLTL::new_binary(
                Disjunction,
                HyperLTL::new_binary(Disjunction, a.clone(), b.clone()),
                c.clone(),
            ),
        );
        let after = Appl(
            Conjunction,
            vec![
                a.clone(),
                b.clone(),
                c.clone(),
                Appl(Disjunction, vec![a.clone(), b.clone(), c.clone()]),
            ],
        );
        assert_eq!(before.flatten(), after);
    }
}
|
mod from_into;
mod tryfrom_tryinto;
mod to_from_strings;
/// Runs each conversion-exercise module in turn.
fn main() {
    from_into::main();
    tryfrom_tryinto::main();
    to_from_strings::main();
}
use crate::{
db::HirDatabase,
ids::{FunctionType, Identifier, RecordType, Type},
lower::{
error::{LoweringError, Missing},
Lower, LoweringCtx,
},
types::{FunctionTypeData, RecordTypeData, TypeData},
};
use christmas_tree::RootOwnership;
use valis_ds::Intern;
use valis_syntax::{
ast::{self, NameOwner},
syntax::SyntaxTree,
};
impl<R: RootOwnership<SyntaxTree>> Lower<ast::TypeRaw<R>> for Type {
    /// Lowers a raw syntax type into an interned HIR `Type`, dispatching
    /// on the syntax kind. Both tuple and struct syntax lower to record
    /// types; variables and function types lower to their own data kinds.
    fn lower<'a, DB: HirDatabase>(
        syntax: ast::TypeRaw<R>,
        ctx: &mut LoweringCtx<'a, DB>,
    ) -> Result<Self, LoweringError> {
        use ast::TypeRawKind;
        let type_data = match syntax.kind() {
            TypeRawKind::TupleType(inner) => TypeData::Record(RecordType::lower(inner, ctx)?),
            TypeRawKind::StructType(inner) => TypeData::Record(RecordType::lower(inner, ctx)?),
            TypeRawKind::VarType(inner) => TypeData::Variable(Identifier::lower(inner, ctx)?),
            TypeRawKind::FnType(inner) => TypeData::Function(FunctionType::lower(inner, ctx)?),
        };
        // Intern the lowered data into the context's tables.
        Ok(type_data.intern(ctx.tables()))
    }
}
impl<R: RootOwnership<SyntaxTree>> Lower<ast::FnTypeRaw<R>> for FunctionType {
    /// Lowers a function type: the parameter type is required (missing one
    /// is a `Missing::ParamType` error), while the return type is optional
    /// and lowered only when present.
    fn lower<'a, DB: HirDatabase>(
        syntax: ast::FnTypeRaw<R>,
        ctx: &mut LoweringCtx<'a, DB>,
    ) -> Result<Self, LoweringError> {
        let parameter_syn = syntax.param_type().ok_or(LoweringError {
            kind: Missing::ParamType.into(),
        })?;
        let parameter = Type::lower(parameter_syn, ctx)?;
        // `transpose` turns Option<Result<..>> into Result<Option<..>> so
        // a failed lowering of an existing return type still errors out.
        let return_type = syntax
            .return_type()
            .map(|return_type_syn| Type::lower(return_type_syn, ctx))
            .transpose()?;
        let function_type = FunctionTypeData {
            parameter,
            return_type,
        }
        .intern(ctx.tables());
        Ok(function_type)
    }
}
impl<R: RootOwnership<SyntaxTree>> Lower<ast::StructTypeRaw<R>> for RecordType {
    /// Lowers a struct-type literal into an interned record type with one
    /// label/type pair per field.
    ///
    /// The previous implementation threaded `(&mut labels, &mut types)`
    /// through `try_fold`, which obscured a simple fallible loop; this is the
    /// same computation written directly.
    fn lower<'a, DB: HirDatabase>(
        syntax: ast::StructTypeRaw<R>,
        ctx: &mut LoweringCtx<'a, DB>,
    ) -> Result<Self, LoweringError> {
        let mut labels = Vec::new();
        let mut types = Vec::new();
        for field in syntax.fields() {
            // Every struct field must carry both a name and a type annotation.
            let label_syn = field.name().ok_or(LoweringError {
                kind: Missing::Name.into(),
            })?;
            let label = Identifier::lower(label_syn, ctx)?;
            let typ_syn = field.inner_type().ok_or(LoweringError {
                kind: Missing::StructFieldType.into(),
            })?;
            let typ = Type::lower(typ_syn, ctx)?;
            labels.push(label);
            types.push(typ);
        }
        let record_expr = RecordTypeData { labels, types }.intern(ctx.tables());
        Ok(record_expr)
    }
}
impl<R: RootOwnership<SyntaxTree>> Lower<ast::TupleTypeRaw<R>> for RecordType {
    /// Lowers a tuple type into a record type whose labels are the positional
    /// indices "0", "1", ... of its fields.
    fn lower<'a, DB: HirDatabase>(
        syntax: ast::TupleTypeRaw<R>,
        ctx: &mut LoweringCtx<'a, DB>,
    ) -> Result<Self, LoweringError> {
        // Lower every field type first so we know how many labels to build.
        let mut types = Vec::new();
        for field_syn in syntax.fields() {
            types.push(Type::lower(field_syn, ctx)?);
        }
        // Synthesize numeric labels matching the tuple positions.
        let mut labels = Vec::with_capacity(types.len());
        for int_label in 0..types.len() {
            labels.push(Identifier::lower(format!("{}", int_label), ctx)?);
        }
        Ok(RecordTypeData { labels, types }.intern(ctx.tables()))
    }
}
|
#[macro_use]
extern crate diesel;
mod config;
pub mod models;
mod schema;
use diesel::prelude::*;
use diesel::r2d2::{ConnectionManager, Pool, PooledConnection};
use thiserror::Error;
/// Errors produced by this database layer.
#[derive(Debug, Error)]
pub enum DatabaseError {
    /// Wraps configuration loading failures.
    // NOTE(review): no `#[from]` is derived here, so callers must wrap
    // `config::ConfigError` manually — confirm that is intentional.
    #[error("Config error: {0}")]
    Config(config::ConfigError),
}
/// Convenience alias for results whose error type is [`DatabaseError`].
pub type DatabaseResult<T> = Result<T, DatabaseError>;
/// Owns an r2d2 connection pool to a PostgreSQL database.
pub struct Database {
    // Cheap to share: the pool hands out pooled connections on demand.
    pool: Pool<ConnectionManager<PgConnection>>,
}
impl Database {
    /// Builds a connection pool from the `DATABASE_URL` environment variable.
    ///
    /// # Panics
    /// Panics if `DATABASE_URL` is unset or the pool cannot be created.
    pub fn connect() -> Self {
        let database_url = kankyo::key("DATABASE_URL").expect("DATABASE_URL");
        let manager = ConnectionManager::<PgConnection>::new(database_url);
        let pool = Pool::builder()
            .max_size(15)
            .build(manager)
            .expect("Failed to create connection pool");
        Database { pool }
    }
    /// Checks a connection out of the pool.
    ///
    /// # Panics
    /// Panics if no connection becomes available before the pool's timeout.
    fn conn(&self) -> PooledConnection<ConnectionManager<PgConnection>> {
        // `Pool::get` takes `&self`; cloning the pool first was redundant.
        self.pool
            .get()
            .expect("Attempt to get connection timed out")
    }
}
|
#[macro_use]
extern crate nom;
use nom::{alphanumeric1, space, types::CompleteStr};
// Parser combinators built with nom 4's `named!` macro over `CompleteStr`
// (complete input, so partial matches at end-of-input are not `Incomplete`).
// A key is a single alphanumeric token.
named!(key<CompleteStr,CompleteStr>,
    call!(alphanumeric1)
);
// The only supported assignment operator is `=`.
named!(operator<CompleteStr,CompleteStr>,
    tag!("=")
);
// A value is one or more alphanumeric runs and/or spaces, captured verbatim.
named!(value<CompleteStr,CompleteStr>,
    recognize!(many1!(alt_complete!(alphanumeric1 | space)))
);
// A statement is `key = value`, with surrounding whitespace tolerated by
// `ws!`; yields the (key, value) pair.
named!(statement<CompleteStr,(CompleteStr,CompleteStr)>,
    ws!(
        do_parse!(
            k: key >>
            operator >>
            val: value >>
            (k, val)
        )
    )
);
fn main() {
let mut source = CompleteStr("key = val val val");
let test = statement(source);
println!("{:?}", test);
} |
use std::{process, thread};
use std::error::Error;
use std::collections::HashMap;
use std::fs::File;
use std::path::Path;
use std::time::Duration;
use error::SimpleError;
extern crate serde_json;
// Writes `$s` to the curses window `$win`; on curses failure, returns an
// error from the *enclosing* function via `ret_err!`.
macro_rules! write_str_ret {
    ($win:expr, $s:expr) => {
        if $win.addstr($s) == ERR {
            ret_err!(format!("\ncurses error writing string: {}", $s));
        }
    }
}
// Early-returns `Err(Box<SimpleError>)` built from `$e` out of the enclosing
// function. Only usable inside functions returning `Result<_, Box<Error>>`.
macro_rules! ret_err {
    ($e:expr) => {
        return Err(Box::new(SimpleError::from($e)));
    }
}
use std::io::{
BufWriter,
Write,
};
use parser::{
Action,
};
use std::process::{
Command,
Stdio,
Child,
};
extern crate pancurses;
use pancurses::{
ERR,
Window,
endwin,
};
// Android key codes (android.view.KeyEvent), sent via `input keyevent`.
const KEYCODE_ENTER: &'static str = "66";
const KEYCODE_TAB: &'static str = "61";
const KEYCODE_DEL: &'static str = "67";
// Binary used to reach the device shell.
const CMD: &'static str = "adb";
/// A live `adb shell` session plus the macro table recorded for it.
#[derive(Debug)]
pub(crate) struct Session {
    // The configured `adb` command (kept for reference/debugging).
    pub(crate) adb: Command,
    // The spawned `adb shell` child; its stdin receives shell commands.
    pub(crate) child: Child,
    // Named macros: macro name -> recorded action list.
    macros: HashMap<String, Vec<Action>>,
}
impl Session {
    /// Spawns `adb [-s <phone>] shell` with piped stdio and wraps it in a
    /// `Session`. On spawn failure the error is shown in the curses window,
    /// curses is torn down, and the process exits with status 1.
    pub(crate) fn create(win: &Window, phone: &Option<String>) -> Self {
        let mut cmd = Command::new(CMD);
        cmd.stdin(Stdio::piped())
            .stdout(Stdio::piped())
            .stderr(Stdio::piped());
        // Target a specific device when a serial was given.
        if let Some(ref p) = *phone {
            cmd.arg("-s");
            cmd.arg(p.as_str());
        }
        cmd.arg("shell");
        let child = cmd.spawn();
        if child.is_err() {
            let e = child.err().unwrap();
            win.addstr(&format!("error starting shell: {}\n", e));
            win.addstr("Press any key to exit\n");
            win.getch();
            // Restore the terminal before exiting so the message stays readable.
            endwin();
            process::exit(1);
        }
        Self{
            adb: cmd,
            child: child.ok().unwrap(),
            macros: HashMap::new(),
        }
    }
    /// Executes the given actions against the child shell's stdin, using and
    /// updating this session's macro table.
    pub(crate) fn execute(&mut self, v: &Vec<Action>) -> Result<(), Box<Error>> {
        let mut stdin = match self.child.stdin.as_mut() {
            Some(mut s) => s,
            None => ret_err!("failed to get child stdin"),
        };
        execute(&mut stdin, v, &mut self.macros)
    }
    /// Lists the names of all defined macros in the curses window, one per line.
    pub(crate) fn display_macro_names(&self, win: &Window) -> Result<(), Box<Error>> {
        for name in self.macros.keys() {
            write_str_ret!(win, &format!("\n- {}", name));
        }
        Ok(())
    }
}
/// Executes a top-level list of actions, writing generated shell commands to
/// `w` and reading/updating the macro table. `Save`/`Load`/`DefineMacro` are
/// only legal here (not inside macros); `RunMacro` replays a macro's actions
/// through `execute_nested`.
fn execute<W: Write>(w: &mut W, v: &Vec<Action>, macros: &mut HashMap<String, Vec<Action>>) -> Result<(), Box<Error>> {
    for action in v {
        match *action {
            Action::Save(ref fname) => save_macros(fname, macros)?,
            Action::Load(ref fname) => load_macros(fname, macros)?,
            Action::Wait(n) => thread::sleep(Duration::from_millis(n)),
            Action::SendText(ref s) => send_text(w, s)?,
            Action::Delete(n) => send_event(w, KEYCODE_DEL, n)?,
            Action::Tab => send_event(w, KEYCODE_TAB, 1)?,
            Action::Enter => send_event(w, KEYCODE_ENTER, 1)?,
            Action::RunMacro(ref name) => {
                // `entry` holds a shared borrow of `macros`; `execute_nested`
                // takes only `&HashMap`, so the reborrows can coexist.
                let entry = macros.get(name);
                match entry {
                    Some(actions) => {
                        for a in actions.iter() {
                            execute_nested(w, a, macros)?;
                        }
                    },
                    None => {
                        ret_err!(format!("macro {} not defined", name));
                    }
                };
            },
            Action::DefineMacro(ref name, ref actions) => {
                // Redefinition silently replaces an existing macro.
                macros.insert(name.to_string(), actions.to_vec());
            },
            _ => {
                ret_err!(format!("unexpected action {:?} given to session", *action));
            }
        };
    }
    Ok(())
}
/// Serializes the macro table to `fname` as JSON, creating or truncating the
/// file.
fn save_macros<P: AsRef<Path>>(fname: P, macros: &HashMap<String, Vec<Action>>) -> Result<(), Box<Error>> {
    serde_json::to_writer(File::create(fname)?, macros)?;
    Ok(())
}
/// Reads a JSON macro table from `fname` and merges it into `macros`,
/// overwriting entries that share a name.
fn load_macros<P: AsRef<Path>>(fname: P, macros: &mut HashMap<String, Vec<Action>>) -> Result<(), Box<Error>> {
    let file = File::open(fname)?;
    let map: HashMap<String, Vec<Action>> = serde_json::from_reader(file)?;
    // `map` is owned, so move its entries in; the old loop iterated by
    // reference and cloned every key and value for no benefit.
    macros.extend(map);
    Ok(())
}
/// Executes one action that occurs *inside* a macro. Unlike `execute`, the
/// macro table is immutable here, so `DefineMacro`/`Save`/`Load` are rejected;
/// nested `RunMacro` recurses (note: mutually recursive macros would recurse
/// without bound — the table is not cycle-checked).
fn execute_nested<W: Write>(w: &mut W, action: &Action, macros: &HashMap<String, Vec<Action>>) -> Result<(), Box<Error>> {
    match *action {
        Action::SendText(ref s) => send_text(w, s),
        Action::Delete(n) => send_event(w, KEYCODE_DEL, n),
        Action::Wait(n) => { thread::sleep(Duration::from_millis(n)); Ok(()) },
        Action::Tab => send_event(w, KEYCODE_TAB, 1),
        Action::Enter => send_event(w, KEYCODE_ENTER, 1),
        Action::RunMacro(ref name) => {
            let entry = macros.get(name);
            match entry {
                Some(actions) => {
                    for a in actions.iter() {
                        execute_nested(w, a, macros)?;
                    }
                    Ok(())
                },
                None => {
                    ret_err!(format!("macro {} not defined", name));
                }
            }
        }
        Action::DefineMacro(_, _) => {
            ret_err!("Cannot define macros inside of macros");
        },
        _ => {
            ret_err!(format!("Command {:?} not executable in nested context", action));
        }
    }
}
/// Sends `input keyevent <evt> ... <evt>\n` with `count` space-separated
/// repetitions of the key code. Errors when `count` is zero.
fn send_event<W: Write>(w: &mut W, evt: &str, count: u32) -> Result<(), Box<Error>> {
    if count == 0 {
        ret_err!("count must be >= 1 in send_event");
    }
    // Join the repeated key code with single spaces, then terminate the line.
    let codes = vec![evt; count as usize].join(" ");
    let cmd = format!("input keyevent {}\n", codes);
    send_string(w, &cmd)
}
/// Sends an `input text '<s>'` shell command carrying the literal text.
fn send_text<W: Write>(w: &mut W, s: &str) -> Result<(), Box<Error>> {
    let mut cmd = String::from("input text '");
    cmd.push_str(s);
    cmd.push_str("'\n");
    send_string(w, &cmd)
}
fn send_string<W: Write>(mut w: &mut W, s: &str) -> Result<(), Box<Error>> {
let mut writer = BufWriter::new(&mut w);
writer.write_all(s.as_bytes())?;
Ok(())
}
#[cfg(test)]
mod test {
    use std::{str, fs, time};
    use parser::Action;
    use super::*;
    // Builds a fresh macro table containing three fixtures: a valid simple
    // macro, a macro exercising every nested-legal action, and one that
    // illegally defines a macro inside a macro.
    macro_rules! get_macros {
        () => {{
            let mut macros = HashMap::new();
            macros.insert(
                String::from("simple"),
                vec![
                    Action::Tab,
                    Action::Enter,
                ]
            );
            macros.insert(
                String::from("all_allowed"),
                vec![
                    Action::Delete(2),
                    Action::SendText(String::from("test")),
                    Action::RunMacro(String::from("simple")),
                ]
            );
            macros.insert(
                String::from("bad_one"),
                vec![
                    Action::DefineMacro(
                        String::from("nested"),
                        Vec::new()
                    )
                ]
            );
            macros
        }}
    }
    // Asserts that byte buffer `$buf` is valid UTF-8 and equals `$exp`.
    macro_rules! check_val {
        ($buf:expr, $exp:expr) => {
            match str::from_utf8(&$buf) {
                Err(e) => panic!("Failed to get buf {:?} as utf8 string: {:?}", $buf, e),
                Ok(s) => assert!(
                    String::from(s) == String::from($exp),
                    "buffer didn't have expected string `{}` got `{}`", $exp, s
                ),
            }
        }
    }
    macro_rules! is_ok {
        ($r:expr) => { assert!($r.is_ok(), "Expected Ok but got {:?}", $r) };
    }
    macro_rules! is_err {
        ($r:expr) => { assert!($r.is_err(), "Expected Err but got {:?}", $r) };
    }
    // Variant-matching asserts for individual `Action` values.
    macro_rules! expect_sendtext {
        ($a:expr, $txt:expr) => {
            assert!(match $a {
                Action::SendText(ref t) => t == $txt,
                _ => false,
            },
            "Expected Action::SendText(\"{}\") but got {:?}", $txt, $a
            );
        }
    }
    macro_rules! expect_tab {
        ($a:expr) => {
            assert!(match $a {
                Action::Tab => true,
                _ => false,
            },
            "Expected Action::Tab but got {:?}", $a
            );
        }
    }
    macro_rules! expect_enter {
        ($a:expr) => {
            assert!(match $a {
                Action::Enter => true,
                _ => false,
            },
            "Expected Action::Enter but got {:?}", $a
            );
        }
    }
    #[test]
    fn session_send_string() {
        let v = "test";
        let mut buf = Vec::with_capacity(8);
        let r = send_string(&mut buf, &v);
        is_ok!(r);
        check_val!(buf, v);
    }
    #[test]
    fn session_send_text() {
        let v = "test";
        let mut buf = Vec::with_capacity(64);
        let r = send_text(&mut buf, v);
        is_ok!(r);
        check_val!(buf, "input text 'test'\n");
    }
    // Runs `execute` over the given actions against a fresh macro table and a
    // byte-buffer writer; asserts success and that the buffer equals `$exp`.
    // Yields `(buf, macros)` for further inspection.
    macro_rules! send_actions {
        ($exp:expr, $($action:expr),+) => {{
            let mut action = Vec::new();
            $(
                action.push($action);
            )*
            let mut buf = Vec::with_capacity(64);
            let mut macros = get_macros!();
            let r = execute(&mut buf, &action, &mut macros);
            is_ok!(r);
            check_val!(buf, $exp);
            (buf, macros)
        }};
    }
    // As `send_actions!`, but asserts that `execute` fails.
    macro_rules! fail_send_actions {
        ($exp:expr, $($action:expr),+) => {{
            let mut action = Vec::new();
            $(
                action.push($action);
            )*
            let mut buf = Vec::with_capacity(64);
            let mut macros = get_macros!();
            let r = execute(&mut buf, &action, &mut macros);
            is_err!(r);
            check_val!(buf, $exp);
            (buf, macros)
        }}
    }
    #[test]
    fn session_send_delete() {
        send_actions!("input keyevent 67 67\n", Action::Delete(2));
    }
    #[test]
    fn session_send_tab() {
        send_actions!("input keyevent 61\n", Action::Tab);
    }
    #[test]
    fn session_send_enter() {
        send_actions!("input keyevent 66\n", Action::Enter);
    }
    #[test]
    fn session_send_multiple() {
        send_actions!(
            "input text 'hi'\ninput keyevent 66\n",
            Action::SendText(String::from("hi")), Action::Enter
        );
    }
    #[test]
    fn session_do_macro() {
        send_actions!(
            "input keyevent 61\ninput keyevent 66\n",
            Action::RunMacro(String::from("simple"))
        );
    }
    #[test]
    fn session_unknown_macro() {
        fail_send_actions!(
            "",
            Action::RunMacro(String::from("doesntexist"))
        );
    }
    #[test]
    fn session_bad_event() {
        fail_send_actions!(
            "",
            Action::Delete(0)
        );
    }
    #[test]
    fn session_nested() {
        send_actions!(
            "input keyevent 67 67\ninput text 'test'\ninput keyevent 61\ninput keyevent 66\n",
            Action::RunMacro(String::from("all_allowed"))
        );
    }
    #[test]
    fn session_no_nested_defines() {
        fail_send_actions!(
            "",
            Action::RunMacro(String::from("bad_one"))
        );
    }
    #[test]
    fn session_wait() {
        let t = 50u64;
        let dur = time::Duration::from_millis(t);
        let action = vec![Action::Wait(t)];
        let mut buf = Vec::with_capacity(64);
        let mut macros = get_macros!();
        let now = time::Instant::now();
        let r = execute(&mut buf, &action, &mut macros);
        let done = now.elapsed();
        is_ok!(r);
        // Wait must block for at least the requested duration.
        assert!(done >= dur);
    }
    #[test]
    #[allow(unused_must_use)] // ignore deleting the file
    fn session_save_load_macros() {
        // Round-trips the fixture macros through a JSON file on disk.
        let fname = String::from("test-0aaaf35cc22e5c694005.json");
        send_actions!(
            "",
            Action::Save(fname.clone())
        );
        let mut buf = Vec::with_capacity(64);
        let mut macros = HashMap::new();
        let actions = vec![Action::Load(fname.clone())];
        let r = execute(&mut buf, &actions, &mut macros);
        is_ok!(r);
        check_val!(buf, "");
        let key = String::from("simple");
        assert!(
            macros.contains_key(&key),
            "macro simple wasn't loaded: {:?}", macros
        );
        let loaded = macros.get(&key);
        assert!(
            loaded.is_some(),
            "Couldn't get actions for {}: {:?}", key, loaded
        );
        expect_tab!(loaded.unwrap()[0]);
        expect_enter!(loaded.unwrap()[1]);
        fs::remove_file(&fname);
    }
    #[test]
    fn session_define_macro() {
        let key = String::from("test");
        let (_, macros) = send_actions!(
            "",
            Action::DefineMacro(
                key.clone(),
                vec![Action::SendText(String::from("hello")), Action::Enter]
            )
        );
        assert!(
            macros.contains_key(&key),
            "macro test wasn't defined: {:?}", macros
        );
        let actions = macros.get(&key);
        assert!(
            actions.is_some(),
            "Couldn't get actions for {}: {:?}", key, actions
        );
        expect_sendtext!(actions.unwrap()[0], "hello");
        expect_enter!(actions.unwrap()[1]);
    }
}
|
use std::collections::BinaryHeap;
use std::io;
use std::io::prelude::*;
use std::time::{Instant};
use std::thread;
use ordered_float::OrderedFloat;
use evmap::{ReadHandle, WriteHandle};
use rand::{Rng};
use uuid::Uuid;
// Total number of records generated (split evenly across worker threads).
const SIZE: u32 = 8 * 1024 * 1024;
// Per-heap retention limit: heaps are popped back down to this size.
const HEAP_CAPACITY: usize = 10;
// Number of generator/scanner threads; SIZE is divided by this.
const NTHREADS: u32 = 4;
mod state;
use state::State;
/// Builds an evmap of `size` random `State` records keyed by fresh UUIDs and
/// publishes them, returning the (read, write) handle pair.
/// `heap_number` is used only for progress logging.
fn generate_ev_map(heap_number: u32, size: u32) -> (ReadHandle<Uuid, Box<State>>, WriteHandle<Uuid, Box<State>>) {
    println!("Generating heap number: {} with size {}", heap_number, size);
    let (r, mut w) = evmap::new();
    let mut rng = rand::thread_rng();
    for _ in 0..size {
        let id = Uuid::new_v4();
        let state = State {
            id: id,
            score: OrderedFloat(rng.gen_range(0.0, 1.0)),
            scorea: OrderedFloat(rng.gen_range(0.0, 1.0)),
            scoreb: OrderedFloat(rng.gen_range(0.0, 1.0)),
            scorec: OrderedFloat(rng.gen_range(0.0, 1.0)),
            scored: OrderedFloat(rng.gen_range(0.0, 1.0)),
        };
        w.insert(id, Box::new(state));
    }
    // Publish all pending inserts so readers can observe them.
    w.refresh();
    (r, w)
}
/// Scans every record reachable from `read_handle`, recomputing `score` as
/// the sum of the four sub-scores, and keeps a bounded `BinaryHeap` of them.
///
/// NOTE(review): `BinaryHeap` is a max-heap, so popping when over capacity
/// evicts the LARGEST element — this retains the HEAP_CAPACITY *smallest*
/// combined scores. If "top N highest" was intended, the eviction should use
/// a min-heap (e.g. `std::cmp::Reverse`) — confirm intent.
fn gen_ev_heap(read_handle: &ReadHandle<Uuid, Box<State>>) -> BinaryHeap<State> {
    let mut heap = BinaryHeap::<State>::with_capacity(HEAP_CAPACITY);
    let now = Instant::now();
    // `key` is unused; only the value set per key is inspected.
    for (key, value) in &read_handle.read().unwrap() {
        match value.get_one() {
            None => println!("Oops get_one"),
            Some(val) => {
                heap.push(State {
                    id: (*val).id,
                    // Combined score = sum of the four component scores.
                    score: OrderedFloat::from((*val).scorea.into_inner() + (*val).scoreb.into_inner() + (*val).scorec.into_inner() + (*val).scored.into_inner()),
                    scorea: (*val).scorea,
                    scoreb: (*val).scoreb,
                    scorec: (*val).scorec,
                    scored: (*val).scored,
                });
                // Bound the heap: evict once it exceeds capacity.
                if heap.len() > HEAP_CAPACITY {
                    heap.pop();
                }
            },
        }
    }
    println!("Size of heap: {}, Heap generation time: {}", heap.len(), now.elapsed().as_millis());
    return heap
}
/// Generates NTHREADS evmaps in parallel, scans them in parallel into bounded
/// heaps, merges the heaps, and prints the surviving records and timings.
fn main() {
    println!("Starting slide...");
    println!("Initializing memory with capacity {}...", SIZE);
    let mut maps = Box::new(Vec::new());
    use std::sync::mpsc;
    // Phase 1: each worker builds one evmap and sends its handle pair back.
    let (tx_gen, rx_gen) = mpsc::channel();
    for i in 0..NTHREADS {
        // Spin up another thread
        let tx = tx_gen.clone();
        thread::spawn(move || {
            let map = generate_ev_map(i, SIZE / NTHREADS);
            println!("Sending map {}...", i);
            tx.send(map).unwrap();
        });
    }
    // Collect the handle pairs (arrival order is not necessarily i's order).
    for i in 0..NTHREADS {
        let received = rx_gen.recv().unwrap();
        println!("Received map number {}", i);
        maps.push(received);
    }
    println!("Initializing memory with capacity {}...complete", SIZE);
    println!("Scanning with capacity {}...", SIZE);
    let now = Instant::now();
    // Phase 2: scan each map concurrently; readers are cloned into threads.
    let (tx, rx) = mpsc::channel();
    for i in 0..NTHREADS {
        // Spin up another thread
        let (reader, _) = &maps[i as usize];
        let cloned_reader = reader.clone();
        let tx = tx.clone();
        thread::spawn(move || {
            let heap = gen_ev_heap(&cloned_reader);
            println!("Sending heap {}...", i);
            tx.send(heap).unwrap();
        });
    }
    // Phase 3: merge the per-thread heaps, keeping the heap bounded the same
    // way gen_ev_heap does (max-heap pop — see the note on gen_ev_heap).
    let mut consolidated_heap = BinaryHeap::<State>::with_capacity(HEAP_CAPACITY);
    for i in 0..NTHREADS {
        let received = rx.recv().unwrap();
        println!("Received {} items for heap {}", received.len(), i);
        for x in received {
            consolidated_heap.push(State {
                id: x.id,
                score: x.score,
                scorea: x.scorea,
                scoreb: x.scoreb,
                scorec: x.scorec,
                scored: x.scored,
            });
            if consolidated_heap.len() > HEAP_CAPACITY {
                consolidated_heap.pop();
            }
        }
    }
    for x in consolidated_heap {
        println!("Response {} - {} - {} - {} - {} - {}",
            x.id,
            x.score,
            x.scorea,
            x.scoreb,
            x.scorec,
            x.scored,
        );
    }
    println!("Scanning memory with capacity {}...complete", SIZE);
    println!("Took time: {} ms", now.elapsed().as_millis());
    pause();
}
fn pause() {
let mut stdout = io::stdout();
write!(stdout, "Press any key to continue...").unwrap();
stdout.flush().unwrap();
let _ = io::stdin().read(&mut [0u8]).unwrap();
} |
/// Returns the deduplicated union of `_a` and `_b`, preserving first-seen
/// order (`_a`'s elements first).
fn union (_a: &Vec<String>, _b: &Vec<String>) -> Vec<String> {
    // Chaining both inputs replaces the two duplicated loops; the needless
    // `mut` on `_b` and the double-reference `contains(&ele)` are gone.
    let mut result: Vec<String> = Vec::new();
    for ele in _a.iter().chain(_b.iter()) {
        if !result.contains(ele) {
            result.push(ele.clone());
        }
    }
    result
}
/// Returns the elements of `_a` that also occur in `_b`, deduplicated and in
/// `_a`'s order.
fn intersection(_a: &Vec<String>, _b: &Vec<String>) -> Vec<String> {
    let mut result: Vec<String> = Vec::new();
    for ele in _a {
        // `&&` short-circuits; the original used bitwise `&`, which always
        // evaluated both membership tests.
        if _b.contains(ele) && !result.contains(ele) {
            result.push(ele.clone());
        }
    }
    result
}
/// Counts the length (in chars) of each space-terminated run in `_s`.
///
/// NOTE(review): as in the original, characters after the final space produce
/// NO entry — only runs terminated by a space are counted. Confirm callers
/// always pass trailing-space-terminated input.
fn cir_count(_s: &str) -> Vec<i32>{
    let mut v: Vec<i32> = Vec::new();
    let mut count = 0;
    for ch in _s.chars(){
        if ch == ' '{
            // A space ends the current run; record it and restart the count.
            v.push(count);
            count = 0;
        }else{
            count += 1;
        }
    }
    v
}
/// Splits `s` into its space-terminated words.
///
/// NOTE(review): like the original, characters after the last space are
/// discarded — only words followed by a space are returned. Confirm callers
/// rely on trailing-space-terminated input.
fn resolve_word(s: &str) -> Vec<String> {
    let mut x: Vec<String> = Vec::new();
    let mut word: String = String::new();
    for ch in s.chars() {
        if ch == ' ' {
            x.push(word);
            word = String::new();
        } else {
            // Append in place: the original rebuilt the whole word with
            // `format!` on every character, which is O(n^2).
            word.push(ch);
        }
    }
    x
}
|
/// bindings for ARINC653P2-4 3.4 schedules
pub mod basic {
    use crate::bindings::*;
    /// Name of a module schedule, as an APEX name.
    pub type ScheduleName = ApexName;
    /// According to ARINC 653P1-5 this may either be 32 or 64 bits.
    /// Internally we will use 64-bit by default.
    /// The implementing Hypervisor may cast this to 32-bit if needed
    pub type ScheduleId = ApexLongInteger;
    /// Raw bindings for the ARINC653P2-4 3.4 module-schedule services.
    /// The `L: Locked` parameter is the crate's lock-token mechanism; these
    /// methods are hidden unless the `full_doc` feature is enabled.
    pub trait ApexScheduleP2 {
        #[cfg_attr(not(feature = "full_doc"), doc(hidden))]
        fn set_module_schedule<L: Locked>(schedule_id: ScheduleId) -> Result<(), ErrorReturnCode>;
        #[cfg_attr(not(feature = "full_doc"), doc(hidden))]
        fn get_module_schedule_status<L: Locked>() -> Result<ApexScheduleStatus, ErrorReturnCode>;
        #[cfg_attr(not(feature = "full_doc"), doc(hidden))]
        fn get_module_schedule_id<L: Locked>(
            schedule_name: ScheduleName,
        ) -> Result<ScheduleId, ErrorReturnCode>;
    }
    /// Raw schedule status as reported by the hypervisor.
    #[derive(Debug, Clone, PartialEq, Eq)]
    pub struct ApexScheduleStatus {
        pub time_of_last_schedule_switch: ApexSystemTime,
        pub current_schedule: ScheduleId,
        pub next_schedule: ScheduleId,
    }
}
/// abstractions for ARINC653P2-4 3.4 schedules
pub mod abstraction {
    use super::basic::{ScheduleId, ScheduleName};
    use crate::bindings::*;
    use crate::hidden::Key;
    use crate::prelude::*;
    /// Ergonomic schedule status with the crate's `SystemTime` instead of the
    /// raw `ApexSystemTime`.
    #[derive(Debug, Clone, PartialEq, Eq)]
    pub struct ScheduleStatus {
        pub time_of_last_schedule_switch: SystemTime,
        pub current_schedule: ScheduleId,
        pub next_schedule: ScheduleId,
    }
    impl From<ApexScheduleStatus> for ScheduleStatus {
        fn from(s: ApexScheduleStatus) -> Self {
            ScheduleStatus {
                time_of_last_schedule_switch: s.time_of_last_schedule_switch.into(),
                current_schedule: s.current_schedule,
                next_schedule: s.next_schedule,
            }
        }
    }
    /// Lock-free facade over [`ApexScheduleP2`]: the same three services,
    /// without the `Locked` token parameter and with `Error` results.
    pub trait ApexScheduleP2Ext: ApexScheduleP2 + Sized {
        fn set_module_schedule(schedule_id: ScheduleId) -> Result<(), Error>;
        fn get_module_schedule_status() -> Result<ScheduleStatus, Error>;
        fn get_module_schedule_id(schedule_name: ScheduleName) -> Result<ScheduleId, Error>;
    }
    // Blanket impl: every raw binding automatically gets the ergonomic API,
    // supplying the crate-private `Key` as the lock token.
    impl<T: ApexScheduleP2> ApexScheduleP2Ext for T {
        fn set_module_schedule(schedule_id: ScheduleId) -> Result<(), Error> {
            T::set_module_schedule::<Key>(schedule_id)?;
            Ok(())
        }
        fn get_module_schedule_status() -> Result<ScheduleStatus, Error> {
            Ok(T::get_module_schedule_status::<Key>().map(ScheduleStatus::from)?)
        }
        fn get_module_schedule_id(schedule_name: ScheduleName) -> Result<ScheduleId, Error> {
            Ok(T::get_module_schedule_id::<Key>(schedule_name)?)
        }
    }
}
|
#![deny(missing_docs)]
#![doc(html_root_url = "http://arcnmx.github.io/result-rs/")]
//! Helpers for dealing with nested `Result` and `Option` types. Convert a
//! `Option<Result<T, E>>` to `Result<Option<T>, E>` and vice versa.
//!
//! `use result::prelude::*` is recommended in order to import the extension
//! traits into scope.
/// Module that contains all extension traits useful for working with nested
/// `Option` and `Result` types.
pub mod prelude {
    // Re-export both extension traits so `use result::prelude::*` suffices.
    pub use super::{ResultOptionExt, ResultIteratorExt};
}
/// Extension trait for nested `Option` and `Result` types.
///
/// Alias of `ResultOptionExt` but could become a distinct separate trait in the
/// future.
pub use ::ResultOptionExt as OptionResultExt;
/// Extension trait for nested `Option` and `Result` types.
/// Extension trait for nested `Option` and `Result` types.
pub trait ResultOptionExt {
    /// The inverted output type of the operation.
    type Out;
    /// Inverts a nested `Option<Result<T, E>>` or `Result<Option<T>, E>`
    /// (e.g. `Some(Ok(v))` ↔ `Ok(Some(v))`); inverting twice round-trips.
    fn invert(self) -> Self::Out;
}
impl<T, E> OptionResultExt for Option<Result<T, E>> {
    type Out = Result<Option<T>, E>;
    /// Turns the option inside-out: `None => Ok(None)`,
    /// `Some(Ok(v)) => Ok(Some(v))`, `Some(Err(e)) => Err(e)`.
    fn invert(self) -> Self::Out {
        self.map_or(Ok(None), |res| res.map(Some))
    }
}
impl<T, E> ResultOptionExt for Result<Option<T>, E> {
    type Out = Option<Result<T, E>>;
    /// Turns the result inside-out: `Ok(None) => None`,
    /// `Ok(Some(v)) => Some(Ok(v))`, `Err(e) => Some(Err(e))`.
    fn invert(self) -> Self::Out {
        match self {
            Err(e) => Some(Err(e)),
            Ok(opt) => opt.map(Ok),
        }
    }
}
/// Extension trait for iterators that produce `Result` types
/// Extension trait for iterators that produce `Result` types
pub trait ResultIteratorExt {
    /// `Ok(Val)`
    type Val;
    /// `Err(Err)`
    type Err;
    /// `Iterator::next` inverted returns a `Result`: `Ok(None)` at end of
    /// iteration, `Ok(Some(v))` on success, `Err(e)` on a yielded error.
    fn next_invert(&mut self) -> Result<Option<Self::Val>, Self::Err>;
}
// Blanket impl: any iterator of `Result<T, E>` gains `next_invert`, which is
// simply `next()` followed by the Option<Result> -> Result<Option> inversion.
impl<T, E, I: Iterator<Item=Result<T, E>>> ResultIteratorExt for I {
    type Val = T;
    type Err = E;
    fn next_invert(&mut self) -> Result<Option<Self::Val>, Self::Err> {
        self.next().invert()
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn test_option_result() {
        use std::iter;
        // An iterator that yields one Err, then ends.
        let mut iter = iter::once(Err::<(), _>(0));
        // Some(Err(0)) inverts to Err(0); a double inversion round-trips.
        let err = iter.next();
        assert!(err.invert().is_err());
        assert_eq!(err.invert().invert(), err);
        // Exhausted: None inverts to Ok(None), and back again.
        let none = iter.next();
        assert_eq!(none.invert(), Ok(None));
        assert_eq!(none.invert().invert(), none);
    }
}
|
/// A tile coordinate on a 2D grid (column = x, row = y, row 0 at the top).
pub struct GridPosition
{
    pub column: i32,
    pub row: i32,
}
/// Maps a grid coordinate to the centre of its tile in normalized device
/// coordinates: x grows rightwards from -1, y shrinks downwards from +1,
/// with the grid spanning [-1, 1] on both axes.
pub fn grid_to_position(grid_pos: &GridPosition, width: f32, height: f32) -> cgmath::Vector2<f32>
{
    // Each tile occupies 2/width x 2/height of the [-1, 1] x [-1, 1] square.
    let tile_width = 2.0 / width;
    let tile_height = 2.0 / height;
    let x = -1.0 + tile_width / 2.0 + grid_pos.column as f32 * tile_width;
    let y = 1.0 - tile_height / 2.0 - grid_pos.row as f32 * tile_height;
    cgmath::Vector2 { x, y }
}
/// Describes one sprite animation: the inclusive frame-index range and the
/// per-frame timeout.
pub struct SpriteAnimationMetaData
{
    pub from_index: i32,
    pub to_index: i32,
    // Seconds (presumably) between frames — TODO confirm unit against callers.
    pub timeout: f32,
}
impl std::fmt::Display for SpriteAnimationMetaData
{
    /// Formats as `(from_index, to_index, timeout)`.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result
    {
        write!(f, "({}, {}, {})", self.from_index, self.to_index, self.timeout)
    }
}
extern crate usbd_hid_descriptors;
use syn::{parse, Field, Fields, Type, Expr, Result, Ident, ExprLit, Lit, TypePath};
use usbd_hid_descriptors::*;
use crate::spec::*;
/// MainItem describes all the mandatory data points of a Main item.
#[derive(Debug, Default, Clone)]
pub struct MainItem {
    pub kind: MainItemKind,
    // Logical value range advertised in the descriptor.
    pub logical_minimum: isize,
    pub logical_maximum: isize,
    // Number of fields and their width in bits.
    pub report_count: u16,
    pub report_size: u16,
    // Trailing constant padding bits when bit-packing, if any.
    pub padding_bits: Option<u16>,
}
/// A single struct field together with the descriptor Main item derived for
/// it and its element bit width.
#[derive(Debug, Clone)]
pub struct ReportUnaryField {
    pub bit_width: usize,
    pub descriptor_item: MainItem,
    pub ident: Ident,
}
/// analyze_field constructs a main item from an item spec & field.
///
/// The field's type must be a (possibly array of) primitive integer type
/// named `uN`/`iN` with N < 64. With `want_bits` set, the field is bit-packed
/// as `want_bits` 1-bit entries plus constant padding; otherwise it becomes
/// `size` entries of N bits with the logical range of the integer type.
pub fn analyze_field(field: Field, ft: Type, item: &ItemSpec) -> Result<ReportUnaryField> {
    // `size` is the array length (1 for a plain path type).
    let (p, size) = parse_type(&field, ft)?;
    if p.path.segments.len() != 1 {
        return Err(
            parse::Error::new(field.ident.unwrap().span(), "`#[gen_hid_descriptor]` internal error when unwrapping type")
        );
    }
    // Split e.g. "u16" into sign marker "u" and width "16".
    let type_ident = p.path.segments[0].ident.clone();
    let type_str = type_ident.to_string();
    let (sign, size_str) = type_str.as_str().split_at(1);
    let bit_width = size_str.parse();
    let type_setter: Option<fn(&mut ReportUnaryField, usize)> = match sign {
        "u" => Some(set_unsigned_unary_item),
        "i" => Some(set_signed_unary_item),
        &_ => None
    };
    if bit_width.is_err() || type_setter.is_none() {
        return Err(
            parse::Error::new(type_ident.span(), "`#[gen_hid_descriptor]` type not supported")
        )
    }
    let bit_width = bit_width.unwrap();
    // ssmarshal (the serializer used downstream) cannot handle 64-bit+ ints.
    if bit_width >= 64 {
        return Err(
            parse::Error::new(type_ident.span(), "`#[gen_hid_descriptor]` integer larger than 64 is not supported in ssmarshal")
        )
    }
    let mut output = unary_item(field.ident.clone().unwrap(), item.kind, bit_width);
    if let Some(want_bits) = item.want_bits { // bitpack
        // Bit-packed boolean field: want_bits 1-bit entries in {0, 1}.
        output.descriptor_item.logical_minimum = 0;
        output.descriptor_item.logical_maximum = 1;
        output.descriptor_item.report_count = want_bits;
        output.descriptor_item.report_size = 1;
        // Total storage available in the field (bits).
        let width = output.bit_width * size;
        if width < want_bits as usize {
            return Err(
                parse::Error::new(field.ident.unwrap().span(), format!("`#[gen_hid_descriptor]` not enough space, missing {} bit(s)", want_bits as usize - width))
            )
        }
        // Left-over bits become constant padding in the descriptor.
        let remaining_bits = width as u16 - want_bits;
        if remaining_bits > 0 {
            output.descriptor_item.padding_bits = Some(remaining_bits);
        }
    } else { // array of reports
        // Set the signed/unsigned logical range, then scale the count by the
        // array length.
        type_setter.unwrap()(&mut output, bit_width);
        output.descriptor_item.report_count *= size as u16;
    }
    Ok(output)
}
/// Unwraps a field type into its element `TypePath` and element count:
/// `Type::Path` yields (path, 1); `Type::Array` recurses into the element
/// type and returns its literal length. Any other type form is rejected.
fn parse_type(field: &Field, ft: Type) -> Result<(TypePath, usize)> {
    match ft {
        Type::Array(a) => {
            // Only integer-literal lengths are supported; anything else
            // leaves `size` at 0 and errors below.
            let mut size: usize = 0;
            if let Expr::Lit(ExprLit { lit, .. }) = a.len {
                if let Lit::Int(lit) = lit {
                    if let Ok(num) = lit.base10_parse::<usize>() {
                        size = num;
                    }
                }
            }
            if size == 0 {
                Err(
                    parse::Error::new(field.ident.as_ref().unwrap().span(), "`#[gen_hid_descriptor]` array has invalid length")
                )
            } else {
                // Keep the element's path, but this level's length.
                Ok((parse_type(&field, *a.elem)?.0, size))
            }
        }
        Type::Path(p) => {
            Ok((p, 1))
        }
        _ => {
            Err(
                parse::Error::new(field.ident.as_ref().unwrap().span(),"`#[gen_hid_descriptor]` cannot handle field type")
            )
        }
    }
}
/// Sets the logical range for a signed `bit_width`-bit field to
/// `[-(2^(w-1) - 1), 2^(w-1) - 1]`.
///
/// Computed in 64-bit: the previous `2u32.pow((bit_width-1) as u32)`
/// overflowed `u32` for any bit_width above 32, even though widths up to 63
/// are accepted by `analyze_field`.
fn set_signed_unary_item(out: &mut ReportUnaryField, bit_width: usize) {
    let bound = ((1u64 << (bit_width - 1)) - 1) as isize;
    out.descriptor_item.logical_minimum = -bound;
    out.descriptor_item.logical_maximum = bound;
}
/// Sets the logical range for an unsigned `bit_width`-bit field to
/// `[0, 2^w - 1]`.
///
/// Computed in 64-bit: the previous `2u32.pow(bit_width as u32)` overflowed
/// `u32` already for a plain `u32` field (bit_width == 32).
fn set_unsigned_unary_item(out: &mut ReportUnaryField, bit_width: usize) {
    out.descriptor_item.logical_minimum = 0;
    out.descriptor_item.logical_maximum = ((1u64 << bit_width) - 1) as isize;
}
/// Builds a single-entry (`report_count == 1`) report field of `bit_width`
/// bits whose logical range is left at 0 for the caller to fill in.
fn unary_item(id: Ident, kind: MainItemKind, bit_width: usize) -> ReportUnaryField {
    let descriptor_item = MainItem {
        kind,
        logical_minimum: 0,
        logical_maximum: 0,
        report_count: 1,
        report_size: bit_width as u16,
        padding_bits: None,
    };
    ReportUnaryField {
        ident: id,
        bit_width,
        descriptor_item,
    }
}
/// Looks up the field named `name` in `fields`, panicking when absent —
/// callers only ask for fields they have already seen.
pub fn field_decl(fields: &Fields, name: String) -> Field {
    fields
        .iter()
        .find(|field| field.ident.clone().unwrap().to_string() == name)
        .cloned()
        .unwrap_or_else(|| panic!("internal error: could not find field {} which should exist", name))
}
|
#![no_std]
extern crate rawpointer;
extern crate core as std;
mod iter;
pub use iter::{SliceIter};
|
//! Cretonne file reader library.
//!
//! The `cretonne_reader` library supports reading .cton files. This functionality is needed for
//! testing Cretonne, but is not essential for a JIT compiler.
#![deny(missing_docs, trivial_numeric_casts, unused_extern_crates)]
#![warn(unused_import_braces, unstable_features)]
#![cfg_attr(feature = "clippy", plugin(clippy(conf_file = "../../clippy.toml")))]
#![cfg_attr(feature = "cargo-clippy",
allow(new_without_default, new_without_default_derive))]
#![cfg_attr(feature="cargo-clippy", warn(
float_arithmetic,
mut_mut,
nonminimal_bool,
option_map_unwrap_or,
option_map_unwrap_or_else,
print_stdout,
unicode_not_nfc,
use_self,
))]
extern crate cretonne_codegen;
pub use error::{Error, Location, Result};
pub use isaspec::{parse_options, IsaSpec};
pub use parser::{parse_functions, parse_test};
pub use sourcemap::SourceMap;
pub use testcommand::{TestCommand, TestOption};
pub use testfile::{Comment, Details, TestFile};
mod error;
mod isaspec;
mod lexer;
mod parser;
mod sourcemap;
mod testcommand;
mod testfile;
|
use std::iter;
use std::io::BufReader;
use std::default::Default;
use html5ever::parse_document;
use html5ever::tendril::TendrilSink;
use html5ever::rcdom::{Handle, NodeData, RcDom};
// Reference
// [Rustでスクレイピング(html5ever)](https://qiita.com/nacika_ins/items/c618c503cdc0080c7db8)
// scanning the node
/// Pretty-prints the DOM subtree rooted at `node`, one line per node,
/// indented by `indent` spaces; recurses into children with indent + 2.
fn walk(indent: usize, node: Handle) {
    fn escape_default(s: &str) -> String {
        // chars() returns an iterator
        s.chars().flat_map(|c| c.escape_default()).collect()
    }
    // express indent
    print!("{}", iter::repeat(" ").take(indent).collect::<String>());
    match node.data {
        NodeData::Document => println!("#Document"),
        NodeData::Doctype {
            ref name,
            ref public_id,
            ref system_id,
        } => println!("<!DOCTYPE {} \"{}\" \"{}\">", name, public_id, system_id),
        NodeData::Text { ref contents } => {
            println!("#text: {}", escape_default(&contents.borrow()))
        }
        NodeData::Comment { ref contents } => println!("<!-- {} -->", escape_default(contents)),
        NodeData::Element {
            ref name,
            ref attrs,
            ..
        } => {
            // Only HTML-namespace elements and null-namespace attributes are
            // expected from this parse.
            assert!(name.ns == ns!(html));
            print!("<{}", name.local);
            for attr in attrs.borrow().iter() {
                assert!(attr.name.ns == ns!());
                print!(" {}=\"{}\"", attr.name.local, attr.value);
            }
            println!(">");
        }
        NodeData::ProcessingInstruction { .. } => unreachable!(),
    }
    // children returns the `Handle` type value.
    // `Handle` implements Clone to get another reference to the same node
    // [todo]Does the borrow method return `Array` ?
    for child in node.children.borrow().iter() {
        walk(indent + 2, child.clone()); // 2 is indent space
    }
}
/// Parses a small hard-coded HTML document with html5ever and dumps its DOM
/// tree to stdout via `walk`.
pub fn f() {
    let text = "
    <html>
        <body>
            hello
            <font color=\"red\">web</font>
            world
            !!
        </body>
    </html>
"
        .to_owned(); // convert &str to String
    // Feed the bytes through a BufReader so html5ever can stream-parse them.
    let dom = parse_document(RcDom::default(), Default::default())
        .from_utf8()
        .read_from(&mut BufReader::new(text.as_bytes()))
        .unwrap();
    walk(0, dom.document); // dom.document is `Handle` type and we manipulate DOM from this.
}
use indicatif::{ProgressBar, ProgressStyle};
use std::convert::TryInto;
/// Control messages for a progress-bar worker: advance to a value, or stop.
// NOTE(review): no consumer of `Message` is visible in this file — confirm
// it is matched elsewhere (e.g. a channel receiver driving `ProgBar`).
pub enum Message {
    Update(u32),
    Kill,
}
/// Wraps an indicatif progress bar together with the output-file name shown
/// in its prefix/messages.
pub struct ProgBar {
    bar: ProgressBar,
    outfile: String,
}
impl ProgBar {
    /// Creates a bar of length `full_val` styled `prefix [=====---] NN%`,
    /// prefixed with "Building: '<outfile>'" and positioned at 0.
    pub fn new(outfile: &String, full_val: u32) -> ProgBar {
        let bar = ProgressBar::new(full_val.try_into().unwrap());
        bar.set_style(
            ProgressStyle::default_bar()
                .template("{prefix} [{wide_bar:.green/red}] {percent}%")
                .progress_chars("==-"),
        );
        bar.set_prefix(format!("Building: '{}'", outfile));
        bar.set_position(0);
        ProgBar {
            bar,
            outfile: outfile.clone(),
        }
    }
    /// Advances the bar to `val`. On reaching the full length the prefix is
    /// recolored green, changed to "Done", and the bar is finished.
    // NOTE(review): there is no early return after `finish()` — the trailing
    // set_message/set_position still run on the finished bar. Also, the
    // `length()`/`set_prefix(String)` signatures here match older indicatif
    // releases (length() -> u64); confirm against the pinned version.
    pub fn update(&mut self, val: u64) {
        if val == self.bar.length() {
            self.bar.set_position(self.bar.length());
            self.bar.set_style(
                ProgressStyle::default_bar()
                    .template("{prefix:.green} [{wide_bar:.green/red}] {percent}%")
                    .progress_chars("==-"),
            );
            self.bar.set_prefix(format!("Done: '{}'", self.outfile));
            self.bar.finish();
        }
        self.bar
            .set_message(format!("Building: '{}'", self.outfile));
        self.bar.set_position(val);
    }
}
|
use std::{
ops::{Add,AddAssign,Sub,SubAssign,Mul,MulAssign,Div,DivAssign,Neg},
mem,
};
use crate::traits::*;
use num_traits::Signed;
use serde::{Serialize, Deserialize};
pub use float_cmp::{Ulps,ApproxEq};
// Generates a fixed-size vector type plus its complete API: constructors,
// element access, arithmetic operators (vector/vector and vector/scalar, in
// both plain and assigning forms), approximate float comparison, and the
// crate's own traits (CwMin/CwMax/Dot/Neg/CwAbs).
//
// `dim` is the component count and `elems` maps a zero-based index to a
// field name, e.g. `elems: { 0 => x, 1 => y }`.
macro_rules! implement_vector {
    ($type:ident {
        dim: $dim:expr,
        elems: { $($num:expr => $member:ident),+ }
    }) => {
        //
        // DEFINE THE TYPE
        //
        #[derive(Clone, Copy, Debug, PartialEq, PartialOrd, Serialize, Deserialize)]
        pub struct $type<T> where T: Base {
            $(pub $member: T),*
        }
        //
        // IMPLEMENTATION WHEN BASE TRAIT
        //
        impl<T: Base> $type<T> {
            /// Builds a vector from its components, in declaration order.
            pub fn new($($member: T),*) -> Self {
                $type { $($member: $member),* }
            }
            /// Number of components. Expands to `1 + 1 + ... + 0`, one `1`
            /// per declared field, so it is a constant expression.
            #[inline]
            pub fn dims() -> usize {
                ($({let $member = 1; $member} +)* 0)
            }
            /// Vector with every component set to `T::zero()`.
            #[inline]
            pub fn zero() -> Self {
                $type { $($member: T::zero()),* }
            }
            /// Vector with every component set to `T::one()`.
            #[inline]
            pub fn one() -> Self {
                $type { $($member: T::one()),* }
            }
            // TODO(henk): Explain that one can use .as_array().clone() to create a new array.
            /// Reinterprets the vector as a borrowed fixed-size array.
            // SAFETY(review): relies on the struct's field layout matching
            // `[T; $dim]`. The struct is not `#[repr(C)]`, so the language
            // does not strictly guarantee this — TODO confirm.
            #[inline]
            pub fn as_array(&self) -> &[T; $dim] {
                unsafe { mem::transmute(self) }
            }
        }
        //
        // IMPLEMENTATION WHEN BASEFLOAT TRAIT
        //
        impl<T> $type<T>
            where T: BaseFloat
        {
            /// Euclidean (L2) length: square root of the sum of squares.
            #[inline]
            pub fn length(&self) -> T
                where T: AddAssign + Mul<Output=T>
            {
                let mut sum = T::zero();
                $(sum += self.$member * self.$member;)*
                T::sqrt(sum)
            }
            /// Unit vector pointing in the same direction.
            // NOTE(review): no zero-length guard — a zero vector presumably
            // produces NaN components; confirm callers never pass one.
            #[inline]
            pub fn normalize(&self) -> $type<T>
                where T: Div<Output=T> + Mul<Output=T>
            {
                self / self.length()
            }
        }
        //
        // DEFAULT TRAIT
        //
        // The default vector is the zero vector.
        impl<T> Default for $type<T>
            where T: Base
        {
            fn default() -> Self {
                $type::new(
                    $({ let $member = T::zero(); $member }),*
                )
            }
        }
        // --------------------------------------------------------------------------
        // Elem Trait
        // --------------------------------------------------------------------------
        // Indexed component access by value; panics when `index >= dims()`.
        impl<T: Base> Elem<usize> for $type<T> {
            type Output = T;
            #[inline]
            fn elem(self, index: usize) -> Self::Output {
                match index {
                    $($num => self.$member,)*
                    _ => panic!("index out of range")
                }
            }
        }
        // Indexed component access through a shared reference.
        impl<'a, T: Base> Elem<usize> for &'a $type<T> {
            type Output = &'a T;
            #[inline]
            fn elem(self, index: usize) -> Self::Output {
                match index {
                    $($num => &self.$member,)*
                    _ => panic!("index out of range")
                }
            }
        }
        // Indexed component access through a mutable reference.
        impl<'a, T: Base> Elem<usize> for &'a mut $type<T> {
            type Output = &'a mut T;
            #[inline]
            fn elem(self, index: usize) -> Self::Output {
                match index {
                    $($num => &mut self.$member,)*
                    _ => panic!("index out of range")
                }
            }
        }
        // Component-wise approximate equality: true only when every pair of
        // components compares approximately equal (the repetition is folded
        // together with `&&`).
        impl<T: BaseFloat> ApproxEq for $type<T>
            where T: ApproxEq<Flt=T>
        {
            type Flt = T;
            #[inline]
            fn approx_eq(&self, other: &Self, epsilon: T, ulps: <T as Ulps>::U) -> bool {
                $(self.$member.approx_eq(&other.$member, epsilon, ulps))&&*
            }
        }
        //
        // IMPLEMENT BINARY OPERATORS
        //
        // All arithmetic below is component-wise; scalar variants apply the
        // scalar to every component.
        // v + v
        implement_binary_operator!(Add<$type<T>> for $type<T>,
            fn add(lhs, rhs) -> $type<T> {
                $type::new( $(lhs.$member + rhs.$member),* )
            }
        );
        // v += v
        implement_binary_assign_operator!(AddAssign<$type<T>> for $type<T>,
            fn add_assign(lhs, rhs) {{
                $(lhs.$member += rhs.$member;)*
            }}
        );
        // v - v
        implement_binary_operator!(Sub<$type<T>> for $type<T>,
            fn sub(lhs, rhs) -> $type<T> {
                $type::new( $(lhs.$member - rhs.$member),* )
            }
        );
        // v -= v
        implement_binary_assign_operator!(SubAssign<$type<T>> for $type<T>,
            fn sub_assign(lhs, rhs) {{
                $(lhs.$member -= rhs.$member;)*
            }}
        );
        // v * s
        implement_binary_operator!(Mul<T> for $type<T>,
            fn mul(vector, scalar) -> $type<T> {
                $type::new( $(vector.$member * scalar),* )
            }
        );
        // v *= s
        implement_binary_assign_operator!(MulAssign<T> for $type<T>,
            fn mul_assign(vector, scalar) {{
                $(vector.$member *= scalar;)*
            }}
        );
        // v * v
        implement_binary_operator!(Mul<$type<T>> for $type<T>,
            fn mul(lhs, rhs) -> $type<T> {
                $type::new( $(lhs.$member * rhs.$member),* )
            }
        );
        // v *= v
        implement_binary_assign_operator!(MulAssign<$type<T>> for $type<T>,
            fn mul_assign(lhs, rhs) {{
                $(lhs.$member *= rhs.$member;)*
            }}
        );
        // v / s
        implement_binary_operator!(Div<T> for $type<T>,
            fn div(vector, scalar) -> $type<T> {
                $type::new( $(vector.$member / scalar),* )
            }
        );
        // v /= s
        implement_binary_assign_operator!(DivAssign<T> for $type<T>,
            fn div_assign(vector, scalar) {{
                $(vector.$member /= scalar;)*
            }}
        );
        // v / v
        implement_binary_operator!(Div<$type<T>> for $type<T>,
            fn div(lhs, rhs) -> $type<T> {
                $type::new( $(lhs.$member / rhs.$member),* )
            }
        );
        // v /= v
        implement_binary_assign_operator!(DivAssign<$type<T>> for $type<T>,
            fn div_assign(lhs, rhs) {{
                $(lhs.$member /= rhs.$member;)*
            }}
        );
        // --------------------------------------------------------------------------
        // Own traits
        // --------------------------------------------------------------------------
        // v cw_min v — component-wise minimum.
        implement_binary_operator!(CwMin<$type<T>> for $type<T>,
            fn cw_min(lhs, rhs) -> $type<T> {
                $type::new( $(if lhs.$member < rhs.$member { lhs.$member } else { rhs.$member }),* )
            }
        );
        // v cw_max v — component-wise maximum.
        implement_binary_operator!(CwMax<$type<T>> for $type<T>,
            fn cw_max(lhs, rhs) -> $type<T> {
                $type::new( $(if rhs.$member < lhs.$member { lhs.$member } else { rhs.$member }),* )
            }
        );
        // v dot v — sum of the component-wise products.
        implement_binary_operator!(Dot<$type<T>> for $type<T>,
            fn dot(lhs, rhs) -> T {{
                let mut sum = T::zero();
                $(sum += lhs.$member * rhs.$member;)*
                sum
            }}
        );
        // -v — component-wise negation.
        implement_unary_operator!(Neg for $type<T> where T: Neg<Output=T>,
            fn neg(this) -> $type<T> {
                $type::new(
                    $(-this.$member),*
                )
            }
        );
        // component-wise absolute value.
        implement_unary_operator!(CwAbs for $type<T> where T: Signed,
            fn cw_abs(this) -> $type<T> {
                $type::new( $(this.$member.abs()),* )
            }
        );
        /*
        implement_binary_operator!(Elem<usize> for $type<T>,
            fn elem(this, index) -> T {{
                this.to_array()[index]
            }}
        );
        */
        // --------------------------------------------------------------------------
        // Operators that cannot be implemented generically
        // --------------------------------------------------------------------------
        // Presumably the scalar-on-the-left forms (e.g. `2.0 * v`), which need
        // a concrete scalar type per impl — TODO confirm against that macro.
        implement_specific_operators_for_vector!($type { $($member),* } for i8 );
        implement_specific_operators_for_vector!($type { $($member),* } for i16);
        implement_specific_operators_for_vector!($type { $($member),* } for i32);
        implement_specific_operators_for_vector!($type { $($member),* } for i64);
        implement_specific_operators_for_vector!($type { $($member),* } for u8 );
        implement_specific_operators_for_vector!($type { $($member),* } for u16);
        implement_specific_operators_for_vector!($type { $($member),* } for u32);
        implement_specific_operators_for_vector!($type { $($member),* } for u64);
        implement_specific_operators_for_vector!($type { $($member),* } for f32);
        implement_specific_operators_for_vector!($type { $($member),* } for f64);
    }
}
// Instantiate the concrete 2-, 3- and 4-component vector types.
implement_vector!(Vector2 { dim: 2, elems: { 0 => x, 1 => y } });
implement_vector!(Vector3 { dim: 3, elems: { 0 => x, 1 => y, 2 => z } });
implement_vector!(Vector4 { dim: 4, elems: { 0 => x, 1 => y, 2 => z, 3 => w } });
// v cross v — 3D cross product; the standard determinant expansion, so the
// result is orthogonal to both inputs.
implement_binary_operator!(Cross<Vector3<T>> for Vector3<T>,
    fn cross(a, b) -> Vector3<T> {
        Vector3::new(
            a.y*b.z - a.z*b.y,
            a.z*b.x - a.x*b.z,
            a.x*b.y - a.y*b.x,
        )
    }
);
impl<T: Base> Vector4<T> {
    /// Homogeneous (perspective) divide: yields the `x`, `y` and `z`
    /// components each divided by `w`, dropping the `w` component.
    pub fn wdiv(&self) -> Vector3<T> {
        let w = self.w;
        Vector3::new(self.x / w, self.y / w, self.z / w)
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.